Documentation ¶
Overview ¶
Package goai provides artificial intelligence utilities for Go, including a flexible interface for interacting with various Large Language Model (LLM) providers, text chunking that breaks large texts into manageable chunks while preserving context and meaning, AI-powered text processing and response extraction, embedding generation, and vector storage capabilities.
Example:
This example demonstrates how to set up a basic JSON-based server, add a custom tool for weather information retrieval, register a resource, and add a prompt.
The "get_weather" tool fetches the current weather for the given location input.
package main

import (
	"context"
	"encoding/json"
	"fmt"
	"os"
)

func main() {
	// Define a custom tool for fetching weather.
	weatherTool := Tool{
		Name:        "get_weather",
		Description: "Get the current weather for a given location.",
		InputSchema: json.RawMessage(`{
			"type": "object",
			"properties": {
				"location": {
					"type": "string",
					"description": "The city and state, e.g. San Francisco, CA"
				}
			},
			"required": ["location"]
		}`),
		Handler: func(ctx context.Context, params CallToolParams) (CallToolResult, error) {
			var input struct {
				Location string `json:"location"`
			}
			if err := json.Unmarshal(params.Arguments, &input); err != nil {
				return CallToolResult{}, err
			}
			return CallToolResult{
				Content: []ToolResultContent{
					{
						Type: "text",
						Text: fmt.Sprintf("Weather in %s: Sunny, 72°F", input.Location),
					},
				},
			}, nil
		},
	}

	// Define resources, prompts, and configure the base server.
	resources := []Resource{
		{
			URI:         "file:///tmp/hello_world.txt",
			Name:        "Hello World",
			Description: "A sample text document for testing",
			MimeType:    "text/plain",
			TextContent: "This is the content of the sample document.",
		},
	}

	prompt := Prompt{
		Name:        "code_review",
		Description: "Performs a detailed code review and suggests improvements",
		Arguments: []PromptArgument{
			{
				Name:        "code",
				Description: "Source code to be reviewed",
				Required:    true,
			},
		},
		Messages: []PromptMessage{
			{
				Role: "user",
				Content: PromptContent{
					Type: "text",
					Text: "Please review this code: {{code}}",
				},
			},
		},
	}

	baseServer, err := NewBaseServer(
		UseLogger(NewNullLogger()),
	)
	if err != nil {
		panic(err)
	}

	err = baseServer.AddTools(weatherTool)
	if err != nil {
		panic(err)
	}

	err = baseServer.AddPrompts(prompt)
	if err != nil {
		panic(err)
	}

	err = baseServer.AddResources(resources...)
	if err != nil {
		panic(err)
	}

	server := NewStdIOServer(
		baseServer,
		os.Stdin,
		os.Stdout,
	)
	ctx := context.Background()
	if err := server.Run(ctx); err != nil {
		panic(err)
	}
}
Index ¶
- Constants
- Variables
- func ConvertToGenaiTool(customTool Tool) (*genai.Tool, error)
- func StartSpan(ctx context.Context, name string, opts ...trace.SpanStartOption) (context.Context, trace.Span)
- type AnthropicClient
- type AnthropicClientProvider
- type AnthropicLLMProvider
- type AnthropicProviderConfig
- type BaseServer
- func (s *BaseServer) AddPrompts(prompts ...Prompt) error
- func (s *BaseServer) AddResources(resources ...Resource) error
- func (s *BaseServer) AddTools(tools ...Tool) error
- func (s *BaseServer) CallTool(ctx context.Context, params CallToolParams) (CallToolResult, error)
- func (s *BaseServer) ListPrompts(ctx context.Context, cursor string, limit int) ListPromptsResult
- func (s *BaseServer) ListResources(ctx context.Context, cursor string, limit int) ListResourcesResult
- func (s *BaseServer) ListTools(ctx context.Context, cursor string, limit int) ListToolsResult
- func (s *BaseServer) LogMessage(level LogLevel, loggerName string, data interface{})
- func (s *BaseServer) ReadResource(ctx context.Context, params ReadResourceParams) (ReadResourceResult, error)
- func (s *BaseServer) SendPromptListChangedNotification()
- func (s *BaseServer) SendToolListChangedNotification()
- type BedrockClient
- type BedrockClientWrapper
- func (w *BedrockClientWrapper) Converse(ctx context.Context, params *bedrockruntime.ConverseInput, ...) (*bedrockruntime.ConverseOutput, error)
- func (w *BedrockClientWrapper) ConverseStream(ctx context.Context, params *bedrockruntime.ConverseStreamInput, ...) (*bedrockruntime.ConverseStreamOutput, error)
- func (w *BedrockClientWrapper) InvokeModel(ctx context.Context, params *bedrockruntime.InvokeModelInput, ...) (*bedrockruntime.InvokeModelOutput, error)
- type BedrockEmbeddingProvider
- type BedrockLLMProvider
- type BedrockProviderConfig
- type CallToolParams
- type CallToolResult
- type Capabilities
- type CapabilitiesLogging
- type CapabilitiesPrompts
- type CapabilitiesResources
- type CapabilitiesTools
- type ChatHistory
- type ChatHistoryMessage
- type ChatHistoryStorage
- type ChatSessionService
- type ChunkingByLLMProvider
- type ChunkingProvider
- type Client
- func (c *Client) CallTool(ctx context.Context, params CallToolParams) (CallToolResult, error)
- func (c *Client) Close(ctx context.Context) error
- func (c *Client) Connect(ctx context.Context) error
- func (c *Client) GetCapabilities() Capabilities
- func (c *Client) GetPrompt(ctx context.Context, params GetPromptParams) ([]PromptMessage, error)
- func (c *Client) GetProtocolVersion() string
- func (c *Client) GetState() ConnectionState
- func (c *Client) IsInitialized() bool
- func (c *Client) ListPrompts(ctx context.Context) ([]Prompt, error)
- func (c *Client) ListTools(ctx context.Context) ([]Tool, error)
- type ClientConfig
- type ConnectionError
- type ConnectionState
- type DefaultLogger
- func (l *DefaultLogger) Debug(args ...interface{})
- func (l *DefaultLogger) Error(args ...interface{})
- func (l *DefaultLogger) Info(args ...interface{})
- func (l *DefaultLogger) Warn(args ...interface{})
- func (l *DefaultLogger) WithContext(ctx context.Context) Logger
- func (l *DefaultLogger) WithErr(err error) Logger
- func (l *DefaultLogger) WithFields(fields map[string]interface{}) Logger
- type EmbeddingModel
- type EmbeddingObject
- type EmbeddingProvider
- type EmbeddingResponse
- type EmbeddingService
- type Error
- type GeminiModelService
- type GeminiProvider
- func (p *GeminiProvider) Close() error
- func (p *GeminiProvider) GetResponse(ctx context.Context, messages []LLMMessage, config LLMRequestConfig) (LLMResponse, error)
- func (p *GeminiProvider) GetStreamingResponse(ctx context.Context, messages []LLMMessage, config LLMRequestConfig) (<-chan StreamingLLMResponse, error)
- type GeminiRole
- type GetPromptParams
- type GoogleGeminiChatSessionService
- type GoogleGeminiService
- func (g *GoogleGeminiService) ConfigureModel(config *genai.GenerationConfig, tools []*genai.Tool) error
- func (g *GoogleGeminiService) GenerateContentStream(ctx context.Context, parts ...genai.Part) (StreamIteratorService, error)
- func (g *GoogleGeminiService) StartChat(initialHistory []*genai.Content) ChatSessionService
- type GoogleGeminiStreamIteratorService
- type InMemoryChatHistoryStorage
- func (s *InMemoryChatHistoryStorage) AddMessage(ctx context.Context, sessionID string, message ChatHistoryMessage) error
- func (s *InMemoryChatHistoryStorage) CreateChat(ctx context.Context) (*ChatHistory, error)
- func (s *InMemoryChatHistoryStorage) DeleteChat(ctx context.Context, sessionID string) error
- func (s *InMemoryChatHistoryStorage) GetChat(ctx context.Context, sessionID string) (*ChatHistory, error)
- func (s *InMemoryChatHistoryStorage) ListChatHistories(ctx context.Context) ([]ChatHistory, error)
- func (s *InMemoryChatHistoryStorage) UpdateChatMetadata(ctx context.Context, sessionID string, metadata map[string]interface{}) error
- type InitializeParams
- type InitializeResponse
- type InitializeResult
- type IntermediateJSONSchema
- type JSONExtractor
- type LLMError
- type LLMMessage
- type LLMMessageRole
- type LLMPromptTemplate
- type LLMProvider
- type LLMRequest
- type LLMRequestConfig
- type LLMResponse
- type ListParams
- type ListPromptsResult
- type ListResourcesResult
- type ListToolsResult
- type LogLevel
- type LogManager
- type LogMessageParams
- type Logger
- type LogrusLogger
- func (l *LogrusLogger) Debug(args ...interface{})
- func (l *LogrusLogger) Error(args ...interface{})
- func (l *LogrusLogger) Info(args ...interface{})
- func (l *LogrusLogger) Warn(args ...interface{})
- func (l *LogrusLogger) WithContext(ctx context.Context) Logger
- func (l *LogrusLogger) WithErr(err error) Logger
- func (l *LogrusLogger) WithFields(fields map[string]interface{}) Logger
- type MockBedrockClient
- func (_m *MockBedrockClient) Converse(ctx context.Context, params *bedrockruntime.ConverseInput, ...) (*bedrockruntime.ConverseOutput, error)
- func (_m *MockBedrockClient) ConverseStream(ctx context.Context, params *bedrockruntime.ConverseStreamInput, ...) (*bedrockruntime.ConverseStreamOutput, error)
- func (_m *MockBedrockClient) InvokeModel(ctx context.Context, params *bedrockruntime.InvokeModelInput, ...) (*bedrockruntime.InvokeModelOutput, error)
- type NoOpsLLMProvider
- type NoOpsOption
- type Notification
- type NullLogger
- func (l *NullLogger) Debug(args ...interface{})
- func (l *NullLogger) Error(args ...interface{})
- func (l *NullLogger) Info(args ...interface{})
- func (l *NullLogger) Warn(args ...interface{})
- func (l *NullLogger) WithContext(ctx context.Context) Logger
- func (l *NullLogger) WithErr(err error) Logger
- func (l *NullLogger) WithFields(fields map[string]interface{}) Logger
- type Offset
- type OpenAIClient
- type OpenAIClientProvider
- type OpenAICompatibleEmbeddingProvider
- type OpenAILLMProvider
- type OpenAIProviderConfig
- type PostgresProvider
- func (p *PostgresProvider) Close() error
- func (p *PostgresProvider) CreateCollection(ctx context.Context, config *VectorCollectionConfig) error
- func (p *PostgresProvider) DeleteCollection(ctx context.Context, name string) error
- func (p *PostgresProvider) DeleteDocument(ctx context.Context, collection, id string) error
- func (p *PostgresProvider) GetDocument(ctx context.Context, collection, id string) (*VectorDocument, error)
- func (p *PostgresProvider) Initialize(ctx context.Context) error
- func (p *PostgresProvider) ListCollections(ctx context.Context) ([]string, error)
- func (p *PostgresProvider) SearchByID(ctx context.Context, collection, id string, opts *VectorSearchOptions) ([]VectorSearchResult, error)
- func (p *PostgresProvider) SearchByVector(ctx context.Context, collection string, vector []float32, ...) ([]VectorSearchResult, error)
- func (p *PostgresProvider) UpsertDocument(ctx context.Context, collection string, doc *VectorDocument) error
- func (p *PostgresProvider) UpsertDocuments(ctx context.Context, collection string, docs []*VectorDocument) error
- type PostgresStorageConfig
- type Prompt
- type PromptArgument
- type PromptContent
- type PromptGetResponse
- type PromptMessage
- type ReadResourceParams
- type ReadResourceResult
- type Request
- type RequestOption
- func UseToolsProvider(provider *ToolsProvider) RequestOption
- func WithAllowedTools(allowedTools []string) RequestOption
- func WithMaxIterations(maxIterations int) RequestOption
- func WithMaxToken(maxToken int64) RequestOption
- func WithTemperature(temp float64) RequestOption
- func WithThinkingEnabled(thinkingBudget int64) RequestOption
- func WithTopK(topK int64) RequestOption
- func WithTopP(topP float64) RequestOption
- func WithTracingEnabled() RequestOption
- type Resource
- type ResourceContent
- type Response
- type ResponseExtractor
- type ResponseToStartupClientRequest
- type RetryStrategy
- type SQLiteChatHistoryStorage
- func (s *SQLiteChatHistoryStorage) AddMessage(ctx context.Context, sessionID string, message ChatHistoryMessage) error
- func (s *SQLiteChatHistoryStorage) Close() error
- func (s *SQLiteChatHistoryStorage) CreateChat(ctx context.Context) (*ChatHistory, error)
- func (s *SQLiteChatHistoryStorage) DeleteChat(ctx context.Context, sessionID string) error
- func (s *SQLiteChatHistoryStorage) GetChat(ctx context.Context, sessionID string) (*ChatHistory, error)
- func (s *SQLiteChatHistoryStorage) ListChatHistories(ctx context.Context) ([]ChatHistory, error)
- func (s *SQLiteChatHistoryStorage) UpdateChatMetadata(ctx context.Context, sessionID string, metadata map[string]interface{}) error
- type SSEConfig
- type SSEServer
- type SSETransport
- type ServerConfig
- type ServerConfigOption
- type ServerInfo
- type SetLogLevelParams
- type SlogLogger
- func (l *SlogLogger) Debug(args ...interface{})
- func (l *SlogLogger) Error(args ...interface{})
- func (l *SlogLogger) Info(args ...interface{})
- func (l *SlogLogger) Warn(args ...interface{})
- func (l *SlogLogger) WithContext(ctx context.Context) Logger
- func (l *SlogLogger) WithErr(err error) Logger
- func (l *SlogLogger) WithFields(fields map[string]interface{}) Logger
- type StdIOConfig
- type StdIOServer
- type StdIOTransport
- func (t *StdIOTransport) Close(ctx context.Context) error
- func (t *StdIOTransport) Connect(ctx context.Context, config ClientConfig) error
- func (t *StdIOTransport) SendMessage(ctx context.Context, message interface{}) error
- func (t *StdIOTransport) SetReceiveMessageCallback(callback func(message []byte))
- type StreamIteratorService
- type StreamingLLMResponse
- type Table
- type TableExtractor
- type TagExtractor
- type Tool
- type ToolHandler
- type ToolImplementation
- type ToolResultContent
- type ToolsProvider
- func (p *ToolsProvider) AddMCPClient(client *Client) error
- func (p *ToolsProvider) AddTools(tools []Tool) error
- func (p *ToolsProvider) ExecuteTool(ctx context.Context, params CallToolParams) (CallToolResult, error)
- func (p *ToolsProvider) ListTools(ctx context.Context, allowedTools []string) ([]Tool, error)
- type TracingLLMProvider
- type Transport
- type Usage
- type VectorCollectionConfig
- type VectorDistanceType
- type VectorDocument
- type VectorError
- type VectorFieldConfig
- type VectorIndexType
- type VectorSearchOptions
- type VectorSearchResult
- type VectorStorage
- func (s *VectorStorage) Close() error
- func (s *VectorStorage) CreateCollection(ctx context.Context, config *VectorCollectionConfig) error
- func (s *VectorStorage) DeleteCollection(ctx context.Context, name string) error
- func (s *VectorStorage) DeleteDocument(ctx context.Context, collection, id string) error
- func (s *VectorStorage) GetDocument(ctx context.Context, collection, id string) (*VectorDocument, error)
- func (s *VectorStorage) ListCollections(ctx context.Context) ([]string, error)
- func (s *VectorStorage) SearchByID(ctx context.Context, collection, id string, opts *VectorSearchOptions) ([]VectorSearchResult, error)
- func (s *VectorStorage) SearchByVector(ctx context.Context, collection string, vector []float32, ...) ([]VectorSearchResult, error)
- func (s *VectorStorage) UpsertDocument(ctx context.Context, collection string, doc *VectorDocument) error
- func (s *VectorStorage) UpsertDocuments(ctx context.Context, collection string, docs []*VectorDocument) error
- type VectorStorageProvider
- type VectorValidator
- type XMLExtractor
- type ZapLogger
- func (l *ZapLogger) Debug(args ...interface{})
- func (l *ZapLogger) Error(args ...interface{})
- func (l *ZapLogger) Info(args ...interface{})
- func (l *ZapLogger) Warn(args ...interface{})
- func (l *ZapLogger) WithContext(ctx context.Context) Logger
- func (l *ZapLogger) WithErr(err error) Logger
- func (l *ZapLogger) WithFields(fields map[string]interface{}) Logger
Examples ¶
Constants ¶
const (
	// UserRole represents a message from the user
	UserRole LLMMessageRole = "user"
	// AssistantRole represents a message from the assistant
	AssistantRole LLMMessageRole = "assistant"
	// SystemRole represents a message from the system
	SystemRole LLMMessageRole = "system"

	// DefaultMaxToken is the default maximum number of tokens for LLM responses
	DefaultMaxToken int64 = 1000
	// DefaultTopP is the default top-p sampling value for LLM responses
	DefaultTopP float64 = 0.5
	// DefaultTemperature is the default temperature value for LLM responses
	DefaultTemperature float64 = 0.5
	// DefaultTopK is the default top-k sampling value for LLM responses
	DefaultTopK int64 = 40
	// DefaultMaxIterations is the default maximum number of iterations for LLM responses
	DefaultMaxIterations int = 25
)
const (
	ErrCodeNotFound           = 404
	ErrCodeInvalidDimension   = 400
	ErrCodeInvalidConfig      = 401
	ErrCodeCollectionExists   = 402
	ErrCodeCollectionNotFound = 403
	ErrCodeInvalidVector      = 405
	ErrCodeConnectionFailed   = 500
	ErrCodeOperationFailed    = 501
)
Common vector storage error codes
const (
	// ErrorLogField is the key used for error fields in logs
	ErrorLogField string = "error"
)
const (
ProtocolVersion = "2024-11-05"
)
Variables ¶
var (
	ErrDocumentNotFound   = &VectorError{Code: ErrCodeNotFound, Message: "document not found"}
	ErrCollectionNotFound = &VectorError{Code: ErrCodeCollectionNotFound, Message: "collection not found"}
	ErrCollectionExists   = &VectorError{Code: ErrCodeCollectionExists, Message: "collection already exists"}
	ErrInvalidDimension   = &VectorError{Code: ErrCodeInvalidDimension, Message: "invalid vector dimension"}
	ErrInvalidConfig      = &VectorError{Code: ErrCodeInvalidConfig, Message: "invalid configuration"}
	ErrConnectionFailed   = &VectorError{Code: ErrCodeConnectionFailed, Message: "failed to connect to storage"}
	ErrInvalidVector      = &VectorError{Code: ErrCodeInvalidVector, Message: "invalid vector format or dimension"}
)
Common vector storage errors
var DefaultConfig = LLMRequestConfig{
	// contains filtered or unexported fields
}
DefaultConfig holds the default values for LLMRequestConfig
Functions ¶
func ConvertToGenaiTool ¶ added in v0.13.0
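func ConvertToGenaiTool(customTool Tool) (*genai.Tool, error)

ConvertToGenaiTool converts a goai Tool definition into the *genai.Tool representation used by the Google Gemini SDK.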
Types ¶
type AnthropicClient ¶
type AnthropicClient struct {
// contains filtered or unexported fields
}
AnthropicClient implements the AnthropicClientProvider interface using Anthropic's official SDK.
func NewAnthropicClient ¶ added in v0.1.0
func NewAnthropicClient(apiKey string) *AnthropicClient
NewAnthropicClient creates a new instance of AnthropicClient with the provided API key.
Example usage:
// Regular message generation
client := NewAnthropicClient("your-api-key")
provider := NewAnthropicLLMProvider(AnthropicProviderConfig{
	Client: client,
	Model:  "claude-3-sonnet-20240229",
}, NewNullLogger())

// Streaming message generation
streamingResp, err := provider.GetStreamingResponse(ctx, messages, config)
if err != nil {
	log.Fatal(err)
}

for chunk := range streamingResp {
	fmt.Print(chunk.Text)
}
func (*AnthropicClient) CreateMessage ¶
func (c *AnthropicClient) CreateMessage(ctx context.Context, params anthropic.MessageNewParams) (*anthropic.Message, error)
CreateMessage implements the AnthropicClientProvider interface using the Anthropic client.
func (*AnthropicClient) CreateStreamingMessage ¶
func (c *AnthropicClient) CreateStreamingMessage(ctx context.Context, params anthropic.MessageNewParams) *ssestream.Stream[anthropic.MessageStreamEvent]
CreateStreamingMessage implements the streaming support for the AnthropicClientProvider interface.
type AnthropicClientProvider ¶ added in v0.1.0
type AnthropicClientProvider interface {
	// CreateMessage creates a new message using Anthropic's API.
	// The method takes a context and MessageNewParams and returns a Message response or an error.
	CreateMessage(ctx context.Context, params anthropic.MessageNewParams) (*anthropic.Message, error)

	// CreateStreamingMessage creates a streaming message using Anthropic's API.
	// It returns a stream that can be used to receive message chunks as they're generated.
	CreateStreamingMessage(ctx context.Context, params anthropic.MessageNewParams) *ssestream.Stream[anthropic.MessageStreamEvent]
}
AnthropicClientProvider defines the interface for interacting with Anthropic's API. This interface abstracts the essential message-related operations used by AnthropicLLMProvider.
type AnthropicLLMProvider ¶
type AnthropicLLMProvider struct {
// contains filtered or unexported fields
}
AnthropicLLMProvider implements the LLMProvider interface using Anthropic's official Go SDK. It provides access to Claude models through Anthropic's API.
func NewAnthropicLLMProvider ¶
func NewAnthropicLLMProvider(config AnthropicProviderConfig, log Logger) *AnthropicLLMProvider
NewAnthropicLLMProvider creates a new Anthropic provider with the specified configuration. If no model is specified, it defaults to Claude 3.5 Sonnet.
Example usage:
client := NewAnthropicClient("your-api-key")
provider := NewAnthropicLLMProvider(AnthropicProviderConfig{
	Client: client,
	Model:  anthropic.ModelClaude_3_5_Sonnet_20240620,
}, NewNullLogger())

response, err := provider.GetResponse(ctx, messages, config)
if err != nil {
	log.Fatal(err)
}
func (*AnthropicLLMProvider) GetResponse ¶
func (p *AnthropicLLMProvider) GetResponse(ctx context.Context, messages []LLMMessage, config LLMRequestConfig) (LLMResponse, error)
GetResponse generates a response using Anthropic's API for the given messages and configuration. It supports different message roles (user, assistant, system) and handles them appropriately. System messages are handled separately through Anthropic's system parameter.
func (*AnthropicLLMProvider) GetStreamingResponse ¶
func (p *AnthropicLLMProvider) GetStreamingResponse(ctx context.Context, messages []LLMMessage, config LLMRequestConfig) (<-chan StreamingLLMResponse, error)
GetStreamingResponse handles streaming LLM responses with tool usage capabilities
type AnthropicProviderConfig ¶
type AnthropicProviderConfig struct {
	// Client is the AnthropicClientProvider implementation to use
	Client AnthropicClientProvider

	// Model specifies which Anthropic model to use (e.g., "claude-3-opus-20240229", "claude-3-sonnet-20240229")
	Model anthropic.Model
}
AnthropicProviderConfig holds the configuration options for creating an Anthropic provider.
type BaseServer ¶ added in v0.19.0
type BaseServer struct {
	ServerInfo struct {
		Name    string `json:"name"`
		Version string `json:"version"`
	}
	// contains filtered or unexported fields
}
BaseServer contains the common fields and methods for all MCP server implementations.
func NewBaseServer ¶ added in v0.19.0
func NewBaseServer(opts ...ServerConfigOption) (*BaseServer, error)
NewBaseServer creates a new BaseServer instance with the given options
func (*BaseServer) AddPrompts ¶ added in v0.19.0
func (s *BaseServer) AddPrompts(prompts ...Prompt) error
func (*BaseServer) AddResources ¶ added in v0.19.0
func (s *BaseServer) AddResources(resources ...Resource) error
func (*BaseServer) AddTools ¶ added in v0.19.0
func (s *BaseServer) AddTools(tools ...Tool) error
func (*BaseServer) CallTool ¶ added in v0.19.0
func (s *BaseServer) CallTool(ctx context.Context, params CallToolParams) (CallToolResult, error)
func (*BaseServer) ListPrompts ¶ added in v0.19.0
func (s *BaseServer) ListPrompts(ctx context.Context, cursor string, limit int) ListPromptsResult
ListPrompts returns a list of all available prompts, with optional pagination
func (*BaseServer) ListResources ¶ added in v0.19.0
func (s *BaseServer) ListResources(ctx context.Context, cursor string, limit int) ListResourcesResult
ListResources returns a list of all resources, with optional pagination.
func (*BaseServer) ListTools ¶ added in v0.19.0
func (s *BaseServer) ListTools(ctx context.Context, cursor string, limit int) ListToolsResult
func (*BaseServer) LogMessage ¶ added in v0.19.0
func (s *BaseServer) LogMessage(level LogLevel, loggerName string, data interface{})
LogMessage logs a message.
func (*BaseServer) ReadResource ¶ added in v0.19.0
func (s *BaseServer) ReadResource(ctx context.Context, params ReadResourceParams) (ReadResourceResult, error)
ReadResource reads a resource identified by the given URI, with proper error handling and URI validation.
func (*BaseServer) SendPromptListChangedNotification ¶ added in v0.19.0
func (s *BaseServer) SendPromptListChangedNotification()
SendPromptListChangedNotification sends a notification that the prompt list has changed.
func (*BaseServer) SendToolListChangedNotification ¶ added in v0.19.0
func (s *BaseServer) SendToolListChangedNotification()
SendToolListChangedNotification sends a notification that the tool list has changed.
type BedrockClient ¶ added in v0.18.0
type BedrockClient interface {
	Converse(ctx context.Context, params *bedrockruntime.ConverseInput, optFns ...func(*bedrockruntime.Options)) (*bedrockruntime.ConverseOutput, error)
	ConverseStream(ctx context.Context, params *bedrockruntime.ConverseStreamInput, optFns ...func(*bedrockruntime.Options)) (*bedrockruntime.ConverseStreamOutput, error)
	InvokeModel(ctx context.Context, params *bedrockruntime.InvokeModelInput, optFns ...func(*bedrockruntime.Options)) (*bedrockruntime.InvokeModelOutput, error)
}
BedrockClient interface for AWS Bedrock operations
func NewBedrockClientWrapper ¶ added in v0.18.0
func NewBedrockClientWrapper(client *bedrockruntime.Client) BedrockClient
NewBedrockClientWrapper creates a new wrapper for bedrockruntime.Client
type BedrockClientWrapper ¶ added in v0.18.0
type BedrockClientWrapper struct {
// contains filtered or unexported fields
}
BedrockClientWrapper wraps the bedrockruntime.Client to implement the BedrockClient interface
func (*BedrockClientWrapper) Converse ¶ added in v0.18.0
func (w *BedrockClientWrapper) Converse(ctx context.Context, params *bedrockruntime.ConverseInput, optFns ...func(*bedrockruntime.Options)) (*bedrockruntime.ConverseOutput, error)
Converse implements the BedrockClient interface
func (*BedrockClientWrapper) ConverseStream ¶ added in v0.18.0
func (w *BedrockClientWrapper) ConverseStream(ctx context.Context, params *bedrockruntime.ConverseStreamInput, optFns ...func(*bedrockruntime.Options)) (*bedrockruntime.ConverseStreamOutput, error)
ConverseStream implements the BedrockClient interface
func (*BedrockClientWrapper) InvokeModel ¶ added in v0.18.0
func (w *BedrockClientWrapper) InvokeModel(ctx context.Context, params *bedrockruntime.InvokeModelInput, optFns ...func(*bedrockruntime.Options)) (*bedrockruntime.InvokeModelOutput, error)
InvokeModel implements the BedrockClient interface
type BedrockEmbeddingProvider ¶ added in v0.18.0
type BedrockEmbeddingProvider struct {
// contains filtered or unexported fields
}
BedrockEmbeddingProvider implements the EmbeddingProvider interface using AWS Bedrock.
func NewBedrockEmbeddingProvider ¶ added in v0.18.0
func NewBedrockEmbeddingProvider(bedrockClient BedrockClient) (*BedrockEmbeddingProvider, error)
NewBedrockEmbeddingProvider creates a new BedrockEmbeddingProvider.
func NewBedrockEmbeddingProviderWithClient ¶ added in v0.18.0
func NewBedrockEmbeddingProviderWithClient(client BedrockClient) *BedrockEmbeddingProvider
NewBedrockEmbeddingProviderWithClient creates a new provider with an existing Bedrock client.
func (*BedrockEmbeddingProvider) Generate ¶ added in v0.18.0
func (b *BedrockEmbeddingProvider) Generate(ctx context.Context, input interface{}, model EmbeddingModel) (*EmbeddingResponse, error)
Generate creates embedding vectors using AWS Bedrock.
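Example usage (a minimal sketch; it assumes AWS credentials are available in the environment, and "amazon.titan-embed-text-v2:0" is an assumed Bedrock model identifier, not a value documented by this package):

ctx := context.Background()

// Load the default AWS configuration and wrap the Bedrock runtime client.
awsConfig, err := config.LoadDefaultConfig(ctx)
if err != nil {
	log.Fatal(err)
}
bedrockClient := NewBedrockClientWrapper(bedrockruntime.NewFromConfig(awsConfig))

provider, err := NewBedrockEmbeddingProvider(bedrockClient)
if err != nil {
	log.Fatal(err)
}

// Assumed model identifier; replace with the embedding model enabled in your AWS account.
resp, err := provider.Generate(ctx, "Hello, world", EmbeddingModel("amazon.titan-embed-text-v2:0"))
if err != nil {
	log.Fatal(err)
}
fmt.Printf("vector length: %d\n", len(resp.Data[0].Embedding))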
type BedrockLLMProvider ¶
type BedrockLLMProvider struct {
// contains filtered or unexported fields
}
BedrockLLMProvider implements the LLMProvider interface using AWS Bedrock's official Go SDK.
func NewBedrockLLMProvider ¶
func NewBedrockLLMProvider(config BedrockProviderConfig, log Logger) *BedrockLLMProvider
NewBedrockLLMProvider creates a new Bedrock provider with the specified configuration. If no model is specified, it defaults to Claude 3.5 Sonnet.
func (*BedrockLLMProvider) GetResponse ¶
func (p *BedrockLLMProvider) GetResponse(ctx context.Context, messages []LLMMessage, config LLMRequestConfig) (LLMResponse, error)
GetResponse generates a response using Bedrock's Converse API for the given messages and configuration. It supports different message roles (user, assistant), system messages, and tool calling. It handles multi-turn conversations automatically when tools are used.
func (*BedrockLLMProvider) GetStreamingResponse ¶ added in v0.2.0
func (p *BedrockLLMProvider) GetStreamingResponse(ctx context.Context, messages []LLMMessage, config LLMRequestConfig) (<-chan StreamingLLMResponse, error)
GetStreamingResponse generates a streaming response using AWS Bedrock's API for the given messages and configuration. It returns a channel that receives chunks of the response as they're generated.
The method supports different message roles (user, assistant) and handles context cancellation. The returned channel will be closed when the response is complete or if an error occurs.
The returned StreamingLLMResponse contains:
- Text: The text chunk from the model
- Done: Boolean indicating if this is the final message
- Error: Any error that occurred during streaming
- TokenCount: Number of tokens in this chunk
Example:
package main

import (
	"context"
	"fmt"

	"github.com/aws/aws-sdk-go-v2/config"
	"github.com/aws/aws-sdk-go-v2/service/bedrockruntime"
	"github.com/shaharia-lab/goai"
)

func main() {
	ctx := context.Background()

	// Load AWS configuration and create the Bedrock runtime client.
	awsConfig, err := config.LoadDefaultConfig(ctx)
	if err != nil {
		panic(err)
	}

	// Create Bedrock LLM Provider
	llmProvider := goai.NewBedrockLLMProvider(goai.BedrockProviderConfig{
		Client: goai.NewBedrockClientWrapper(bedrockruntime.NewFromConfig(awsConfig)),
		Model:  "anthropic.claude-3-sonnet-20240229-v1:0",
	}, goai.NewDefaultLogger())

	// Configure LLM Request
	llm := goai.NewLLMRequest(goai.NewRequestConfig(
		goai.WithMaxToken(100),
		goai.WithTemperature(0.7),
	), llmProvider)

	// Generate streaming response
	stream, err := llm.GenerateStream(ctx, []goai.LLMMessage{
		{Role: goai.UserRole, Text: "Explain quantum computing"},
	})
	if err != nil {
		panic(err)
	}

	for resp := range stream {
		if resp.Error != nil {
			fmt.Printf("Error: %v\n", resp.Error)
			break
		}
		if resp.Done {
			break
		}
		fmt.Print(resp.Text)
	}
}
Note: The streaming response must be fully consumed or the context must be cancelled to prevent resource leaks.
type BedrockProviderConfig ¶
type BedrockProviderConfig struct { Client BedrockClient Model string }
BedrockProviderConfig holds the configuration options for creating a Bedrock provider
type CallToolParams ¶ added in v0.19.0
type CallToolParams struct { Name string `json:"name"` Arguments json.RawMessage `json:"arguments"` }
CallToolParams represents parameters for calling a tool.
type CallToolResult ¶ added in v0.19.0
type CallToolResult struct { Content []ToolResultContent `json:"content"` IsError bool `json:"isError"` }
CallToolResult represents the result of calling a tool.
type Capabilities ¶ added in v0.19.0
type Capabilities struct {
	Logging   CapabilitiesLogging   `json:"logging"`
	Prompts   CapabilitiesPrompts   `json:"prompts"`
	Resources CapabilitiesResources `json:"resources"`
	Tools     CapabilitiesTools     `json:"tools"`
}
type CapabilitiesLogging ¶ added in v0.19.0
type CapabilitiesLogging struct{}
type CapabilitiesPrompts ¶ added in v0.19.0
type CapabilitiesPrompts struct {
ListChanged bool `json:"listChanged"`
}
type CapabilitiesResources ¶ added in v0.19.0
type CapabilitiesTools ¶ added in v0.19.0
type CapabilitiesTools struct {
ListChanged bool `json:"listChanged"`
}
type ChatHistory ¶ added in v0.11.0
type ChatHistory struct {
	SessionID string                 `json:"session_id"`
	Messages  []ChatHistoryMessage   `json:"messages"`
	CreatedAt time.Time              `json:"created_at"`
	Metadata  map[string]interface{} `json:"metadata"`
}
ChatHistory represents a stored chat conversation, including its messages, creation time, and metadata.
type ChatHistoryMessage ¶ added in v0.11.0
type ChatHistoryStorage ¶ added in v0.11.0
type ChatHistoryStorage interface {
	// CreateChat initializes a new chat conversation
	CreateChat(ctx context.Context) (*ChatHistory, error)

	// AddMessage adds a new message to an existing conversation
	AddMessage(ctx context.Context, sessionID string, message ChatHistoryMessage) error

	// GetChat retrieves a conversation by its ChatUUID
	GetChat(ctx context.Context, sessionID string) (*ChatHistory, error)

	// ListChatHistories returns all stored conversations
	ListChatHistories(ctx context.Context) ([]ChatHistory, error)

	// DeleteChat removes a conversation by its ChatUUID
	DeleteChat(ctx context.Context, sessionID string) error
}
ChatHistoryStorage defines the interface for conversation history storage
type ChatSessionService ¶ added in v0.13.0
type ChatSessionService interface {
	SendMessage(ctx context.Context, parts ...genai.Part) (*genai.GenerateContentResponse, error)
	GetHistory() []*genai.Content
	AppendHistory(content *genai.Content)
}
ChatSessionService defines the interface for chat session management
type ChunkingByLLMProvider ¶
type ChunkingByLLMProvider struct {
// contains filtered or unexported fields
}
ChunkingByLLMProvider implements ChunkingProvider using a language model to intelligently split text while preserving context and meaning.
func NewChunkingByLLMProvider ¶
func NewChunkingByLLMProvider(llm *LLMRequest) *ChunkingByLLMProvider
NewChunkingByLLMProvider creates a new ChunkingByLLMProvider with the specified LLM request client.
Example usage:
llm := NewLLMRequest(config, provider)
chunker := NewChunkingByLLMProvider(llm)
chunks, err := chunker.Chunk(ctx, longText)
func (*ChunkingByLLMProvider) Chunk ¶
Chunk splits the input text into coherent segments using the language model. It preserves sentence boundaries and semantic units while maintaining consistent chunk sizes. Returns an error if the chunking process fails at any stage.
Example usage:
chunker := NewChunkingByLLMProvider(llm)
chunks, err := chunker.Chunk(ctx, "Long text to be split...")
if err != nil {
	log.Fatal(err)
}

for i, chunk := range chunks {
	fmt.Printf("Chunk %d: %s\n", i, chunk)
}
type ChunkingProvider ¶
type ChunkingProvider interface {
	// Chunk splits the input text into coherent segments using the provided context.
	// Returns the array of text chunks or an error if the chunking fails.
	Chunk(ctx context.Context, text string) ([]string, error)
}
ChunkingProvider defines the interface for services that can split text into chunks.
type Client ¶ added in v0.19.0
type Client struct {
// contains filtered or unexported fields
}
func NewClient ¶ added in v0.19.0
func NewClient(transport Transport, config ClientConfig) *Client
func (*Client) CallTool ¶ added in v0.19.0
func (c *Client) CallTool(ctx context.Context, params CallToolParams) (CallToolResult, error)
func (*Client) GetCapabilities ¶ added in v0.19.0
func (c *Client) GetCapabilities() Capabilities
func (*Client) GetPrompt ¶ added in v0.19.0
func (c *Client) GetPrompt(ctx context.Context, params GetPromptParams) ([]PromptMessage, error)
func (*Client) GetProtocolVersion ¶ added in v0.19.0
func (*Client) GetState ¶ added in v0.19.0
func (c *Client) GetState() ConnectionState
func (*Client) IsInitialized ¶ added in v0.19.0
func (*Client) ListPrompts ¶ added in v0.19.0
type ClientConfig ¶ added in v0.19.0
type ClientConfig struct {
	RetryDelay          time.Duration
	MaxRetries          int
	ClientName          string
	ClientVersion       string
	Logger              Logger
	SSE                 SSEConfig
	StdIO               StdIOConfig
	MessageEndpoint     string
	RequestTimeout      time.Duration
	HealthCheckInterval time.Duration
	ConnectionTimeout   time.Duration
	KeepAliveInterval   time.Duration
}
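Example usage (a hedged sketch of connecting an MCP client over standard I/O; it assumes a StdIOTransport value can be used directly — if the package exposes a dedicated transport constructor, prefer that instead):

transport := &StdIOTransport{} // assumption: usable without a dedicated constructor

client := NewClient(transport, ClientConfig{
	ClientName:    "example-client",
	ClientVersion: "0.1.0",
	Logger:        NewNullLogger(),
	RetryDelay:    2 * time.Second,
	MaxRetries:    3,
})

ctx := context.Background()
if err := client.Connect(ctx); err != nil {
	log.Fatal(err)
}
defer client.Close(ctx)

tools, err := client.ListTools(ctx)
if err != nil {
	log.Fatal(err)
}
for _, tool := range tools {
	fmt.Println(tool.Name)
}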
type ConnectionError ¶ added in v0.19.0
type ConnectionState ¶ added in v0.19.0
type ConnectionState int
const (
	Disconnected ConnectionState = iota
	Connecting
	Connected
	Degraded
	Recovering
	BackingOff
)
type DefaultLogger ¶ added in v0.19.0
DefaultLogger - a basic implementation using Go's standard log package
func (*DefaultLogger) Debug ¶ added in v0.19.0
func (l *DefaultLogger) Debug(args ...interface{})
func (*DefaultLogger) Error ¶ added in v0.19.0
func (l *DefaultLogger) Error(args ...interface{})
func (*DefaultLogger) Info ¶ added in v0.19.0
func (l *DefaultLogger) Info(args ...interface{})
func (*DefaultLogger) Warn ¶ added in v0.19.0
func (l *DefaultLogger) Warn(args ...interface{})
func (*DefaultLogger) WithContext ¶ added in v0.19.0
func (l *DefaultLogger) WithContext(ctx context.Context) Logger
WithContext - No-op for DefaultLogger. Returns itself.
func (*DefaultLogger) WithErr ¶ added in v0.19.0
func (l *DefaultLogger) WithErr(err error) Logger
WithErr - allows adding an error to the log
func (*DefaultLogger) WithFields ¶ added in v0.19.0
func (l *DefaultLogger) WithFields(fields map[string]interface{}) Logger
WithFields - allows adding structured fields to the log
type EmbeddingModel ¶
type EmbeddingModel string
EmbeddingModel represents the type of embedding model to be used for generating embeddings.
const (
	// EmbeddingModelAllMiniLML6V2 is a lightweight model suitable for general-purpose embedding generation.
	EmbeddingModelAllMiniLML6V2 EmbeddingModel = "all-MiniLM-L6-v2"

	// EmbeddingModelAllMpnetBaseV2 is a more powerful model that provides higher quality embeddings.
	EmbeddingModelAllMpnetBaseV2 EmbeddingModel = "all-mpnet-base-v2"

	// EmbeddingModelParaphraseMultilingualMiniLML12V2 is specialized for multilingual text.
	EmbeddingModelParaphraseMultilingualMiniLML12V2 EmbeddingModel = "paraphrase-multilingual-MiniLM-L12-v2"
)
Available embedding models that can be used with the EmbeddingService.
type EmbeddingObject ¶
type EmbeddingObject struct { Object string `json:"object"` Embedding []float32 `json:"embedding"` Index int `json:"index"` }
EmbeddingObject represents a single embedding result containing the generated vector.
type EmbeddingProvider ¶
type EmbeddingProvider interface {
	// Generate creates embedding vectors from the provided input using the specified model.
	// The input can be a string or array of strings.
	Generate(ctx context.Context, input interface{}, model EmbeddingModel) (*EmbeddingResponse, error)
}
EmbeddingProvider defines the interface for services that can generate embeddings from text.
type EmbeddingResponse ¶
type EmbeddingResponse struct {
	Object string            `json:"object"`
	Data   []EmbeddingObject `json:"data"`
	Model  EmbeddingModel    `json:"model"`
	Usage  Usage             `json:"usage"`
}
EmbeddingResponse represents the complete response from the embedding generation.
type EmbeddingService ¶
type EmbeddingService struct {
// contains filtered or unexported fields
}
EmbeddingService provides a high-level interface for generating embeddings using a provider.
func NewEmbeddingService ¶
func NewEmbeddingService(provider EmbeddingProvider) *EmbeddingService
NewEmbeddingService creates a new EmbeddingService with the specified provider.
func (*EmbeddingService) Generate ¶ added in v0.2.0
func (s *EmbeddingService) Generate(ctx context.Context, input interface{}, model EmbeddingModel) (*EmbeddingResponse, error)
Generate creates embeddings using the configured provider.
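Example usage (a minimal sketch; the provider value is assumed to be an existing EmbeddingProvider implementation):

ctx := context.Background()
service := NewEmbeddingService(provider) // provider implements EmbeddingProvider

resp, err := service.Generate(ctx, []string{"first text", "second text"}, EmbeddingModelAllMiniLML6V2)
if err != nil {
	log.Fatal(err)
}
for _, obj := range resp.Data {
	fmt.Printf("index %d: %d dimensions\n", obj.Index, len(obj.Embedding))
}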
type Error ¶ added in v0.19.0
type Error struct { Code int `json:"code"` Message string `json:"message"` Data interface{} `json:"data,omitempty"` }
Error represents a JSON-RPC error object.
type GeminiModelService ¶ added in v0.13.0
type GeminiModelService interface {
	StartChat(initialHistory []*genai.Content) ChatSessionService
	ConfigureModel(config *genai.GenerationConfig, tools []*genai.Tool) error
	GenerateContentStream(ctx context.Context, parts ...genai.Part) (StreamIteratorService, error)
}
GeminiModelService defines the interface for interacting with the Gemini model
func NewGoogleGeminiService ¶ added in v0.13.0
func NewGoogleGeminiService(apiKey, modelName string) (GeminiModelService, error)
NewGoogleGeminiService creates a new instance of GoogleGeminiService
type GeminiProvider ¶ added in v0.13.0
type GeminiProvider struct {
// contains filtered or unexported fields
}
func NewGeminiProvider ¶ added in v0.13.0
func NewGeminiProvider(service GeminiModelService, log Logger) (*GeminiProvider, error)
func (*GeminiProvider) Close ¶ added in v0.13.0
func (p *GeminiProvider) Close() error
func (*GeminiProvider) GetResponse ¶ added in v0.13.0
func (p *GeminiProvider) GetResponse(ctx context.Context, messages []LLMMessage, config LLMRequestConfig) (LLMResponse, error)
func (*GeminiProvider) GetStreamingResponse ¶ added in v0.13.0
func (p *GeminiProvider) GetStreamingResponse(ctx context.Context, messages []LLMMessage, config LLMRequestConfig) (<-chan StreamingLLMResponse, error)
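Example usage (a hedged sketch of wiring the Gemini provider end to end; the API key and the "gemini-1.5-flash" model name are placeholder assumptions):

ctx := context.Background()

service, err := NewGoogleGeminiService("your-gemini-api-key", "gemini-1.5-flash")
if err != nil {
	log.Fatal(err)
}

provider, err := NewGeminiProvider(service, NewDefaultLogger())
if err != nil {
	log.Fatal(err)
}
defer provider.Close()

response, err := provider.GetResponse(ctx, []LLMMessage{
	{Role: UserRole, Text: "Summarize the benefits of unit testing."},
}, NewRequestConfig(WithMaxToken(500)))
if err != nil {
	log.Fatal(err)
}
fmt.Println(response.Text)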
type GeminiRole ¶ added in v0.13.0
type GeminiRole = string
const (
	MaxToolTurns = 5

	GeminiRoleUser  GeminiRole = "user"
	GeminiRoleModel GeminiRole = "model"
	RoleFunction    GeminiRole = "function"
)
type GetPromptParams ¶ added in v0.19.0
type GetPromptParams struct { Name string `json:"name"` Arguments json.RawMessage `json:"arguments,omitempty"` }
type GoogleGeminiChatSessionService ¶ added in v0.13.0
type GoogleGeminiChatSessionService struct {
// contains filtered or unexported fields
}
GoogleGeminiChatSessionService implements ChatSessionService using genai.ChatSession
func (*GoogleGeminiChatSessionService) AppendHistory ¶ added in v0.13.0
func (ggcss *GoogleGeminiChatSessionService) AppendHistory(content *genai.Content)
AppendHistory appends a new content to the chat history
func (*GoogleGeminiChatSessionService) GetHistory ¶ added in v0.13.0
func (ggcss *GoogleGeminiChatSessionService) GetHistory() []*genai.Content
GetHistory retrieves the chat history
func (*GoogleGeminiChatSessionService) SendMessage ¶ added in v0.13.0
func (ggcss *GoogleGeminiChatSessionService) SendMessage(ctx context.Context, parts ...genai.Part) (*genai.GenerateContentResponse, error)
SendMessage sends a message to the chat session and returns the response
type GoogleGeminiService ¶ added in v0.13.0
type GoogleGeminiService struct {
// contains filtered or unexported fields
}
GoogleGeminiService implements GeminiModelService using the genai client
func (*GoogleGeminiService) ConfigureModel ¶ added in v0.13.0
func (g *GoogleGeminiService) ConfigureModel(config *genai.GenerationConfig, tools []*genai.Tool) error
ConfigureModel configures the model with the provided generation config and tools
func (*GoogleGeminiService) GenerateContentStream ¶ added in v0.13.0
func (g *GoogleGeminiService) GenerateContentStream(ctx context.Context, parts ...genai.Part) (StreamIteratorService, error)
GenerateContentStream generates content in a streaming manner
func (*GoogleGeminiService) StartChat ¶ added in v0.13.0
func (g *GoogleGeminiService) StartChat(initialHistory []*genai.Content) ChatSessionService
StartChat initializes a new chat session with the provided initial history
type GoogleGeminiStreamIteratorService ¶ added in v0.13.0
type GoogleGeminiStreamIteratorService struct {
// contains filtered or unexported fields
}
GoogleGeminiStreamIteratorService implements StreamIteratorService
func (*GoogleGeminiStreamIteratorService) Next ¶ added in v0.13.0
func (ggsis *GoogleGeminiStreamIteratorService) Next() (*genai.GenerateContentResponse, error)
Next retrieves the next content from the streaming iterator
type InMemoryChatHistoryStorage ¶ added in v0.11.0
type InMemoryChatHistoryStorage struct {
// contains filtered or unexported fields
}
InMemoryChatHistoryStorage is an in-memory implementation of ChatHistoryStorage
func NewInMemoryChatHistoryStorage ¶ added in v0.11.0
func NewInMemoryChatHistoryStorage() *InMemoryChatHistoryStorage
NewInMemoryChatHistoryStorage creates a new instance of InMemoryChatHistoryStorage
func (*InMemoryChatHistoryStorage) AddMessage ¶ added in v0.11.0
func (s *InMemoryChatHistoryStorage) AddMessage(ctx context.Context, sessionID string, message ChatHistoryMessage) error
AddMessage adds a new message to an existing conversation
func (*InMemoryChatHistoryStorage) CreateChat ¶ added in v0.11.0
func (s *InMemoryChatHistoryStorage) CreateChat(ctx context.Context) (*ChatHistory, error)
CreateChat initializes a new chat conversation
func (*InMemoryChatHistoryStorage) DeleteChat ¶ added in v0.11.0
func (s *InMemoryChatHistoryStorage) DeleteChat(ctx context.Context, sessionID string) error
DeleteChat removes a conversation by its ChatUUID
func (*InMemoryChatHistoryStorage) GetChat ¶ added in v0.11.0
func (s *InMemoryChatHistoryStorage) GetChat(ctx context.Context, sessionID string) (*ChatHistory, error)
GetChat retrieves a conversation by its ChatUUID
func (*InMemoryChatHistoryStorage) ListChatHistories ¶ added in v0.11.0
func (s *InMemoryChatHistoryStorage) ListChatHistories(ctx context.Context) ([]ChatHistory, error)
ListChatHistories returns all stored conversations
func (*InMemoryChatHistoryStorage) UpdateChatMetadata ¶ added in v0.19.0
func (s *InMemoryChatHistoryStorage) UpdateChatMetadata(ctx context.Context, sessionID string, metadata map[string]interface{}) error
UpdateChatMetadata updates the metadata for a chat session
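Example usage (a brief sketch of exercising the ChatHistoryStorage interface with the in-memory implementation; message contents are omitted because the ChatHistoryMessage fields are not shown here):

ctx := context.Background()
storage := NewInMemoryChatHistoryStorage()

chat, err := storage.CreateChat(ctx)
if err != nil {
	log.Fatal(err)
}
fmt.Println("created session:", chat.SessionID)

histories, err := storage.ListChatHistories(ctx)
if err != nil {
	log.Fatal(err)
}
fmt.Println("stored conversations:", len(histories))

if err := storage.DeleteChat(ctx, chat.SessionID); err != nil {
	log.Fatal(err)
}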
type InitializeParams ¶ added in v0.19.0
type InitializeResponse ¶ added in v0.19.0
type InitializeResponse struct {
	JSONRPC string           `json:"jsonrpc"`
	ID      *json.RawMessage `json:"id"`
	Result  InitializeResult `json:"result,omitempty"`
	Error   *Error           `json:"error,omitempty"`
}
type InitializeResult ¶ added in v0.19.0
type InitializeResult struct { ProtocolVersion string `json:"protocolVersion"` Capabilities Capabilities `json:"capabilities"` ServerInfo ServerInfo `json:"serverInfo"` }
InitializeResult represents the result of server initialization.
type IntermediateJSONSchema ¶ added in v0.13.0
type IntermediateJSONSchema struct {
	Type        string                             `json:"type"`
	Description string                             `json:"description,omitempty"`
	Properties  map[string]*IntermediateJSONSchema `json:"properties,omitempty"`
	Required    []string                           `json:"required,omitempty"`
	Items       *IntermediateJSONSchema            `json:"items,omitempty"`
	Enum        []string                           `json:"enum,omitempty"`
}
type JSONExtractor ¶ added in v0.3.0
type JSONExtractor struct {
// Target is a pointer to the struct where JSON data should be unmarshaled.
Target interface{}
}
JSONExtractor implements ResponseExtractor for JSON formatted responses.
Example ¶
Example usage
// Define a struct to hold the response data
type Person struct {
	Name string `json:"name"`
	Age  int    `json:"age"`
}

// Create a response with JSON data
response := LLMResponse{
	Text: `{"name": "John Doe", "age": 30}`,
}

// Create an extractor with a target struct
var person Person
extractor := NewJSONExtractor(&person)

// Extract the data
result, err := extractor.Extract(response)
if err != nil {
	panic(err)
}

// Use the extracted data
extracted := result.(*Person)
fmt.Println(extracted.Name)
Output: John Doe
func NewJSONExtractor ¶ added in v0.3.0
func NewJSONExtractor(target interface{}) *JSONExtractor
NewJSONExtractor creates a new JSONExtractor with the specified target struct.
func (*JSONExtractor) Extract ¶ added in v0.3.0
func (e *JSONExtractor) Extract(response LLMResponse) (interface{}, error)
Extract implements ResponseExtractor.Extract for JSON data.
type LLMError ¶
type LLMError struct {
	// Code represents the error code (usually HTTP status code for API errors)
	Code int

	// Message provides a detailed description of the error
	Message string
}
LLMError represents errors that occur during LLM operations. It provides structured error information including an error code.
type LLMMessage ¶
type LLMMessage struct { Role LLMMessageRole Text string }
LLMMessage represents a message in a conversation with an LLM. It includes the role of the speaker (user, assistant, etc.) and the text of the message.
type LLMMessageRole ¶
type LLMMessageRole string
LLMMessageRole represents the role of a message in a conversation.
type LLMPromptTemplate ¶
type LLMPromptTemplate struct {
	// Template is the template string using Go's text/template syntax
	Template string

	// Data contains the values to be substituted in the template
	Data map[string]interface{}
}
LLMPromptTemplate provides functionality for creating dynamic prompts using templates. It supports Go's text/template syntax for variable substitution and logic.
func (*LLMPromptTemplate) Parse ¶
func (p *LLMPromptTemplate) Parse() (string, error)
Parse processes the template with the provided data and returns the final prompt string. Returns an error if template parsing or execution fails.
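Example usage (a minimal sketch, assuming the template is executed against the Data map as the doc comment describes):

template := &LLMPromptTemplate{
	Template: "Translate the following text to {{.Language}}: {{.Text}}",
	Data: map[string]interface{}{
		"Language": "French",
		"Text":     "Hello, world",
	},
}

prompt, err := template.Parse()
if err != nil {
	log.Fatal(err)
}
fmt.Println(prompt) // Translate the following text to French: Hello, world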
type LLMProvider ¶
type LLMProvider interface {
	// GetResponse generates a response for the given question using the specified configuration.
	// Returns LLMResponse containing the generated text and metadata, or an error if the operation fails.
	GetResponse(ctx context.Context, messages []LLMMessage, config LLMRequestConfig) (LLMResponse, error)

	GetStreamingResponse(ctx context.Context, messages []LLMMessage, config LLMRequestConfig) (<-chan StreamingLLMResponse, error)
}
LLMProvider defines the interface that all LLM providers must implement. This allows for easy swapping between different LLM providers.
type LLMRequest ¶
type LLMRequest struct {
// contains filtered or unexported fields
}
LLMRequest handles the configuration and execution of LLM requests. It provides a consistent interface for interacting with different LLM providers.
func NewLLMRequest ¶
func NewLLMRequest(config LLMRequestConfig, provider LLMProvider) *LLMRequest
NewLLMRequest creates a new LLMRequest with the specified configuration and provider. The provider parameter allows injecting different LLM implementations (OpenAI, Anthropic, etc.).
Example usage:
// Create provider
provider := NewOpenAILLMProvider(OpenAIProviderConfig{
	APIKey: "your-api-key",
	Model:  "gpt-3.5-turbo",
})

// Configure request options
config := NewRequestConfig(
	WithMaxToken(2000),
	WithTemperature(0.7),
)

// Create LLM request client
llm := NewLLMRequest(config, provider)
func (*LLMRequest) Generate ¶
func (r *LLMRequest) Generate(ctx context.Context, messages []LLMMessage) (LLMResponse, error)
Generate sends messages to the configured LLM provider and returns the response. It uses the provider and configuration specified during initialization.
Example usage:
messages := []LLMMessage{
	{Role: SystemRole, Text: "You are a helpful assistant"},
	{Role: UserRole, Text: "What is the capital of France?"},
}

response, err := llm.Generate(ctx, messages)
if err != nil {
	log.Fatal(err)
}

fmt.Printf("Response: %s\n", response.Text)
fmt.Printf("Tokens used: %d\n", response.TotalOutputToken)
The method returns LLMResponse containing:
- Generated text
- Token usage statistics
- Completion time
- Other provider-specific metadata
func (*LLMRequest) GenerateStream ¶
func (r *LLMRequest) GenerateStream(ctx context.Context, messages []LLMMessage) (<-chan StreamingLLMResponse, error)
GenerateStream creates a streaming response channel for the given messages. It returns a channel that receives StreamingLLMResponse chunks and an error if initialization fails.
Example usage:
request := NewLLMRequest(config, provider)
stream, err := request.GenerateStream(context.Background(), []LLMMessage{
	{Role: UserRole, Text: "Tell me a story"},
})
if err != nil {
	log.Fatal(err)
}

for response := range stream {
	if response.Error != nil {
		log.Printf("Error: %v", response.Error)
		break
	}
	if response.Done {
		break
	}
	fmt.Print(response.Text)
}
type LLMRequestConfig ¶
type LLMRequestConfig struct {
// contains filtered or unexported fields
}
LLMRequestConfig defines configuration parameters for LLM requests.
func NewRequestConfig ¶
func NewRequestConfig(opts ...RequestOption) LLMRequestConfig
NewRequestConfig creates a new config with default values. Options supplied via opts override the corresponding defaults.
type LLMResponse ¶
type LLMResponse struct {
	// Text contains the generated response from the model
	Text string

	// TotalInputToken is the number of tokens in the input prompt
	TotalInputToken int

	// TotalOutputToken is the number of tokens in the generated response
	TotalOutputToken int

	// CompletionTime is the total time taken to generate the response in seconds
	CompletionTime float64
}
LLMResponse encapsulates the response from an LLM provider. It includes both the generated text and metadata about the request.
func (LLMResponse) Extract ¶ added in v0.3.0
func (r LLMResponse) Extract(extractor ResponseExtractor) (interface{}, error)
Extract is a method on LLMResponse that uses a ResponseExtractor to extract structured data from the LLM response. It delegates the extraction logic to the provided extractor, which processes the response text and returns the extracted data in a structured format.
Parameters:
- extractor: An implementation of the ResponseExtractor interface that defines how to extract and parse the data from the LLM response.
Returns:
- interface{}: The extracted data, whose type depends on the specific extractor implementation.
- error: An error if the extraction process fails.
Example Usage:
type MyStruct struct {
	Field1 string `json:"field1"`
	Field2 int    `json:"field2"`
}

func main() {
	response := LLMResponse{
		Text:             "```json\n{\"field1\": \"value1\", \"field2\": 42}\n```",
		TotalInputToken:  10,
		TotalOutputToken: 5,
		CompletionTime:   1.23,
	}

	var target MyStruct
	extractor := NewJSONExtractor(&target)

	result, err := response.Extract(extractor)
	if err != nil {
		log.Fatalf("Failed to extract data: %v", err)
	}

	fmt.Printf("Extracted data: %+v\n", result)
}
type ListParams ¶ added in v0.19.0
type ListParams struct {
Cursor string `json:"cursor"`
}
type ListPromptsResult ¶ added in v0.19.0
type ListResourcesResult ¶ added in v0.19.0
type ListResourcesResult struct { Resources []Resource `json:"resources"` NextCursor string `json:"nextCursor,omitempty"` }
ListResourcesResult represents the result of listing resources.
type ListToolsResult ¶ added in v0.19.0
type ListToolsResult struct { Tools []Tool `json:"tools"` NextCursor string `json:"nextCursor,omitempty"` }
ListToolsResult represents the result of listing available tools.
type LogLevel ¶ added in v0.19.0
type LogLevel string
LogLevel represents the severity level of a log message. The levels follow standard syslog severity levels.
type LogManager ¶ added in v0.19.0
type LogManager struct {
// contains filtered or unexported fields
}
LogManager handles logging operations and level management.
func NewLogManager ¶ added in v0.19.0
func NewLogManager(output io.Writer) *LogManager
NewLogManager creates a new LogManager with the specified output writer. If output is nil, os.Stderr will be used.
func (*LogManager) IsLevelEnabled ¶ added in v0.19.0
func (lm *LogManager) IsLevelEnabled(level LogLevel) bool
IsLevelEnabled checks if a given log level is enabled based on the current level.
func (*LogManager) Log ¶ added in v0.19.0
func (lm *LogManager) Log(params LogMessageParams) error
Log logs a message if its level is enabled.
func (*LogManager) SetLevel ¶ added in v0.19.0
func (lm *LogManager) SetLevel(level LogLevel) error
SetLevel sets the current logging level.
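Example usage (a small sketch of the LogManager flow; it assumes "info" and "debug" are valid syslog-style level strings for LogLevel):

lm := NewLogManager(os.Stderr)

if err := lm.SetLevel(LogLevel("info")); err != nil {
	log.Fatal(err)
}

// Debug is below the configured level, so this check is expected to report false.
fmt.Println(lm.IsLevelEnabled(LogLevel("debug")))

_ = lm.Log(LogMessageParams{
	Level:  LogLevel("info"),
	Logger: "example",
	Data:   "server started",
})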
type LogMessageParams ¶ added in v0.19.0
type LogMessageParams struct { Level LogLevel `json:"level"` Logger string `json:"logger,omitempty"` Data interface{} `json:"data"` }
LogMessageParams represents the parameters for logging a message.
type Logger ¶ added in v0.19.0
type Logger interface {
	Debug(args ...interface{})
	Info(args ...interface{})
	Warn(args ...interface{})
	Error(args ...interface{})

	WithFields(fields map[string]interface{}) Logger
	WithContext(ctx context.Context) Logger
	WithErr(err error) Logger
}
Logger interface - defines the common logging methods
func NewDefaultLogger ¶ added in v0.19.0
func NewDefaultLogger() Logger
NewDefaultLogger creates a new DefaultLogger that logs to standard output
func NewLogrusLogger ¶ added in v0.19.0
NewLogrusLogger creates a new LogrusLogger with the provided logrus.Logger
func NewNullLogger ¶ added in v0.19.0
func NewNullLogger() Logger
NewNullLogger creates a new NullLogger
func NewSlogLogger ¶ added in v0.19.0
NewSlogLogger creates a new SlogLogger with the provided slog.Logger
func NewZapLogger ¶ added in v0.19.0
NewZapLogger creates a new ZapLogger with the provided zap.Logger
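All logger implementations share the same fluent style; for example, with the default logger (a minimal sketch):

logger := NewDefaultLogger()

logger.WithFields(map[string]interface{}{
	"component": "embedding",
	"attempt":   1,
}).Info("generating embeddings")

err := errors.New("connection refused")
logger.WithErr(err).Error("operation failed")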
type LogrusLogger ¶ added in v0.19.0
type LogrusLogger struct {
// contains filtered or unexported fields
}
LogrusLogger implements the Logger interface using logrus
func (*LogrusLogger) Debug ¶ added in v0.19.0
func (l *LogrusLogger) Debug(args ...interface{})
Debug log for LogrusLogger
func (*LogrusLogger) Error ¶ added in v0.19.0
func (l *LogrusLogger) Error(args ...interface{})
Error log for LogrusLogger
func (*LogrusLogger) Info ¶ added in v0.19.0
func (l *LogrusLogger) Info(args ...interface{})
Info log for LogrusLogger
func (*LogrusLogger) Warn ¶ added in v0.19.0
func (l *LogrusLogger) Warn(args ...interface{})
Warn log for LogrusLogger
func (*LogrusLogger) WithContext ¶ added in v0.19.0
func (l *LogrusLogger) WithContext(ctx context.Context) Logger
WithContext adds context to the logger and returns a new LogrusLogger
func (*LogrusLogger) WithErr ¶ added in v0.19.0
func (l *LogrusLogger) WithErr(err error) Logger
WithErr adds an error to the logger and returns a new LogrusLogger
func (*LogrusLogger) WithFields ¶ added in v0.19.0
func (l *LogrusLogger) WithFields(fields map[string]interface{}) Logger
WithFields adds fields to the logger and returns a new LogrusLogger
type MockBedrockClient ¶ added in v0.18.0
MockBedrockClient is a mock type for the BedrockClient interface
func (*MockBedrockClient) Converse ¶ added in v0.18.0
func (_m *MockBedrockClient) Converse(ctx context.Context, params *bedrockruntime.ConverseInput, optFns ...func(*bedrockruntime.Options)) (*bedrockruntime.ConverseOutput, error)
Converse provides a mock function with given fields: ctx, params, optFns
func (*MockBedrockClient) ConverseStream ¶ added in v0.18.0
func (_m *MockBedrockClient) ConverseStream(ctx context.Context, params *bedrockruntime.ConverseStreamInput, optFns ...func(*bedrockruntime.Options)) (*bedrockruntime.ConverseStreamOutput, error)
ConverseStream provides a mock function with given fields: ctx, params, optFns
func (*MockBedrockClient) InvokeModel ¶ added in v0.18.0
func (_m *MockBedrockClient) InvokeModel(ctx context.Context, params *bedrockruntime.InvokeModelInput, optFns ...func(*bedrockruntime.Options)) (*bedrockruntime.InvokeModelOutput, error)
InvokeModel provides a mock function with given fields: ctx, params, optFns
type NoOpsLLMProvider ¶
type NoOpsLLMProvider struct {
// contains filtered or unexported fields
}
NoOpsLLMProvider implements LLMProvider interface for testing purposes. It provides default responses for both regular and streaming requests.
func NewNoOpsLLMProvider ¶
func NewNoOpsLLMProvider(opts ...NoOpsOption) *NoOpsLLMProvider
NewNoOpsLLMProvider creates a new NoOpsLLMProvider with optional configurations.
func (*NoOpsLLMProvider) GetResponse ¶
func (n *NoOpsLLMProvider) GetResponse(_ context.Context, _ []LLMMessage, _ LLMRequestConfig) (LLMResponse, error)
GetResponse implements the LLMProvider interface.
func (*NoOpsLLMProvider) GetStreamingResponse ¶
func (n *NoOpsLLMProvider) GetStreamingResponse(ctx context.Context, _ []LLMMessage, _ LLMRequestConfig) (<-chan StreamingLLMResponse, error)
GetStreamingResponse implements the LLMProvider interface.
type NoOpsOption ¶
type NoOpsOption func(*NoOpsLLMProvider)
NoOpsOption defines the function signature for option pattern.
func WithResponse ¶
func WithResponse(response LLMResponse) NoOpsOption
WithResponse sets a custom LLMResponse for the NoOpsProvider.
func WithStreamingResponse ¶
func WithStreamingResponse(response StreamingLLMResponse) NoOpsOption
WithStreamingResponse sets a custom StreamingLLMResponse for the NoOpsProvider.
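Example usage (a sketch of wiring NoOpsLLMProvider into a test; a zero-value LLMRequestConfig is assumed to be acceptable here):

provider := NewNoOpsLLMProvider(
    WithResponse(LLMResponse{Text: "canned answer"}),
)

response, err := provider.GetResponse(
    context.Background(),
    []LLMMessage{{Role: "user", Text: "hello"}},
    LLMRequestConfig{},
)
if err != nil {
    log.Fatal(err)
}
fmt.Println(response.Text) // canned answer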
type Notification ¶ added in v0.19.0
type Notification struct {
    JSONRPC string          `json:"jsonrpc"`
    Method  string          `json:"method"`
    Params  json.RawMessage `json:"params"`
}
Notification represents a JSON-RPC notification message.
type NullLogger ¶ added in v0.19.0
type NullLogger struct{}
NullLogger - a logger that does nothing
func (*NullLogger) Debug ¶ added in v0.19.0
func (l *NullLogger) Debug(args ...interface{})
Debug is a no-op for NullLogger
func (*NullLogger) Error ¶ added in v0.19.0
func (l *NullLogger) Error(args ...interface{})
Error is a no-op for NullLogger
func (*NullLogger) Info ¶ added in v0.19.0
func (l *NullLogger) Info(args ...interface{})
Info is a no-op for NullLogger
func (*NullLogger) Warn ¶ added in v0.19.0
func (l *NullLogger) Warn(args ...interface{})
Warn is a no-op for NullLogger
func (*NullLogger) WithContext ¶ added in v0.19.0
func (l *NullLogger) WithContext(ctx context.Context) Logger
WithContext is a no-op for NullLogger
func (*NullLogger) WithErr ¶ added in v0.19.0
func (l *NullLogger) WithErr(err error) Logger
WithErr is a no-op for NullLogger
func (*NullLogger) WithFields ¶ added in v0.19.0
func (l *NullLogger) WithFields(fields map[string]interface{}) Logger
WithFields is a no-op for NullLogger
type OpenAIClient ¶
type OpenAIClient struct {
// contains filtered or unexported fields
}
OpenAIClient implements the OpenAIClientProvider interface using OpenAI's official SDK.
func NewOpenAIClient ¶ added in v0.1.0
func NewOpenAIClient(apiKey string, opts ...option.RequestOption) *OpenAIClient
NewOpenAIClient creates a new instance of OpenAIClient with the provided API key and optional client options.
Example usage:
// Basic usage with API key
client := NewOpenAIClient("your-api-key")

// Usage with custom HTTP client
httpClient := &http.Client{Timeout: 30 * time.Second}
client := NewOpenAIClient(
    "your-api-key",
    option.WithHTTPClient(httpClient),
)
func (*OpenAIClient) CreateCompletion ¶
func (c *OpenAIClient) CreateCompletion(ctx context.Context, params openai.ChatCompletionNewParams) (*openai.ChatCompletion, error)
CreateCompletion implements the OpenAIClientProvider interface using the OpenAI client.
func (*OpenAIClient) CreateStreamingCompletion ¶
func (c *OpenAIClient) CreateStreamingCompletion(ctx context.Context, params openai.ChatCompletionNewParams) *ssestream.Stream[openai.ChatCompletionChunk]
CreateStreamingCompletion implements the streaming support for the OpenAIClientProvider interface.
type OpenAIClientProvider ¶ added in v0.1.0
type OpenAIClientProvider interface {
    // CreateCompletion creates a new chat completion using OpenAI's API.
    CreateCompletion(ctx context.Context, params openai.ChatCompletionNewParams) (*openai.ChatCompletion, error)

    // CreateStreamingCompletion creates a streaming chat completion using OpenAI's API.
    CreateStreamingCompletion(ctx context.Context, params openai.ChatCompletionNewParams) *ssestream.Stream[openai.ChatCompletionChunk]
}
OpenAIClientProvider defines the interface for interacting with OpenAI's API. This interface abstracts the essential operations used by OpenAILLMProvider.
type OpenAICompatibleEmbeddingProvider ¶ added in v0.2.0
type OpenAICompatibleEmbeddingProvider struct {
// contains filtered or unexported fields
}
OpenAICompatibleEmbeddingProvider implements EmbeddingProvider using an OpenAI-compatible REST API.
func NewOpenAICompatibleEmbeddingProvider ¶ added in v0.2.0
func NewOpenAICompatibleEmbeddingProvider(baseURL string, httpClient *http.Client) *OpenAICompatibleEmbeddingProvider
NewOpenAICompatibleEmbeddingProvider creates a new provider that works with OpenAI-compatible APIs.
func (*OpenAICompatibleEmbeddingProvider) Generate ¶ added in v0.2.0
func (p *OpenAICompatibleEmbeddingProvider) Generate(ctx context.Context, input interface{}, model EmbeddingModel) (*EmbeddingResponse, error)
Generate implements EmbeddingProvider.Generate for OpenAI-compatible APIs.
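Example usage (a sketch only; it assumes EmbeddingModel is a string-based type and that baseURL points at a running OpenAI-compatible embeddings endpoint):

httpClient := &http.Client{Timeout: 30 * time.Second}
provider := NewOpenAICompatibleEmbeddingProvider("http://localhost:8000/v1", httpClient)

// The model name and the string conversion are assumptions for illustration.
resp, err := provider.Generate(ctx, "hello world", EmbeddingModel("text-embedding-3-small"))
if err != nil {
    log.Fatal(err)
}
_ = resp // inspect resp for the generated embeddings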
type OpenAILLMProvider ¶
type OpenAILLMProvider struct {
// contains filtered or unexported fields
}
OpenAILLMProvider implements the LLMProvider interface using OpenAI's official SDK.
func NewOpenAILLMProvider ¶
func NewOpenAILLMProvider(config OpenAIProviderConfig) *OpenAILLMProvider
NewOpenAILLMProvider creates a new OpenAI provider with the specified configuration. If no model is specified, it defaults to GPT-3.5-turbo.
Example usage:
// Create client
client := NewOpenAIClient("your-api-key")

// Create provider with default model
provider := NewOpenAILLMProvider(OpenAIProviderConfig{
    Client: client,
})

// Create provider with specific model
provider := NewOpenAILLMProvider(OpenAIProviderConfig{
    Client: client,
    Model:  "gpt-4",
})
func (*OpenAILLMProvider) GetResponse ¶
func (p *OpenAILLMProvider) GetResponse(ctx context.Context, messages []LLMMessage, config LLMRequestConfig) (LLMResponse, error)
GetResponse generates a response using OpenAI's API for the given messages and configuration. It supports different message roles (user, assistant, system) and handles them appropriately.
Example usage:
messages := []LLMMessage{
    {Role: "system", Text: "You are a helpful assistant"},
    {Role: "user", Text: "What is the capital of France?"},
}

response, err := provider.GetResponse(ctx, messages, config)
if err != nil {
    log.Fatal(err)
}
fmt.Printf("Response: %s\n", response.Text)
func (*OpenAILLMProvider) GetStreamingResponse ¶
func (p *OpenAILLMProvider) GetStreamingResponse(ctx context.Context, messages []LLMMessage, config LLMRequestConfig) (<-chan StreamingLLMResponse, error)
GetStreamingResponse generates a streaming response using OpenAI's API. It supports streaming tokens as they're generated and handles context cancellation.
Example usage:
stream, err := provider.GetStreamingResponse(ctx, messages, config)
if err != nil {
    log.Fatal(err)
}

for response := range stream {
    if response.Error != nil {
        log.Printf("Error: %v", response.Error)
        break
    }
    fmt.Print(response.Text)
}
type OpenAIProviderConfig ¶
type OpenAIProviderConfig struct {
    // Client is the OpenAIClientProvider implementation to use
    Client OpenAIClientProvider

    // Model specifies which OpenAI model to use (e.g., "gpt-4", "gpt-3.5-turbo")
    Model openai.ChatModel
}
OpenAIProviderConfig holds configuration for OpenAI provider.
type PostgresProvider ¶
type PostgresProvider struct {
// contains filtered or unexported fields
}
PostgresProvider implements VectorStorage using PostgreSQL with pgvector extension.
func NewPostgresProvider ¶
func NewPostgresProvider(config PostgresStorageConfig) (*PostgresProvider, error)
NewPostgresProvider creates a new PostgreSQL-based vector storage.
func (*PostgresProvider) Close ¶
func (p *PostgresProvider) Close() error
Close closes the database connection.
func (*PostgresProvider) CreateCollection ¶
func (p *PostgresProvider) CreateCollection(ctx context.Context, config *VectorCollectionConfig) error
CreateCollection implements VectorStorage.CreateCollection.
func (*PostgresProvider) DeleteCollection ¶
func (p *PostgresProvider) DeleteCollection(ctx context.Context, name string) error
DeleteCollection implements VectorStorage.DeleteCollection.
func (*PostgresProvider) DeleteDocument ¶
func (p *PostgresProvider) DeleteDocument(ctx context.Context, collection, id string) error
DeleteDocument implements VectorStorage.DeleteDocument.
func (*PostgresProvider) GetDocument ¶
func (p *PostgresProvider) GetDocument(ctx context.Context, collection, id string) (*VectorDocument, error)
GetDocument implements VectorStorage.GetDocument.
func (*PostgresProvider) Initialize ¶
func (p *PostgresProvider) Initialize(ctx context.Context) error
Initialize implements VectorStorageProvider.Initialize.
func (*PostgresProvider) ListCollections ¶
func (p *PostgresProvider) ListCollections(ctx context.Context) ([]string, error)
ListCollections implements VectorStorage.ListCollections.
func (*PostgresProvider) SearchByID ¶
func (p *PostgresProvider) SearchByID(ctx context.Context, collection, id string, opts *VectorSearchOptions) ([]VectorSearchResult, error)
SearchByID implements VectorStorage.SearchByID.
func (*PostgresProvider) SearchByVector ¶
func (p *PostgresProvider) SearchByVector(ctx context.Context, collection string, vector []float32, opts *VectorSearchOptions) ([]VectorSearchResult, error)
SearchByVector implements VectorStorage.SearchByVector.
func (*PostgresProvider) UpsertDocument ¶
func (p *PostgresProvider) UpsertDocument(ctx context.Context, collection string, doc *VectorDocument) error
UpsertDocument implements VectorStorage.UpsertDocument.
func (*PostgresProvider) UpsertDocuments ¶
func (p *PostgresProvider) UpsertDocuments(ctx context.Context, collection string, docs []*VectorDocument) error
UpsertDocuments implements VectorStorage.UpsertDocuments.
type PostgresStorageConfig ¶
type PostgresStorageConfig struct {
    // ConnectionString is the PostgreSQL connection string
    ConnectionString string

    // MaxDimension is the maximum allowed vector dimension
    MaxDimension int

    // SchemaName is the PostgreSQL schema to use (default: public)
    SchemaName string
}
PostgresStorageConfig holds configuration for PostgreSQL vector storage.
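Example usage (a sketch; the connection string is a placeholder):

provider, err := NewPostgresProvider(PostgresStorageConfig{
    ConnectionString: "postgres://user:pass@localhost:5432/vectors?sslmode=disable",
    MaxDimension:     1536,
    SchemaName:       "public",
})
if err != nil {
    log.Fatal(err)
}
defer provider.Close()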
type Prompt ¶ added in v0.19.0
type Prompt struct {
    Name        string           `json:"name"`
    Description string           `json:"description,omitempty"`
    Arguments   []PromptArgument `json:"arguments,omitempty"`
    Messages    []PromptMessage  `json:"messages,omitempty"`
}
type PromptArgument ¶ added in v0.19.0
type PromptContent ¶ added in v0.19.0
type PromptGetResponse ¶ added in v0.19.0
type PromptGetResponse struct {
    Description string          `json:"description"`
    Messages    []PromptMessage `json:"messages"`
}
type PromptMessage ¶ added in v0.19.0
type PromptMessage struct {
    Role    string        `json:"role"`
    Content PromptContent `json:"content"`
}
type ReadResourceParams ¶ added in v0.19.0
type ReadResourceParams struct {
URI string `json:"uri"`
}
ReadResourceParams represents parameters for reading a resource.
type ReadResourceResult ¶ added in v0.19.0
type ReadResourceResult struct {
Contents []ResourceContent `json:"contents"`
}
ReadResourceResult represents the result of reading a resource.
type Request ¶ added in v0.19.0
type Request struct {
    JSONRPC string           `json:"jsonrpc"`
    ID      *json.RawMessage `json:"id"`
    Method  string           `json:"method"`
    Params  json.RawMessage  `json:"params"`
}
type RequestOption ¶
type RequestOption func(*LLMRequestConfig)
RequestOption is a function that modifies the config
func UseToolsProvider ¶ added in v0.7.0
func UseToolsProvider(provider *ToolsProvider) RequestOption
func WithAllowedTools ¶ added in v0.10.0
func WithAllowedTools(allowedTools []string) RequestOption
func WithMaxIterations ¶ added in v0.13.0
func WithMaxIterations(maxIterations int) RequestOption
WithMaxIterations sets the maximum number of iterations for the LLM request.
func WithMaxToken ¶
func WithMaxToken(maxToken int64) RequestOption
WithMaxToken sets the max token value
func WithTemperature ¶
func WithTemperature(temp float64) RequestOption
WithTemperature sets the temperature value
func WithThinkingEnabled ¶ added in v0.17.0
func WithThinkingEnabled(thinkingBudget int64) RequestOption
WithThinkingEnabled sets the thinking option for the LLM request configuration.
func WithTracingEnabled ¶ added in v0.13.0
func WithTracingEnabled() RequestOption
WithTracingEnabled sets the tracing option for the LLM request configuration.
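The options above follow the functional-options pattern: each RequestOption mutates an LLMRequestConfig. The sketch below shows how a set of options composes onto a config; the package function that ultimately consumes these options is not shown here.

opts := []RequestOption{
    WithMaxToken(1024),
    WithTemperature(0.2),
    WithMaxIterations(5),
    WithTracingEnabled(),
}

cfg := LLMRequestConfig{}
for _, opt := range opts {
    opt(&cfg) // each option mutates the request config
}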
type Resource ¶ added in v0.19.0
type Resource struct {
    URI         string `json:"uri"`
    Name        string `json:"name"`
    Description string `json:"description,omitempty"`
    MimeType    string `json:"mimeType,omitempty"`
    Size        int    `json:"size,omitempty"`
    TextContent string `json:"-"`
}
Resource represents a content resource in the MCP system.
type ResourceContent ¶ added in v0.19.0
type ResourceContent struct {
    URI      string `json:"uri"`
    MimeType string `json:"mimeType"`
    Text     string `json:"text,omitempty"`
    Blob     string `json:"blob,omitempty"`
}
ResourceContent represents the actual content of a resource.
type Response ¶ added in v0.19.0
type Response struct {
    JSONRPC string           `json:"jsonrpc"`
    ID      *json.RawMessage `json:"id"`
    Result  interface{}      `json:"result,omitempty"`
    Error   *Error           `json:"error,omitempty"`
}
Response represents a JSON-RPC response message.
type ResponseExtractor ¶ added in v0.3.0
type ResponseExtractor interface {
    // Extract processes the LLM response and returns the extracted data.
    // The extracted data's type depends on the specific extractor implementation.
    Extract(response LLMResponse) (interface{}, error)
}
ResponseExtractor defines the interface for extracting structured data from LLM responses.
type ResponseToStartupClientRequest ¶ added in v0.19.0
type ResponseToStartupClientRequest struct {
    JSONRPC string           `json:"jsonrpc"`
    ID      *json.RawMessage `json:"id"`
    Result  struct {
        ProtocolVersion string                 `json:"protocolVersion"`
        Capabilities    map[string]interface{} `json:"capabilities"`
        ServerInfo      struct {
            Name    string `json:"name"`
            Version string `json:"version"`
        } `json:"serverInfo"`
    } `json:"result,omitempty"`
    Error *Error `json:"error,omitempty"`
}
ResponseToStartupClientRequest represents a JSON-RPC response message.
type RetryStrategy ¶ added in v0.19.0
type SQLiteChatHistoryStorage ¶ added in v0.14.0
type SQLiteChatHistoryStorage struct {
// contains filtered or unexported fields
}
SQLiteChatHistoryStorage is an SQLite implementation of ChatHistoryStorage
func NewSQLiteChatHistoryStorage ¶ added in v0.14.0
func NewSQLiteChatHistoryStorage(db *sql.DB, logger Logger) (*SQLiteChatHistoryStorage, error)
NewSQLiteChatHistoryStorage creates a new instance of SQLiteChatHistoryStorage. It takes an open *sql.DB handle for the SQLite database and a Logger.
func (*SQLiteChatHistoryStorage) AddMessage ¶ added in v0.14.0
func (s *SQLiteChatHistoryStorage) AddMessage(ctx context.Context, sessionID string, message ChatHistoryMessage) error
AddMessage adds a new message to an existing conversation in SQLite
func (*SQLiteChatHistoryStorage) Close ¶ added in v0.14.0
func (s *SQLiteChatHistoryStorage) Close() error
Close closes the database connection
func (*SQLiteChatHistoryStorage) CreateChat ¶ added in v0.14.0
func (s *SQLiteChatHistoryStorage) CreateChat(ctx context.Context) (*ChatHistory, error)
CreateChat initializes a new chat conversation in SQLite
func (*SQLiteChatHistoryStorage) DeleteChat ¶ added in v0.14.0
func (s *SQLiteChatHistoryStorage) DeleteChat(ctx context.Context, sessionID string) error
DeleteChat removes a chat and its messages
func (*SQLiteChatHistoryStorage) GetChat ¶ added in v0.14.0
func (s *SQLiteChatHistoryStorage) GetChat(ctx context.Context, sessionID string) (*ChatHistory, error)
GetChat retrieves a chat by its session ID
func (*SQLiteChatHistoryStorage) ListChatHistories ¶ added in v0.14.0
func (s *SQLiteChatHistoryStorage) ListChatHistories(ctx context.Context) ([]ChatHistory, error)
ListChatHistories returns all chat histories
func (*SQLiteChatHistoryStorage) UpdateChatMetadata ¶ added in v0.19.0
func (s *SQLiteChatHistoryStorage) UpdateChatMetadata(ctx context.Context, sessionID string, metadata map[string]interface{}) error
UpdateChatMetadata updates the metadata for an existing chat
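Example usage (a sketch; it assumes an SQLite driver such as github.com/mattn/go-sqlite3 is registered with database/sql, and it stops at CreateChat because the ChatHistory and ChatHistoryMessage field sets are not shown above):

db, err := sql.Open("sqlite3", "chat_history.db") // requires a registered SQLite driver
if err != nil {
    log.Fatal(err)
}

storage, err := NewSQLiteChatHistoryStorage(db, NewNullLogger())
if err != nil {
    log.Fatal(err)
}
defer storage.Close()

chat, err := storage.CreateChat(ctx)
if err != nil {
    log.Fatal(err)
}
_ = chat // the returned ChatHistory identifies the session used by AddMessage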
type SSEServer ¶ added in v0.19.0
type SSEServer struct {
    *BaseServer
    // contains filtered or unexported fields
}
SSEServer is the MCP server implementation using Server-Sent Events.
func NewSSEServer ¶ added in v0.19.0
func NewSSEServer(baseServer *BaseServer) *SSEServer
NewSSEServer creates a new SSEServer.
func (*SSEServer) Run ¶ added in v0.19.0
Run starts the MCP SSEServer, listening for incoming HTTP connections.
func (*SSEServer) SetAddress ¶ added in v0.19.0
SetAddress allows setting the server's listening address.
type SSETransport ¶ added in v0.19.0
type SSETransport struct {
// contains filtered or unexported fields
}
func NewSSETransport ¶ added in v0.19.0
func NewSSETransport(logger Logger) *SSETransport
func (*SSETransport) Close ¶ added in v0.19.0
func (t *SSETransport) Close(ctx context.Context) error
func (*SSETransport) Connect ¶ added in v0.19.0
func (t *SSETransport) Connect(ctx context.Context, config ClientConfig) error
func (*SSETransport) SendMessage ¶ added in v0.19.0
func (t *SSETransport) SendMessage(ctx context.Context, message interface{}) error
func (*SSETransport) SetReceiveMessageCallback ¶ added in v0.19.0
func (t *SSETransport) SetReceiveMessageCallback(callback func(message []byte))
type ServerConfig ¶ added in v0.19.0
type ServerConfig struct {
// contains filtered or unexported fields
}
ServerConfig holds all configuration for BaseServer
type ServerConfigOption ¶ added in v0.19.0
type ServerConfigOption func(*ServerConfig)
ServerConfigOption is a function that modifies ServerConfig
func UseCapabilities ¶ added in v0.19.0
func UseCapabilities(capabilities Capabilities) ServerConfigOption
func UseLogLevel ¶ added in v0.19.0
func UseLogLevel(level LogLevel) ServerConfigOption
UseLogLevel sets minimum log level
func UseLogger ¶ added in v0.19.0
func UseLogger(logger Logger) ServerConfigOption
UseLogger sets a custom logger
func UseSSEServerPort ¶ added in v0.19.0
func UseSSEServerPort(port string) ServerConfigOption
func UseServerInfo ¶ added in v0.19.0
func UseServerInfo(name, version string) ServerConfigOption
UseServerInfo sets server name and version
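Example usage (a sketch combining the configuration options with an SSE server; the port format and the Run signature are assumptions, since neither is shown above):

baseServer, err := NewBaseServer(
    UseLogger(NewDefaultLogger()),
    UseServerInfo("demo-server", "1.0.0"),
    UseSSEServerPort(":8080"), // port format is an assumption
)
if err != nil {
    log.Fatal(err)
}

sseServer := NewSSEServer(baseServer)

// A context argument to Run is assumed, mirroring the StdIO server.
if err := sseServer.Run(context.Background()); err != nil {
    log.Fatal(err)
}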
type ServerInfo ¶ added in v0.19.0
ServerInfo represents server information.
type SetLogLevelParams ¶ added in v0.19.0
type SetLogLevelParams struct {
Level LogLevel `json:"level"`
}
SetLogLevelParams represents the parameters for setting the log level.
type SlogLogger ¶ added in v0.19.0
type SlogLogger struct {
// contains filtered or unexported fields
}
SlogLogger implements the Logger interface using the standard library's slog package
func (*SlogLogger) Debug ¶ added in v0.19.0
func (l *SlogLogger) Debug(args ...interface{})
Debug log for SlogLogger
func (*SlogLogger) Error ¶ added in v0.19.0
func (l *SlogLogger) Error(args ...interface{})
Error log for SlogLogger
func (*SlogLogger) Info ¶ added in v0.19.0
func (l *SlogLogger) Info(args ...interface{})
Info log for SlogLogger
func (*SlogLogger) Warn ¶ added in v0.19.0
func (l *SlogLogger) Warn(args ...interface{})
Warn log for SlogLogger
func (*SlogLogger) WithContext ¶ added in v0.19.0
func (l *SlogLogger) WithContext(ctx context.Context) Logger
WithContext adds context to the logger and returns a new SlogLogger
func (*SlogLogger) WithErr ¶ added in v0.19.0
func (l *SlogLogger) WithErr(err error) Logger
WithErr adds an error to the logger and returns a new SlogLogger
func (*SlogLogger) WithFields ¶ added in v0.19.0
func (l *SlogLogger) WithFields(fields map[string]interface{}) Logger
WithFields adds fields to the logger and returns a new SlogLogger
type StdIOServer ¶ added in v0.19.0
type StdIOServer struct {
    *BaseServer // Embed the common server
    // contains filtered or unexported fields
}
StdIOServer is the MCP server implementation using standard input/output.
func NewStdIOServer ¶ added in v0.19.0
func NewStdIOServer(baseServer *BaseServer, in io.Reader, out io.Writer) *StdIOServer
NewStdIOServer creates a new StdIOServer.
type StdIOTransport ¶ added in v0.19.0
type StdIOTransport struct {
// contains filtered or unexported fields
}
func NewStdIOTransport ¶ added in v0.19.0
func NewStdIOTransport(logger Logger) *StdIOTransport
func (*StdIOTransport) Close ¶ added in v0.19.0
func (t *StdIOTransport) Close(ctx context.Context) error
func (*StdIOTransport) Connect ¶ added in v0.19.0
func (t *StdIOTransport) Connect(ctx context.Context, config ClientConfig) error
func (*StdIOTransport) SendMessage ¶ added in v0.19.0
func (t *StdIOTransport) SendMessage(ctx context.Context, message interface{}) error
func (*StdIOTransport) SetReceiveMessageCallback ¶ added in v0.19.0
func (t *StdIOTransport) SetReceiveMessageCallback(callback func(message []byte))
type StreamIteratorService ¶ added in v0.13.0
type StreamIteratorService interface {
Next() (*genai.GenerateContentResponse, error)
}
StreamIteratorService defines the interface for streaming content generation
type StreamingLLMResponse ¶
type StreamingLLMResponse struct {
    // Text contains the partial response text
    Text string

    // Done indicates if this is the final chunk
    Done bool

    // Error contains any error that occurred during streaming
    Error error

    // TokenCount is the number of tokens in this chunk
    TokenCount int
}
StreamingLLMResponse represents a chunk of streaming response from an LLM provider. It contains partial text, completion status, any errors, and token usage information.
type Table ¶ added in v0.3.0
type Table struct {
    Headers []string   // Headers contains the column names of the table.
    Rows    [][]string // Rows contains the data rows of the table.
    Format  string     // Format specifies the format of the table (e.g., "markdown").
}
Table represents extracted tabular data from an LLM response. It includes headers, rows, and the format of the table (e.g., "markdown").
type TableExtractor ¶ added in v0.3.0
type TableExtractor struct {
Format string // Format specifies the expected format of the table (e.g., "markdown").
}
TableExtractor implements the ResponseExtractor interface for extracting tabular data.
Example ¶
// Create a response containing a markdown table
response := LLMResponse{
    Text: `| Name | Age |
|------|-----|
| John | 30 |
| Jane | 25 |`,
}

// Create a table extractor
extractor := NewTableExtractor("markdown")

// Extract the table
result, err := extractor.Extract(response)
if err != nil {
    panic(err)
}

// Use the extracted table
table := result.(*Table)
fmt.Printf("Headers: %v\n", table.Headers)
fmt.Printf("First row: %v\n", table.Rows[0])

Output:
Headers: [Name Age]
First row: [John 30]
func NewTableExtractor ¶ added in v0.3.0
func NewTableExtractor(format string) *TableExtractor
NewTableExtractor creates a new TableExtractor for extracting tables in the specified format. The format parameter specifies the expected format of the table (e.g., "markdown").
func (*TableExtractor) Extract ¶ added in v0.3.0
func (e *TableExtractor) Extract(response LLMResponse) (interface{}, error)
Extract processes the LLM response to extract a table in the specified format. It identifies the table headers and rows, and returns a Table struct containing the extracted data.
Parameters:
- response: The LLMResponse containing the text to be processed.
Returns:
- interface{}: A Table struct containing the extracted headers and rows.
- error: An error if the table is malformed or no valid table is found.
Example Usage:
func main() {
    response := LLMResponse{
        Text: `
| Name  | Age | Occupation |
|-------|-----|------------|
| Alice | 30  | Engineer   |
| Bob   | 25  | Designer   |
`,
    }

    extractor := NewTableExtractor("markdown")

    result, err := extractor.Extract(response)
    if err != nil {
        log.Fatalf("Failed to extract table: %v", err)
    }

    table := result.(*Table)
    fmt.Printf("Headers: %v\n", table.Headers)
    for _, row := range table.Rows {
        fmt.Printf("Row: %v\n", row)
    }
}
type TagExtractor ¶ added in v0.3.0
type TagExtractor struct {
    // Tag is the name of the tag to extract content from (e.g., "result", "code")
    Tag string
}
TagExtractor implements ResponseExtractor for custom tag-based responses.
Example ¶
// Create a response with tagged content
response := LLMResponse{
    Text: "The answer is: <result>42</result>",
}

// Create a tag extractor
extractor := NewTagExtractor("result")

// Extract the content
result, err := extractor.Extract(response)
if err != nil {
    panic(err)
}

// Use the extracted content
fmt.Println(result.(string))
Output: 42
func NewTagExtractor ¶ added in v0.3.0
func NewTagExtractor(tag string) *TagExtractor
NewTagExtractor creates a new TagExtractor for the specified tag.
func (*TagExtractor) Extract ¶ added in v0.3.0
func (e *TagExtractor) Extract(response LLMResponse) (interface{}, error)
Extract implements ResponseExtractor.Extract for tag-based content.
type Tool ¶ added in v0.19.0
type Tool struct {
    Name        string          `json:"name"`
    Description string          `json:"description"`
    InputSchema json.RawMessage `json:"inputSchema"`
    Handler     func(ctx context.Context, params CallToolParams) (CallToolResult, error) `json:"-"`
}
Tool represents a callable tool in the MCP system.
type ToolHandler ¶ added in v0.19.0
type ToolHandler interface {
    GetName() string
    GetDescription() string
    GetInputSchema() json.RawMessage
    Handler(params CallToolParams) (CallToolResult, error)
}
type ToolImplementation ¶ added in v0.19.0
type ToolImplementation func(args json.RawMessage) (CallToolResult, error)
type ToolResultContent ¶ added in v0.19.0
ToolResultContent represents the content returned by a tool.
type ToolsProvider ¶ added in v0.7.0
type ToolsProvider struct {
// contains filtered or unexported fields
}
func NewToolsProvider ¶ added in v0.7.0
func NewToolsProvider() *ToolsProvider
NewToolsProvider creates a new ToolsProvider with no initial MCP client or tools.
func (*ToolsProvider) AddMCPClient ¶ added in v0.7.0
func (p *ToolsProvider) AddMCPClient(client *Client) error
AddMCPClient injects the MCP client into the provider. If tools are added, MCP client usage is restricted.
func (*ToolsProvider) AddTools ¶ added in v0.7.0
func (p *ToolsProvider) AddTools(tools []Tool) error
AddTools injects tools into the provider. If tools are added, MCP client usage is restricted.
func (*ToolsProvider) ExecuteTool ¶ added in v0.7.0
func (p *ToolsProvider) ExecuteTool(ctx context.Context, params CallToolParams) (CallToolResult, error)
ExecuteTool executes a tool with the specified ID, name, and parameters.
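Example usage (a sketch of registering a tool directly with a ToolsProvider and executing it; the CallToolParams field names used below are assumptions inferred from the tool-handling types elsewhere in this package):

echoTool := Tool{
    Name:        "echo",
    Description: "Echoes back the provided text.",
    InputSchema: json.RawMessage(`{
        "type": "object",
        "properties": {"text": {"type": "string"}},
        "required": ["text"]
    }`),
    Handler: func(ctx context.Context, params CallToolParams) (CallToolResult, error) {
        var input struct {
            Text string `json:"text"`
        }
        if err := json.Unmarshal(params.Arguments, &input); err != nil {
            return CallToolResult{}, err
        }
        return CallToolResult{
            Content: []ToolResultContent{{Type: "text", Text: input.Text}},
        }, nil
    },
}

provider := NewToolsProvider()
if err := provider.AddTools([]Tool{echoTool}); err != nil {
    log.Fatal(err)
}

result, err := provider.ExecuteTool(ctx, CallToolParams{
    Name:      "echo", // Name/Arguments field names are assumed
    Arguments: json.RawMessage(`{"text": "hello"}`),
})
if err != nil {
    log.Fatal(err)
}
fmt.Println(result.Content[0].Text)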
type TracingLLMProvider ¶ added in v0.10.0
type TracingLLMProvider struct {
// contains filtered or unexported fields
}
TracingLLMProvider implements the decorator pattern for tracing
func NewTracingLLMProvider ¶ added in v0.10.0
func NewTracingLLMProvider(provider LLMProvider) *TracingLLMProvider
NewTracingLLMProvider creates a new tracing decorator for any LLMProvider
func (*TracingLLMProvider) GetResponse ¶ added in v0.10.0
func (t *TracingLLMProvider) GetResponse(ctx context.Context, messages []LLMMessage, config LLMRequestConfig) (LLMResponse, error)
GetResponse implements LLMProvider interface with added tracing
func (*TracingLLMProvider) GetStreamingResponse ¶ added in v0.10.0
func (t *TracingLLMProvider) GetStreamingResponse(ctx context.Context, messages []LLMMessage, config LLMRequestConfig) (<-chan StreamingLLMResponse, error)
GetStreamingResponse implements LLMProvider interface with added tracing
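Example usage (a sketch of the decorator wrapped around the no-op provider; any LLMProvider implementation can be wrapped the same way):

base := NewNoOpsLLMProvider(WithResponse(LLMResponse{Text: "ok"}))
traced := NewTracingLLMProvider(base)

response, err := traced.GetResponse(ctx,
    []LLMMessage{{Role: "user", Text: "ping"}},
    LLMRequestConfig{},
)
if err != nil {
    log.Fatal(err)
}
fmt.Println(response.Text)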
type VectorCollectionConfig ¶
type VectorCollectionConfig struct {
    // Name is the identifier for the collection
    Name string `json:"name"`

    // Dimension is the size of the vectors in this collection
    Dimension int `json:"dimension"`

    // IndexType specifies the type of index to use for similarity search
    IndexType VectorIndexType `json:"index_type"`

    // DistanceType specifies the distance metric to use for similarity search
    DistanceType VectorDistanceType `json:"distance_type"`

    // CustomFields allows defining additional schema fields
    CustomFields map[string]VectorFieldConfig `json:"custom_fields,omitempty"`
}
VectorCollectionConfig defines the configuration for a vector collection
type VectorDistanceType ¶
type VectorDistanceType string
VectorDistanceType represents the distance metric used for similarity search
const (
    // DistanceTypeCosine represents cosine similarity distance metric
    DistanceTypeCosine VectorDistanceType = "cosine"

    // DistanceTypeEuclidean represents Euclidean distance metric
    DistanceTypeEuclidean VectorDistanceType = "euclidean"

    // DistanceTypeDotProduct represents dot product distance metric
    DistanceTypeDotProduct VectorDistanceType = "dot_product"
)
type VectorDocument ¶
type VectorDocument struct {
    // ID is the unique identifier for the document
    ID string `json:"id"`

    // Vector is the embedding representation of the document
    Vector []float32 `json:"vector"`

    // Content is the original text content of the document
    Content string `json:"content"`

    // Metadata stores additional information about the document
    Metadata map[string]interface{} `json:"metadata"`

    // CreatedAt is the timestamp when the document was created
    CreatedAt time.Time `json:"created_at"`

    // UpdatedAt is the timestamp when the document was last updated
    UpdatedAt time.Time `json:"updated_at"`
}
VectorDocument represents a document with its embedding vector and metadata.
type VectorError ¶
VectorError represents errors that occur during vector storage operations.
func (*VectorError) Error ¶
func (e *VectorError) Error() string
Error implements the error interface.
type VectorFieldConfig ¶
type VectorFieldConfig struct {
    // Type specifies the data type of the field
    Type string `json:"type"`

    // Required indicates if the field must be present
    Required bool `json:"required"`

    // Indexed indicates if the field should be indexed for searching
    Indexed bool `json:"indexed"`
}
VectorFieldConfig defines the configuration for a custom field in the schema
type VectorIndexType ¶
type VectorIndexType string
VectorIndexType represents the type of index used for vector similarity search
const (
    // IndexTypeFlat represents a flat (brute force) index
    IndexTypeFlat VectorIndexType = "flat"

    // IndexTypeIVFFlat represents an IVF (Inverted File) flat index
    IndexTypeIVFFlat VectorIndexType = "ivf_flat"

    // IndexTypeHNSW represents a Hierarchical Navigable Small World graph index
    IndexTypeHNSW VectorIndexType = "hnsw"
)
type VectorSearchOptions ¶
type VectorSearchOptions struct {
    // Limit specifies the maximum number of results to return
    Limit int `json:"limit"`

    // Offset specifies the number of results to skip
    Offset int `json:"offset"`

    // Filter is an optional query to filter results
    Filter map[string]interface{} `json:"filter,omitempty"`

    // IncludeMetadata indicates whether to include metadata in the results
    IncludeMetadata bool `json:"include_metadata"`

    // IncludeVectors indicates whether to include vectors in the results
    IncludeVectors bool `json:"include_vectors"`
}
VectorSearchOptions defines the options for vector similarity search
type VectorSearchResult ¶
type VectorSearchResult struct {
    // Document is the matched document
    Document *VectorDocument `json:"document"`

    // Score is the similarity score (lower is more similar)
    Score float32 `json:"score"`

    // Distance is the actual distance value used for scoring
    Distance float32 `json:"distance"`
}
VectorSearchResult represents a single result from a vector similarity search
type VectorStorage ¶
type VectorStorage struct {
// contains filtered or unexported fields
}
VectorStorage provides a high-level interface for vector storage operations. It acts as a facade for the underlying storage provider implementation.
func NewVectorStorage ¶
func NewVectorStorage(ctx context.Context, provider VectorStorageProvider) (*VectorStorage, error)
NewVectorStorage creates a new VectorStorage instance with the specified provider and initializes the storage.
Example usage:
provider, err := NewPostgresProvider(pgConfig)
if err != nil {
    log.Fatal(err)
}

storage, err := NewVectorStorage(ctx, provider)
if err != nil {
    log.Fatal(err)
}
func (*VectorStorage) Close ¶
func (s *VectorStorage) Close() error
Close closes the underlying storage provider connection.
func (*VectorStorage) CreateCollection ¶
func (s *VectorStorage) CreateCollection(ctx context.Context, config *VectorCollectionConfig) error
CreateCollection creates a new vector collection with the specified configuration.
func (*VectorStorage) DeleteCollection ¶
func (s *VectorStorage) DeleteCollection(ctx context.Context, name string) error
DeleteCollection removes an existing vector collection.
func (*VectorStorage) DeleteDocument ¶
func (s *VectorStorage) DeleteDocument(ctx context.Context, collection, id string) error
DeleteDocument removes a document from the collection.
func (*VectorStorage) GetDocument ¶
func (s *VectorStorage) GetDocument(ctx context.Context, collection, id string) (*VectorDocument, error)
GetDocument retrieves a document by its ID.
func (*VectorStorage) ListCollections ¶
func (s *VectorStorage) ListCollections(ctx context.Context) ([]string, error)
ListCollections returns a list of all available collections.
func (*VectorStorage) SearchByID ¶
func (s *VectorStorage) SearchByID(ctx context.Context, collection, id string, opts *VectorSearchOptions) ([]VectorSearchResult, error)
SearchByID performs a similarity search using an existing document as the query.
func (*VectorStorage) SearchByVector ¶
func (s *VectorStorage) SearchByVector(ctx context.Context, collection string, vector []float32, opts *VectorSearchOptions) ([]VectorSearchResult, error)
SearchByVector performs a vector similarity search.
func (*VectorStorage) UpsertDocument ¶
func (s *VectorStorage) UpsertDocument(ctx context.Context, collection string, doc *VectorDocument) error
UpsertDocument adds or updates a document in the specified collection.
func (*VectorStorage) UpsertDocuments ¶
func (s *VectorStorage) UpsertDocuments(ctx context.Context, collection string, docs []*VectorDocument) error
UpsertDocuments adds or updates multiple documents in batch.
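Example usage (a sketch of a full collection/upsert/search round trip, continuing from the storage value created in the NewVectorStorage example above; the three-dimensional vectors are toy values):

err := storage.CreateCollection(ctx, &VectorCollectionConfig{
    Name:         "articles",
    Dimension:    3,
    IndexType:    IndexTypeHNSW,
    DistanceType: DistanceTypeCosine,
})
if err != nil {
    log.Fatal(err)
}

err = storage.UpsertDocument(ctx, "articles", &VectorDocument{
    ID:       "doc-1",
    Vector:   []float32{0.1, 0.2, 0.3},
    Content:  "hello world",
    Metadata: map[string]interface{}{"source": "demo"},
})
if err != nil {
    log.Fatal(err)
}

results, err := storage.SearchByVector(ctx, "articles", []float32{0.1, 0.2, 0.3}, &VectorSearchOptions{
    Limit:           5,
    IncludeMetadata: true,
})
if err != nil {
    log.Fatal(err)
}
for _, r := range results {
    fmt.Printf("%s score=%.3f\n", r.Document.ID, r.Score)
}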
type VectorStorageProvider ¶
type VectorStorageProvider interface {
    // Initialize sets up the required database structure
    Initialize(ctx context.Context) error

    // Collection Operations
    CreateCollection(ctx context.Context, config *VectorCollectionConfig) error
    DeleteCollection(ctx context.Context, name string) error
    ListCollections(ctx context.Context) ([]string, error)

    // Document Operations
    UpsertDocument(ctx context.Context, collection string, doc *VectorDocument) error
    UpsertDocuments(ctx context.Context, collection string, docs []*VectorDocument) error
    GetDocument(ctx context.Context, collection, id string) (*VectorDocument, error)
    DeleteDocument(ctx context.Context, collection, id string) error

    // Search Operations
    SearchByVector(ctx context.Context, collection string, vector []float32, opts *VectorSearchOptions) ([]VectorSearchResult, error)
    SearchByID(ctx context.Context, collection, id string, opts *VectorSearchOptions) ([]VectorSearchResult, error)

    // Lifecycle Operations
    Close() error
}
VectorStorageProvider defines the interface that all storage providers must implement.
type VectorValidator ¶
type VectorValidator struct {
// contains filtered or unexported fields
}
VectorValidator provides validation utilities for vector storage operations.
func NewVectorValidator ¶
func NewVectorValidator(maxDimension int) *VectorValidator
NewVectorValidator creates a new validator with specified constraints.
func (*VectorValidator) ValidateCollection ¶
func (v *VectorValidator) ValidateCollection(config *VectorCollectionConfig) error
ValidateCollection validates a collection configuration.
func (*VectorValidator) ValidateDocument ¶
func (v *VectorValidator) ValidateDocument(doc *VectorDocument, config *VectorCollectionConfig) error
ValidateDocument validates a document before storage.
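Example usage (a sketch; the dimension limits and values are arbitrary):

validator := NewVectorValidator(1536)

config := &VectorCollectionConfig{
    Name:         "articles",
    Dimension:    768,
    IndexType:    IndexTypeFlat,
    DistanceType: DistanceTypeCosine,
}
if err := validator.ValidateCollection(config); err != nil {
    log.Fatal(err)
}

doc := &VectorDocument{ID: "doc-1", Vector: make([]float32, 768)}
if err := validator.ValidateDocument(doc, config); err != nil {
    log.Fatal(err)
}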
type XMLExtractor ¶ added in v0.3.0
type XMLExtractor struct {
// Target is a pointer to the struct where XML data should be unmarshaled.
Target interface{}
}
XMLExtractor implements ResponseExtractor for XML formatted responses.
func NewXMLExtractor ¶ added in v0.3.0
func NewXMLExtractor(target interface{}) *XMLExtractor
NewXMLExtractor creates a new XMLExtractor with the specified target struct.
func (*XMLExtractor) Extract ¶ added in v0.3.0
func (e *XMLExtractor) Extract(response LLMResponse) (interface{}, error)
Extract implements ResponseExtractor.Extract for XML data.
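Example usage (a sketch; it assumes the response text is well-formed XML matching the target struct, since the exact parsing behaviour of Extract is not documented above):

type person struct {
    Name string `xml:"name"`
    Age  int    `xml:"age"`
}

var target person
extractor := NewXMLExtractor(&target)

response := LLMResponse{
    Text: "<person><name>Alice</name><age>30</age></person>",
}
if _, err := extractor.Extract(response); err != nil {
    log.Fatal(err)
}
fmt.Println(target.Name, target.Age) // Alice 30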
type ZapLogger ¶ added in v0.19.0
type ZapLogger struct {
// contains filtered or unexported fields
}
ZapLogger implements the Logger interface using uber-go/zap
func (*ZapLogger) Debug ¶ added in v0.19.0
func (l *ZapLogger) Debug(args ...interface{})
Debug log for ZapLogger
func (*ZapLogger) Error ¶ added in v0.19.0
func (l *ZapLogger) Error(args ...interface{})
Error log for ZapLogger
func (*ZapLogger) Info ¶ added in v0.19.0
func (l *ZapLogger) Info(args ...interface{})
Info log for ZapLogger
func (*ZapLogger) Warn ¶ added in v0.19.0
func (l *ZapLogger) Warn(args ...interface{})
Warn log for ZapLogger
func (*ZapLogger) WithContext ¶ added in v0.19.0
WithContext adds context to the logger and returns a new ZapLogger
func (*ZapLogger) WithErr ¶ added in v0.19.0
WithErr adds an error to the logger and returns a new ZapLogger
func (*ZapLogger) WithFields ¶ added in v0.19.0
WithFields adds fields to the logger and returns a new ZapLogger
Source Files
¶
- aws_bedrock.go
- aws_bedrock_mock_client.go
- chat_history_inmemory.go
- chat_history_sqlite.go
- chat_history_storage.go
- chat_history_types.go
- chunking.go
- doc.go
- embedding.go
- embedding_aws_bedrock_provider.go
- gemin_model_service.go
- llm.go
- llm_provider_anthropic.go
- llm_provider_anthropic_client.go
- llm_provider_anthropic_streaming.go
- llm_provider_aws_bedrock.go
- llm_provider_aws_bedrock_streaming.go
- llm_provider_gemini.go
- llm_provider_no_ops.go
- llm_provider_openai.go
- llm_provider_openai_client.go
- logger.go
- mcp_client.go
- mcp_client_sse.go
- mcp_client_std.go
- mcp_doc.go
- mcp_logging_manager.go
- mcp_server_base.go
- mcp_server_sse.go
- mcp_server_stdio.go
- mcp_types.go
- mcp_util.go
- prompt.go
- response_extractor.go
- tools_provider.go
- tracing.go
- tracing_llm_provider.go
- types.go
- vector_storage.go
- vector_storage_postgres.go
- vector_validation.go