Documentation
¶
Overview ¶
Package chat defines provider-agnostic chat types, request options, and streaming helpers; it also contains a generated GoMock mock of the Provider interface.
Index ¶
- func NewProviderEventStream[TypeIn any](decoder streaming.Streamer[TypeIn], ...) streaming.Streamer[EventStream]
- func StreamChatMessageToChannel(ctx context.Context, stream streaming.Streamer[EventStream], ...) error
- func WithMaxTokens(tokens int) func(*ChatParams)
- func WithMessages(messages ...*ChatMessage) func(*ChatParams)
- func WithModel(model string) func(*ChatParams)
- func WithTemperature(temp float64) func(*ChatParams)
- func WithTools(tools ...Tool) func(*ChatParams)
- type AIContentSrc
- type CacheControl
- type ChatChoice
- type ChatMessage
- type ChatParams
- type ChatResponse
- type ChatUsage
- type EventStream
- type MessageContent
- func NewSourceContent(sourceType string, mediaType string, data []byte) *MessageContent
- func NewTextContent(text string) *MessageContent
- func NewToolResultContent(toolUseID, content string) *MessageContent
- func NewToolResultContentInterface(toolUseID string, content interface{}) (*MessageContent, error)
- func NewToolUseContent(id, name string, args json.RawMessage) *MessageContent
- type MessageContentType
- type MockProvider
- type MockProviderMockRecorder
- type Provider
- type Tool
Constants ¶
This section is empty.
Variables ¶
This section is empty.
Functions ¶
func NewProviderEventStream ¶
func NewProviderEventStream[TypeIn any]( decoder streaming.Streamer[TypeIn], handler streaming.StreamHandler[EventStream, TypeIn], ) streaming.Streamer[EventStream]
NewProviderEventStream creates a new stream that normalizes provider events.
func StreamChatMessageToChannel ¶
func StreamChatMessageToChannel( ctx context.Context, stream streaming.Streamer[EventStream], ch chan<- EventStream, ) error
func WithMaxTokens ¶
func WithMaxTokens(tokens int) func(*ChatParams)
WithMaxTokens sets the max tokens for ChatParams.
func WithMessages ¶
func WithMessages( messages ...*ChatMessage, ) func(*ChatParams)
WithMessages sets the messages for ChatParams.
func WithModel ¶
func WithModel(model string) func(*ChatParams)
WithModel sets the model for ChatParams.
func WithTemperature ¶
func WithTemperature(temp float64) func(*ChatParams)
WithTemperature sets the temperature for ChatParams.
func WithTools ¶
func WithTools(tools ...Tool) func(*ChatParams)
WithTools sets the tools/functions for ChatParams.
Types ¶
type AIContentSrc ¶
type CacheControl ¶
type CacheControl struct {
Type string `json:"type"`
}
func NewCacheControlEphemeral ¶
func NewCacheControlEphemeral() *CacheControl
type ChatChoice ¶
type ChatChoice struct {
Role string `json:"role,omitempty"` // Always "assistant"
Content []*MessageContent `json:"content,omitempty"`
StopReason string `json:"stop_reason,omitempty"`
}
StopReason is the reason the model stopped generating messages. It can be one of: - `"end_turn"`: the model reached a natural stopping point - `"max_tokens"`: we exceeded the requested `max_tokens` or the model's maximum - `"stop_sequence"`: one of your provided custom `stop_sequences` was generated - `"tool_use"`: the model invoked one or more tools
type ChatMessage ¶
type ChatMessage struct {
Role string `json:"role"`
Content []*MessageContent `json:"content"`
}
func NewMessage ¶
func NewMessage(role string, content ...*MessageContent) *ChatMessage
func NewSystemMessage ¶
func NewSystemMessage(text string) *ChatMessage
func NewUserMessage ¶
func NewUserMessage(text string) *ChatMessage
func (*ChatMessage) SetCache ¶
func (cm *ChatMessage) SetCache()
type ChatParams ¶
type ChatParams struct {
Model string
MaxTokens int
Temperature float64
Messages []*ChatMessage
Stream bool
Tools []Tool
ToolChoice string // auto, any, tool
N *int // number of choices
}
func (*ChatParams) Update ¶
func (p *ChatParams) Update(opts ...func(*ChatParams))
type ChatResponse ¶
type ChatResponse struct {
ID string `json:"id,omitempty"`
Choice []ChatChoice `json:"choice,omitempty"`
Usage *ChatUsage `json:"usage,omitempty"`
Model string `json:"model,omitempty"`
}
func (*ChatResponse) HasContent ¶
func (cm *ChatResponse) HasContent() bool
func (*ChatResponse) ToMessageParams ¶
func (cm *ChatResponse) ToMessageParams() *ChatMessage
type ChatUsage ¶
type ChatUsage struct {
OutputTokens int `json:"output_tokens"`
OutputAudioTokens int `json:"output_audio_tokens"`
OutputReasoningTokens int `json:"output_reasoning_tokens"`
InputTokens int `json:"input_tokens"`
InputAudioTokens int `json:"input_audio_tokens"`
InputCachedTokens int `json:"input_cached_tokens"`
InputCacheCreationTokens int `json:"input_cache_creation_tokens"`
}
type EventStream ¶
type EventStream struct {
Type string // text_delta, message_start, message_stop, etc
Delta interface{}
Message *ChatResponse
}
EventStream represents a normalized stream event across providers
type MessageContent ¶
type MessageContent struct {
Type MessageContentType `json:"type"`
// Relevant for text content
Text string `json:"text,omitempty"`
PartialJson string `json:"partial_json,omitempty"`
// Relevant for tool usage calls (like "function calls")
ID string `json:"id,omitempty"` // Unique identifier for this tool call
Name string `json:"name,omitempty"` // Name of the tool to call
Input json.RawMessage `json:"input,omitempty"` // Arguments to pass to the tool
InputJson []byte `json:"-"` // Arguments to pass to the tool in JSON format
// Relevant for tool results
ToolUseID string `json:"tool_use_id,omitempty"` // ID of the tool call this result is for
Content string `json:"content,omitempty"` // Result returned from the tool
Source *AIContentSrc `json:"source,omitempty"` // Source of the content if type document/image
CacheControl *CacheControl `json:"cache_control,omitempty"` // Used to set cache
}
MessageContent holds text, tool calls, or other specialized content
func NewSourceContent ¶
func NewSourceContent(sourceType string, mediaType string, data []byte) *MessageContent
func NewTextContent ¶
func NewTextContent(text string) *MessageContent
NewTextContent creates a text content message
func NewToolResultContent ¶
func NewToolResultContent(toolUseID, content string) *MessageContent
NewToolResultContent creates a tool result content message
func NewToolResultContentInterface ¶
func NewToolResultContentInterface(toolUseID string, content interface{}) (*MessageContent, error)
func NewToolUseContent ¶
func NewToolUseContent(id, name string, args json.RawMessage) *MessageContent
NewToolUseContent creates a tool use content message
func (MessageContent) GetType ¶
func (c MessageContent) GetType() string
GetType returns the content type
func (*MessageContent) IsCacheable ¶
func (m *MessageContent) IsCacheable() bool
func (MessageContent) Raw ¶
func (c MessageContent) Raw() interface{}
Raw returns the entire struct as a generic interface
func (*MessageContent) SetCache ¶
func (m *MessageContent) SetCache()
func (MessageContent) String ¶
func (c MessageContent) String() string
String returns a human-readable string (for debugging/logging)
type MessageContentType ¶
type MessageContentType string
MessageContentType enumerates possible content types we handle
const ( // Common Anthropic/OpenAI ContentTypeText MessageContentType = "text" ContentTypeTextDelta MessageContentType = "text_delta" // Anthropic ContentTypeInputJsonDelta MessageContentType = "input_json_delta" ContentTypeToolUse MessageContentType = "tool_use" ContentTypeToolResult MessageContentType = "tool_result" ContentTypeDocument MessageContentType = "document" ContentTypeImage MessageContentType = "image" // OpenAI ContentTypeInputAudio MessageContentType = "input_audio" )
type MockProvider ¶
type MockProvider struct {
// contains filtered or unexported fields
}
MockProvider is a mock of Provider interface.
func NewMockProvider ¶
func NewMockProvider(ctrl *gomock.Controller) *MockProvider
NewMockProvider creates a new mock instance.
func (*MockProvider) EXPECT ¶
func (m *MockProvider) EXPECT() *MockProviderMockRecorder
EXPECT returns an object that allows the caller to indicate expected use.
func (*MockProvider) Send ¶
func (m *MockProvider) Send(ctx context.Context, params ChatParams) (*ChatResponse, error)
Send mocks base method.
func (*MockProvider) Stream ¶
func (m *MockProvider) Stream(ctx context.Context, params ChatParams) (streaming.Streamer[EventStream], error)
Stream mocks base method.
type MockProviderMockRecorder ¶
type MockProviderMockRecorder struct {
// contains filtered or unexported fields
}
MockProviderMockRecorder is the mock recorder for MockProvider.
type Provider ¶
type Provider interface {
// For a single-turn request
Send(ctx context.Context, params ChatParams) (*ChatResponse, error)
// For streaming support
Stream(
ctx context.Context,
params ChatParams,
) (streaming.Streamer[EventStream], error)
}
Provider is the interface implemented by chat backends: Send performs a single-turn request, and Stream returns a streaming.Streamer of normalized EventStream events.