Documentation
¶
Index ¶
Constants ¶
This section is empty.
Variables ¶
This section is empty.
Functions ¶
This section is empty.
Types ¶
type FinishReason ¶
// FinishReason represents the reason why the model finished generating a
// response.
type FinishReason string

const (
	// FinishReasonStop means the model reached a natural stopping point or
	// the max tokens.
	FinishReasonStop FinishReason = "stop"

	// FinishReasonLength means the model reached the token limit.
	FinishReasonLength FinishReason = "length"

	// FinishReasonToolCalls means the model called a tool.
	FinishReasonToolCalls FinishReason = "tool_calls"

	// FinishReasonFunctionCall is used when the model calls a function
	// (legacy).
	FinishReasonFunctionCall FinishReason = "function_call"

	// FinishReasonContentFilter means the content was filtered.
	FinishReasonContentFilter FinishReason = "content_filter"

	// FinishReasonNull means no finish reason was provided.
	FinishReasonNull FinishReason = "null"
)
type ImageURLDetail ¶
// ImageURLDetail controls the fidelity level requested for an image URL.
type ImageURLDetail string

const (
	// ImageURLDetailHigh requests high-detail image processing.
	ImageURLDetailHigh ImageURLDetail = "high"
	// ImageURLDetailLow requests low-detail image processing.
	ImageURLDetailLow ImageURLDetail = "low"
	// ImageURLDetailAuto lets the model choose the detail level.
	ImageURLDetailAuto ImageURLDetail = "auto"
)
type Message ¶
type Message struct { Role MessageRole `json:"role"` Content string `json:"content"` Refusal string `json:"refusal,omitempty"` MultiContent []MessagePart `json:"multi_content,omitempty"` // This property isn't in the official documentation, but it's in // the documentation for the official library for python: // - https://github.com/openai/openai-python/blob/main/chatml.md // - https://github.com/openai/openai-cookbook/blob/main/examples/How_to_count_tokens_with_tiktoken.ipynb Name string `json:"name,omitempty"` // This property is used for the "reasoning" feature supported by deepseek-reasoner // which is not in the official documentation. // the doc from deepseek: // - https://api-docs.deepseek.com/api/create-chat-completion#responses ReasoningContent string `json:"reasoning_content,omitempty"` FunctionCall *tools.FunctionCall `json:"function_call,omitempty"` // For Role=assistant prompts this may be set to the tool calls generated by the model, such as function calls. ToolCalls []tools.ToolCall `json:"tool_calls,omitempty"` // For Role=tool prompts this should be set to the ID given in the assistant's prior request to call a tool. ToolCallID string `json:"tool_call_id,omitempty"` // CreatedAt is the time the message was created CreatedAt string `json:"created_at,omitempty"` }
type MessageDelta ¶
type MessageDelta struct { Role string `json:"role,omitempty"` Content string `json:"content,omitempty"` FunctionCall *tools.FunctionCall `json:"function_call,omitempty"` ToolCalls []tools.ToolCall `json:"tool_calls,omitempty"` }
ChatCompletionDelta represents a delta/chunk in a streaming response
type MessageImageURL ¶
type MessageImageURL struct { URL string `json:"url,omitempty"` Detail ImageURLDetail `json:"detail,omitempty"` }
type MessagePart ¶
type MessagePart struct { Type MessagePartType `json:"type,omitempty"` Text string `json:"text,omitempty"` ImageURL *MessageImageURL `json:"image_url,omitempty"` }
type MessagePartType ¶
// MessagePartType discriminates the kind of content held by a MessagePart.
type MessagePartType string

const (
	// MessagePartTypeText marks a plain-text part.
	MessagePartTypeText MessagePartType = "text"
	// MessagePartTypeImageURL marks an image-URL part.
	MessagePartTypeImageURL MessagePartType = "image_url"
)
type MessageRole ¶
// MessageRole identifies the author of a message.
type MessageRole string

const (
	// MessageRoleSystem is a system prompt message.
	MessageRoleSystem MessageRole = "system"
	// MessageRoleUser is a message authored by the end user.
	MessageRoleUser MessageRole = "user"
	// MessageRoleAssistant is a message produced by the model.
	MessageRoleAssistant MessageRole = "assistant"
	// MessageRoleTool is a tool-result message.
	MessageRoleTool MessageRole = "tool"
)
type MessageStream ¶
type MessageStream interface { // Recv gets the next completion chunk Recv() (MessageStreamResponse, error) // Close closes the stream Close() }
ChatCompletionStream interface represents a stream of chat completions
type MessageStreamChoice ¶
type MessageStreamChoice struct { Index int `json:"index"` Delta MessageDelta `json:"delta"` FinishReason FinishReason `json:"finish_reason,omitempty"` }
ChatCompletionStreamChoice represents a choice in a streaming response
type MessageStreamResponse ¶
type MessageStreamResponse struct { ID string `json:"id"` Object string `json:"object"` Created int64 `json:"created"` Model string `json:"model"` Choices []MessageStreamChoice `json:"choices"` Usage *Usage `json:"usage,omitempty"` }
ChatCompletionStreamResponse represents a streaming response from the model
Click to show internal directories.
Click to hide internal directories.