Documentation
¶
Index ¶
- Constants
- Variables
- type ChatCompletionResponse
- type ChatCompletionResponseChoice
- type ChatCompletionResponseChoiceStream
- type ChatCompletionStreamResponse
- type ChatMessage
- type ChatRequestParams
- type Content
- type DeltaMessage
- type Dimensions
- type Document
- type EmbeddingObject
- type EmbeddingResponse
- type FIMCompletionResponse
- type FIMCompletionResponseChoice
- type FIMRequestParams
- type FinishReason
- type Function
- type FunctionCall
- type MistralAPIError
- type MistralClient
- func (c *MistralClient) Chat(model string, messages []ChatMessage, params *ChatRequestParams) (*ChatCompletionResponse, error)
- func (c *MistralClient) ChatStream(model string, messages []ChatMessage, params *ChatRequestParams) (<-chan ChatCompletionStreamResponse, error)
- func (c *MistralClient) Embeddings(model string, input []string) (*EmbeddingResponse, error)
- func (c *MistralClient) FIM(params *FIMRequestParams) (*FIMCompletionResponse, error)
- func (c *MistralClient) ListModels() (*ModelList, error)
- func (c *MistralClient) OCR(model string, document Document, params *OcrParams) (*OcrDocument, error)
- func (c *MistralClient) Vision(model string, messages []VisionMessage, params *ChatRequestParams) (*ChatCompletionResponse, error)
- type MistralConnectionError
- type MistralError
- type ModelCard
- type ModelList
- type ModelPermission
- type OcrDocument
- type OcrParams
- type OcrUsageInfo
- type Page
- type ResponseFormat
- type TextContent
- type Tool
- type ToolCall
- type ToolType
- type UsageInfo
- type VisionContent
- type VisionMessage
Constants ¶
const ( Endpoint = "https://api.mistral.ai" CodestralEndpoint = "https://codestral.mistral.ai" DefaultMaxRetries = 5 DefaultTimeout = 120 * time.Second )
const ( ModelMistralLargeLatest = "mistral-large-latest" ModelMistralMediumLatest = "mistral-medium-latest" ModelMistralSmallLatest = "mistral-small-latest" ModelCodestralLatest = "codestral-latest" ModelOpenMixtral8x7b = "open-mixtral-8x7b" ModelOpenMixtral8x22b = "open-mixtral-8x22b" ModelOpenMistral7b = "open-mistral-7b" ModelMistralLarge2402 = "mistral-large-2402" ModelMistralMedium2312 = "mistral-medium-2312" ModelMistralSmall2402 = "mistral-small-2402" ModelMistralSmall2312 = "mistral-small-2312" ModelMistralTiny = "mistral-tiny-2312" )
const ( RoleUser = "user" RoleAssistant = "assistant" RoleSystem = "system" RoleTool = "tool" )
const ( ToolChoiceAny = "any" ToolChoiceAuto = "auto" ToolChoiceNone = "none" )
Variables ¶
var DefaultChatRequestParams = ChatRequestParams{ Temperature: 1, TopP: 1, RandomSeed: 42069, MaxTokens: 4000, SafePrompt: false, }
Functions ¶
This section is empty.
Types ¶
type ChatCompletionResponse ¶
type ChatCompletionResponse struct { ID string `json:"id"` Object string `json:"object"` Created int `json:"created"` Model string `json:"model"` Choices []ChatCompletionResponseChoice `json:"choices"` Usage UsageInfo `json:"usage"` }
ChatCompletionResponse represents the response from the chat completion endpoint.
type ChatCompletionResponseChoice ¶
type ChatCompletionResponseChoice struct { Index int `json:"index"` Message ChatMessage `json:"message"` FinishReason FinishReason `json:"finish_reason,omitempty"` }
ChatCompletionResponseChoice represents a choice in the chat completion response.
type ChatCompletionResponseChoiceStream ¶
type ChatCompletionResponseChoiceStream struct { Index int `json:"index"` Delta DeltaMessage `json:"delta"` FinishReason FinishReason `json:"finish_reason,omitempty"` }
ChatCompletionResponseChoiceStream represents a choice in the streamed chat completion response.
type ChatCompletionStreamResponse ¶
type ChatCompletionStreamResponse struct { ID string `json:"id"` Model string `json:"model"` Choices []ChatCompletionResponseChoiceStream `json:"choices"` Created int `json:"created,omitempty"` Object string `json:"object,omitempty"` Usage UsageInfo `json:"usage,omitempty"` Error error `json:"error,omitempty"` }
ChatCompletionStreamResponse represents the streamed response from the chat completion endpoint.
type ChatMessage ¶
type ChatMessage struct { Role string `json:"role"` Content string `json:"content"` ToolCalls []ToolCall `json:"tool_calls,omitempty"` }
ChatMessage represents a single message in a chat.
type ChatRequestParams ¶
type ChatRequestParams struct { Temperature float64 `json:"temperature"` // The temperature to use for sampling. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic. We generally recommend altering this or TopP but not both. TopP float64 `json:"top_p"` // An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the tokens with top_p probability mass. So 0.1 means only the tokens comprising the top 10% probability mass are considered. We generally recommend altering this or Temperature but not both. RandomSeed int `json:"random_seed"` MaxTokens int `json:"max_tokens"` SafePrompt bool `json:"safe_prompt"` // Adds a Mistral defined safety message to the system prompt to enforce guardrailing Tools []Tool `json:"tools"` ToolChoice string `json:"tool_choice"` ResponseFormat ResponseFormat `json:"response_format"` }
ChatRequestParams represents the parameters for the Chat/ChatStream method of MistralClient.
type DeltaMessage ¶
type DeltaMessage struct { Role string `json:"role"` Content string `json:"content"` ToolCalls []ToolCall `json:"tool_calls"` }
DeltaMessage represents the delta between the prior state of the message and the new state of the message when streaming responses.
type Dimensions ¶
type EmbeddingObject ¶
type EmbeddingObject struct { Object string `json:"object"` Embedding []float64 `json:"embedding"` Index int `json:"index"` }
EmbeddingObject represents an embedding object in the response.
type EmbeddingResponse ¶
type EmbeddingResponse struct { ID string `json:"id"` Object string `json:"object"` Data []EmbeddingObject `json:"data"` Model string `json:"model"` Usage UsageInfo `json:"usage"` }
EmbeddingResponse represents the response from the embeddings endpoint.
type FIMCompletionResponse ¶
type FIMCompletionResponse struct { ID string `json:"id"` Object string `json:"object"` Created int `json:"created"` Model string `json:"model"` Choices []FIMCompletionResponseChoice `json:"choices"` Usage UsageInfo `json:"usage"` }
FIMCompletionResponse represents the response from the FIM completion endpoint.
type FIMCompletionResponseChoice ¶
type FIMCompletionResponseChoice struct { Index int `json:"index"` Message ChatMessage `json:"message"` FinishReason FinishReason `json:"finish_reason,omitempty"` }
FIMCompletionResponseChoice represents a choice in the FIM completion response.
type FIMRequestParams ¶
type FIMRequestParams struct { Model string `json:"model"` Prompt string `json:"prompt"` Suffix string `json:"suffix"` MaxTokens int `json:"max_tokens"` Temperature float64 `json:"temperature"` Stop []string `json:"stop,omitempty"` }
FIMRequestParams represents the parameters for the FIM method of MistralClient.
type FinishReason ¶
type FinishReason string
FinishReason is the reason that a chat message was finished.
const ( FinishReasonStop FinishReason = "stop" FinishReasonLength FinishReason = "length" FinishReasonError FinishReason = "error" )
type Function ¶
type Function struct { Name string `json:"name"` Description string `json:"description"` Parameters any `json:"parameters"` }
Function is the definition of a function that the LLM can call, including its parameters.
type FunctionCall ¶
FunctionCall represents a request by the LLM to call an external tool.
type MistralAPIError ¶
type MistralAPIError struct { MistralError HTTPStatus int Headers map[string][]string }
MistralAPIError is returned when the API responds with an error message.
func NewMistralAPIError ¶
func NewMistralAPIError(message string, httpStatus int, headers map[string][]string) *MistralAPIError
func (*MistralAPIError) Error ¶
func (e *MistralAPIError) Error() string
type MistralClient ¶
type MistralClient struct {
// contains filtered or unexported fields
}
func NewCodestralClientDefault ¶
func NewCodestralClientDefault(apiKey string) *MistralClient
NewCodestralClientDefault creates a new Codestral API client with the default endpoint and the given API key. Defaults to using CODESTRAL_API_KEY from the environment.
func NewMistralClient ¶
func NewMistralClientDefault ¶
func NewMistralClientDefault(apiKey string) *MistralClient
NewMistralClientDefault creates a new Mistral API client with the default endpoint and the given API key. Defaults to using MISTRAL_API_KEY from the environment.
func (*MistralClient) Chat ¶
func (c *MistralClient) Chat(model string, messages []ChatMessage, params *ChatRequestParams) (*ChatCompletionResponse, error)
func (*MistralClient) ChatStream ¶
func (c *MistralClient) ChatStream(model string, messages []ChatMessage, params *ChatRequestParams) (<-chan ChatCompletionStreamResponse, error)
ChatStream sends a chat message and returns a channel to receive streaming responses.
func (*MistralClient) Embeddings ¶
func (c *MistralClient) Embeddings(model string, input []string) (*EmbeddingResponse, error)
func (*MistralClient) FIM ¶
func (c *MistralClient) FIM(params *FIMRequestParams) (*FIMCompletionResponse, error)
FIM sends a FIM request and returns the completion response.
func (*MistralClient) ListModels ¶
func (c *MistralClient) ListModels() (*ModelList, error)
func (*MistralClient) OCR ¶
func (c *MistralClient) OCR(model string, document Document, params *OcrParams) (*OcrDocument, error)
func (*MistralClient) Vision ¶
func (c *MistralClient) Vision(model string, messages []VisionMessage, params *ChatRequestParams) (*ChatCompletionResponse, error)
type MistralConnectionError ¶
type MistralConnectionError struct {
MistralError
}
MistralConnectionError is returned when the SDK cannot reach the API server for any reason.
func NewMistralConnectionError ¶
func NewMistralConnectionError(message string) *MistralConnectionError
type MistralError ¶
type MistralError struct {
Message string
}
MistralError is the base error type for all Mistral errors.
func (*MistralError) Error ¶
func (e *MistralError) Error() string
type ModelCard ¶
type ModelCard struct { ID string `json:"id"` Object string `json:"object"` Created int `json:"created"` OwnedBy string `json:"owned_by"` Root string `json:"root,omitempty"` Parent string `json:"parent,omitempty"` Permission []ModelPermission `json:"permission"` }
ModelCard represents a model card.
type ModelPermission ¶
type ModelPermission struct { ID string `json:"id"` Object string `json:"object"` Created int `json:"created"` AllowCreateEngine bool `json:"allow_create_engine"` AllowSampling bool `json:"allow_sampling"` AllowLogprobs bool `json:"allow_logprobs"` AllowSearchIndices bool `json:"allow_search_indices"` AllowView bool `json:"allow_view"` AllowFineTuning bool `json:"allow_fine_tuning"` Organization string `json:"organization"` Group string `json:"group,omitempty"` IsBlocking bool `json:"is_blocking"` }
ModelPermission represents the permissions of a model.
type OcrDocument ¶
type OcrDocument struct { Pages []Page `json:"pages"` Model string `json:"model"` UsageInfo OcrUsageInfo `json:"usage_info"` }
type OcrUsageInfo ¶
type Page ¶
type Page struct { Index int `json:"index"` Markdown string `json:"markdown"` Images []string `json:"images"` Dimensions Dimensions `json:"dimensions"` }
type ResponseFormat ¶
type ResponseFormat string
ResponseFormat is the format that the response must adhere to.
const ( ResponseFormatText ResponseFormat = "text" ResponseFormatJsonObject ResponseFormat = "json_object" )
type TextContent ¶
type ToolCall ¶
type ToolCall struct { Id string `json:"id"` Type ToolType `json:"type"` Function FunctionCall `json:"function"` }
ToolCall represents the call to a tool by the LLM.
type ToolType ¶
type ToolType string
ToolType is the type of tool defined for the LLM.
const (
ToolTypeFunction ToolType = "function"
)
type UsageInfo ¶
type UsageInfo struct { PromptTokens int `json:"prompt_tokens"` TotalTokens int `json:"total_tokens"` CompletionTokens int `json:"completion_tokens,omitempty"` }
UsageInfo represents the usage information of a response.