Documentation
Index ¶
- type ChatCompletion
- type ChatCompletionChunk
- type ChatRequest
- type ChatStream
- type Choice
- type Client
- func (c *Client) Chat(ctx context.Context, params *schemas.ChatParams) (*schemas.ChatResponse, error)
- func (c *Client) ChatStream(ctx context.Context, params *schemas.ChatParams) (clients.ChatStream, error)
- func (c *Client) ModelName() string
- func (c *Client) Provider() string
- func (c *Client) SupportChatStream() bool
- type Config
- type ErrorMapper
- type Params
- type StreamChoice
- type Usage
Constants ¶
This section is empty.
Variables ¶
This section is empty.
Functions ¶
This section is empty.
Types ¶
type ChatCompletion ¶
type ChatCompletion struct {
	ID                string   `json:"id"`
	Object            string   `json:"object"`
	Created           int      `json:"created"`
	ModelName         string   `json:"model"`
	SystemFingerprint string   `json:"system_fingerprint"`
	Choices           []Choice `json:"choices"`
	Usage             Usage    `json:"usage"`
}
ChatCompletion is an Azure OpenAI chat completion response. Ref: https://learn.microsoft.com/en-us/azure/ai-services/openai/reference#chat-completions
type ChatCompletionChunk ¶
type ChatCompletionChunk struct {
	ID                string         `json:"id"`
	Object            string         `json:"object"`
	Created           int            `json:"created"`
	ModelName         string         `json:"model"`
	SystemFingerprint string         `json:"system_fingerprint"`
	Choices           []StreamChoice `json:"choices"`
}
ChatCompletionChunk represents a single SSE event into which a chat response is broken during chat streaming. Ref: https://learn.microsoft.com/en-us/azure/ai-services/openai/reference#chat-completions
type ChatRequest ¶
type ChatRequest struct {
	Messages         []schemas.ChatMessage `json:"messages"`
	Temperature      float64               `json:"temperature,omitempty"`
	TopP             float64               `json:"top_p,omitempty"`
	MaxTokens        int                   `json:"max_tokens,omitempty"`
	N                int                   `json:"n,omitempty"`
	StopWords        []string              `json:"stop,omitempty"`
	Stream           bool                  `json:"stream,omitempty"`
	FrequencyPenalty int                   `json:"frequency_penalty,omitempty"`
	PresencePenalty  int                   `json:"presence_penalty,omitempty"`
	LogitBias        *map[int]float64      `json:"logit_bias,omitempty"`
	User             *string               `json:"user,omitempty"`
	Seed             *int                  `json:"seed,omitempty"`
	Tools            []string              `json:"tools,omitempty"`
	ToolChoice       interface{}           `json:"tool_choice,omitempty"`
	ResponseFormat   interface{}           `json:"response_format,omitempty"`
}
ChatRequest is an Azure OpenAI-specific request schema.
func NewChatRequestFromConfig ¶
func NewChatRequestFromConfig(cfg *Config) *ChatRequest
NewChatRequestFromConfig fills the struct from the config. Reflection is not used because of the performance penalty it incurs.
func (*ChatRequest) ApplyParams ¶
func (r *ChatRequest) ApplyParams(params *schemas.ChatParams)
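Below is a hypothetical sketch of how these two pieces combine: defaults come from the provider Config, and the per-call values from schemas.ChatParams are layered on top (roughly what Client.Chat is expected to do internally). The import paths and the helper name are assumptions; adjust them to your module layout.

import (
	"github.com/EinStack/glide/pkg/api/schemas"           // assumed path
	"github.com/EinStack/glide/pkg/providers/azureopenai" // assumed path
)

// buildRequest is a hypothetical helper showing the intended call order.
func buildRequest(cfg *azureopenai.Config, params *schemas.ChatParams) *azureopenai.ChatRequest {
	req := azureopenai.NewChatRequestFromConfig(cfg) // start from config-level defaults
	req.ApplyParams(params)                          // apply per-call messages and overrides

	return req
}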
type ChatStream ¶
type ChatStream struct {
// contains filtered or unexported fields
}
ChatStream represents a chat stream for a specific request.
func NewChatStream ¶
func NewChatStream(
	tel *telemetry.Telemetry,
	client *http.Client,
	req *http.Request,
	finishReasonMapper *openai.FinishReasonMapper,
	errMapper *ErrorMapper,
) *ChatStream
func (*ChatStream) Close ¶
func (s *ChatStream) Close() error
func (*ChatStream) Open ¶
func (s *ChatStream) Open() error
Open initializes and opens a ChatStream.
func (*ChatStream) Recv ¶
func (s *ChatStream) Recv() (*schemas.ChatStreamChunk, error)
Recv receives a chat stream chunk from the ChatStream and returns a ChatStreamChunk object.
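A sketch of reading chunks from an already-opened stream until it is exhausted. It assumes Recv follows the common convention of returning io.EOF once the stream is done, and that clients.ChatStream (the interface returned by Client.ChatStream below) exposes Recv and Close; the import path is likewise an assumption.

import (
	"errors"
	"fmt"
	"io"

	"github.com/EinStack/glide/pkg/providers/clients" // assumed path
)

// drainStream is a hypothetical helper: it reads chunks until the stream ends,
// then closes it. io.EOF as the end-of-stream signal is an assumption.
func drainStream(stream clients.ChatStream) error {
	defer stream.Close()

	for {
		chunk, err := stream.Recv()
		if errors.Is(err, io.EOF) {
			return nil // stream finished
		}
		if err != nil {
			return err
		}

		fmt.Printf("%+v\n", chunk) // e.g. forward the chunk to the caller
	}
}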
type Choice ¶
type Choice struct {
	Index        int                 `json:"index"`
	Message      schemas.ChatMessage `json:"message"`
	Logprobs     interface{}         `json:"logprobs"`
	FinishReason string              `json:"finish_reason"`
}
type Client ¶
type Client struct {
// contains filtered or unexported fields
}
Client is a client for accessing the Azure OpenAI API.
func NewClient ¶
func NewClient(providerConfig *Config, clientConfig *clients.ClientConfig, tel *telemetry.Telemetry) (*Client, error)
NewClient creates a new client for the Azure OpenAI API.
func (*Client) Chat ¶
func (c *Client) Chat(ctx context.Context, params *schemas.ChatParams) (*schemas.ChatResponse, error)
Chat sends a chat request to the specified Azure OpenAI model.
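A hypothetical end-to-end sketch: build a Config, construct the client, and send a single chat request. The import paths, the Messages field on schemas.ChatParams, the Role/Content fields on schemas.ChatMessage, and the zero-value ClientConfig and telemetry are all assumptions made to keep the sketch short; wire in real values in your application.

package main

import (
	"context"
	"fmt"
	"log"

	"github.com/EinStack/glide/pkg/api/schemas"           // assumed path
	"github.com/EinStack/glide/pkg/providers/azureopenai" // assumed path
	"github.com/EinStack/glide/pkg/providers/clients"     // assumed path
	"github.com/EinStack/glide/pkg/telemetry"             // assumed path
)

func main() {
	cfg := &azureopenai.Config{
		BaseURL:    "https://glide-test.openai.azure.com/", // your Azure OpenAI resource
		ModelName:  "glide-gpt-35",                         // your deployment name
		APIVersion: "2023-05-15",
		APIKey:     "<your-azure-openai-api-key>", // fields.Secret is assumed to be string-based
	}

	// Placeholders to keep the sketch short: use your real HTTP client settings
	// and telemetry instance here.
	clientCfg := &clients.ClientConfig{}
	var tel *telemetry.Telemetry

	client, err := azureopenai.NewClient(cfg, clientCfg, tel)
	if err != nil {
		log.Fatal(err)
	}

	// The ChatParams/ChatMessage shape is an assumption; check the schemas package.
	params := &schemas.ChatParams{
		Messages: []schemas.ChatMessage{
			{Role: "user", Content: "What is the capital of France?"},
		},
	}

	resp, err := client.Chat(context.Background(), params)
	if err != nil {
		log.Fatal(err)
	}

	fmt.Printf("%+v\n", resp)
}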
func (*Client) ChatStream ¶
func (c *Client) ChatStream(ctx context.Context, params *schemas.ChatParams) (clients.ChatStream, error)
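A streaming counterpart to the sketch above: obtain the stream from the client, open it, and then drain it (see the drainStream sketch under Recv). It assumes the caller is responsible for calling Open before Recv; the import paths and helper names are assumptions.

import (
	"context"

	"github.com/EinStack/glide/pkg/api/schemas"           // assumed path
	"github.com/EinStack/glide/pkg/providers/azureopenai" // assumed path
)

// streamChat is a hypothetical helper built on the client and drainStream sketches above.
func streamChat(ctx context.Context, client *azureopenai.Client, params *schemas.ChatParams) error {
	stream, err := client.ChatStream(ctx, params)
	if err != nil {
		return err
	}

	if err := stream.Open(); err != nil { // assumption: the caller opens the stream
		return err
	}

	return drainStream(stream) // reads until io.EOF and closes the stream
}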
func (*Client) ModelName ¶
func (c *Client) ModelName() string
func (*Client) Provider ¶
func (c *Client) Provider() string
func (*Client) SupportChatStream ¶
func (c *Client) SupportChatStream() bool
type Config ¶
type Config struct {
	BaseURL       string        `yaml:"base_url" json:"base_url" validate:"required"`      // The name of your Azure OpenAI Resource (e.g https://glide-test.openai.azure.com/)
	ChatEndpoint  string        `yaml:"chat_endpoint" json:"chat_endpoint"`
	ModelName     string        `yaml:"model" json:"model" validate:"required"`            // This is your deployment name. You're required to first deploy a model before you can make calls (e.g. glide-gpt-35)
	APIVersion    string        `yaml:"api_version" json:"apiVersion" validate:"required"` // The API version to use for this operation. This follows the YYYY-MM-DD format (e.g 2023-05-15)
	APIKey        fields.Secret `yaml:"api_key" json:"-" validate:"required"`
	DefaultParams *Params       `yaml:"default_params,omitempty" json:"default_params"`
}
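For reference, a hypothetical YAML snippet matching the yaml tags above; the values are placeholders, and where this block sits inside a larger Glide config file is not shown here.

base_url: https://glide-test.openai.azure.com/
model: glide-gpt-35
api_version: "2023-05-15"
api_key: "<your-azure-openai-api-key>"
default_params:
  temperature: 0.8
  max_tokens: 512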
func (*Config) UnmarshalYAML ¶
type ErrorMapper ¶
type ErrorMapper struct {
// contains filtered or unexported fields
}
func NewErrorMapper ¶
func NewErrorMapper(tel *telemetry.Telemetry) *ErrorMapper
type Params ¶
type Params struct {
	Temperature      float64          `yaml:"temperature,omitempty" json:"temperature"`
	TopP             float64          `yaml:"top_p,omitempty" json:"top_p"`
	MaxTokens        int              `yaml:"max_tokens,omitempty" json:"max_tokens"`
	N                int              `yaml:"n,omitempty" json:"n"`
	StopWords        []string         `yaml:"stop,omitempty" json:"stop"`
	FrequencyPenalty int              `yaml:"frequency_penalty,omitempty" json:"frequency_penalty"`
	PresencePenalty  int              `yaml:"presence_penalty,omitempty" json:"presence_penalty"`
	LogitBias        *map[int]float64 `yaml:"logit_bias,omitempty" json:"logit_bias"`
	User             *string          `yaml:"user,omitempty" json:"user"`
	Seed             *int             `yaml:"seed,omitempty" json:"seed"`
	Tools            []string         `yaml:"tools,omitempty" json:"tools"`
	ToolChoice       interface{}      `yaml:"tool_choice,omitempty" json:"tool_choice"`
	ResponseFormat   interface{}      `yaml:"response_format,omitempty" json:"response_format"` // TODO: should this be a part of the chat request API?
}
Params defines OpenAI-specific model params with validation of their values. TODO: Add validations
func DefaultParams ¶
func DefaultParams() Params
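A small sketch of adjusting the defaults before wiring them into a Config; the field names come straight from the Params struct above, while the override values are arbitrary.

params := azureopenai.DefaultParams()
params.Temperature = 0.2 // override whichever defaults you need
params.MaxTokens = 256

cfg.DefaultParams = &params // cfg is a *azureopenai.Config as in the Config section above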
func (*Params) UnmarshalYAML ¶
type StreamChoice ¶
type StreamChoice struct {
	Index        int                 `json:"index"`
	Delta        schemas.ChatMessage `json:"delta"`
	FinishReason string              `json:"finish_reason"`
}