gpt_4

package
v0.0.0-...-70eaef7 Latest
Warning

This package is not in the latest version of its module.

Go to latest
Published: Sep 4, 2024 License: AGPL-3.0 Imports: 4 Imported by: 0

Documentation

Index

Constants

This section is empty.

Variables

This section is empty.

Functions

This section is empty.

Types

type ChatCompletionResponse

// ChatCompletionResponse is the response body returned by the chat
// completions API for a single request.
type ChatCompletionResponse struct {
	ID      string   `json:"id"`      // Unique identifier assigned by the API.
	Object  string   `json:"object"`  // Object type label from the API response.
	Created int64    `json:"created"` // Creation timestamp — presumably Unix seconds; TODO confirm against API docs.
	Choices []Choice `json:"choices"` // One entry per generated completion (see ChatRequest.N).
	// Usage reports token accounting for the request/response pair.
	// NOTE(review): anonymous struct — extracting it to a named type would
	// be friendlier to callers, but would change the field's declared type.
	Usage   struct {
		PromptTokens     int `json:"prompt_tokens"`     // Tokens consumed by the prompt.
		CompletionTokens int `json:"completion_tokens"` // Tokens generated in the completion.
		TotalTokens      int `json:"total_tokens"`      // Sum of prompt and completion tokens.
	} `json:"usage"`
}

type ChatRequest

// ChatRequest is the request body sent to the chat completions API.
//
// NOTE(review): Temperature, TopP, PresencePenalty and FrequencyPenalty are
// non-pointer floats tagged with omitempty, so an explicit zero value is
// dropped from the encoded JSON and the API-side default applies instead.
// If a literal 0 must ever be sent, these would need to become *float64 —
// that is an interface change, so it is only flagged here.
type ChatRequest struct {
	Model            string             `json:"model"`                       // ID of the model to use. Currently, only gpt-3.5-turbo and gpt-4 are supported. Required.
	Messages         []Message          `json:"messages"`                    // The messages to generate chat completions for, in the chat format. Required.
	Temperature      float64            `json:"temperature,omitempty"`       // What sampling temperature to use, between 0 and 2. Optional, defaults to 1.
	TopP             float64            `json:"top_p,omitempty"`             // An alternative to sampling with temperature, where the model considers the results of the tokens with top_p probability mass. Optional, defaults to 1.
	N                int                `json:"n,omitempty"`                 // How many chat completion choices to generate for each input message. Optional, defaults to 1.
	Stream           bool               `json:"stream,omitempty"`            // *** NOT IMPLEMENTED *** If set, partial message deltas will be sent. Optional, defaults to false.
	Stop             interface{}        `json:"stop,omitempty"`              // Up to 4 sequences where the API will stop generating further tokens. Optional, defaults to null.
	MaxTokens        int                `json:"max_tokens,omitempty"`        // The maximum number of tokens allowed for the generated answer. Optional, defaults to inf.
	PresencePenalty  float64            `json:"presence_penalty,omitempty"`  // Number between -2.0 and 2.0. Positive values penalize new tokens based on whether they appear in the text so far. Optional, defaults to 0.
	FrequencyPenalty float64            `json:"frequency_penalty,omitempty"` // Number between -2.0 and 2.0. Positive values penalize new tokens based on their existing frequency in the text so far. Optional, defaults to 0.
	LogitBias        map[string]float64 `json:"logit_bias,omitempty"`        // Modify the likelihood of specified tokens appearing in the completion. Optional, defaults to null.
	User             string             `json:"user,omitempty"`              // A unique identifier representing your end-user. Optional.
}

type Choice

// Choice is a single generated completion within a ChatCompletionResponse.
type Choice struct {
	Index        int     `json:"index"`         // Position of this choice in the response (0-based per API convention — TODO confirm).
	Message      Message `json:"message"`       // The generated assistant message.
	FinishReason string  `json:"finish_reason"` // Why generation stopped (e.g. length/stop — values set by the API; not validated here).
}

type Message

// Message is a single chat message in the chat format, used both in
// requests (ChatRequest.Messages) and responses (Choice.Message).
type Message struct {
	Role    string `json:"role"`    // Speaker role (e.g. "system"/"user"/"assistant" per chat format — not validated here).
	Content string `json:"content"` // The message text.
}

type Params

// Params is the client configuration created by Init and threaded through
// Query/ClearHistory. It bundles the auth token, the request template, and
// optional conversation history.
//
// NOTE(review): API_TOKEN violates Go naming conventions (should be
// APIToken), but renaming an exported field would break existing callers,
// so it is only flagged here.
type Params struct {
	API_TOKEN          string      `json:"api_token,omitempty"`            // This will be stripped from the request before sending to the API. Required.
	StripNewline       bool        `json:"strip_newline,omitempty"`        // If set, the API will strip newlines from the beginning of the generated text. Optional, defaults to false.
	Request            ChatRequest `json:"request,omitempty"`              // The request body to send to the API. Required.
	KeepMessageHistory bool        `json:"keep_message_history,omitempty"` // If set, the message history will be kept in the response. Optional, defaults to false.
	MessageHistory     []Message   `json:"message_history,omitempty"`      // The message history to use. Optional, defaults to null.
}

func Init

func Init(userParams Params) (*Params, error)

func (*Params) ClearHistory

func (params *Params) ClearHistory(msg string)

func (*Params) Query

func (params *Params) Query(msg string) ([]Choice, error)

Jump to

Keyboard shortcuts

? : This menu
/ : Search site
f or F : Jump to
y or Y : Canonical URL