openai

package module
v1.0.3
Published: Nov 18, 2024 License: Apache-2.0 Imports: 14 Imported by: 0

README

openai-go-sdk

OpenAI Docs API Reference: https://platform.openai.com/docs/api-reference/introduction

Installation

Use go get to install the SDK:

go get github.com/shenhaofang/openai-go-sdk

Usage

package main

import (
	"bytes"
	"context"
	"fmt"
	"io"
	"log"
	"net/http"
	"time"

	"github.com/shenhaofang/openai-go-sdk"
)

const (
	DefaultRequestTimeout      = 30 * time.Second
	DefaultMaxIdleConns        = 100
	DefaultMaxIdleConnsPerHost = 50
	DefaultMaxConnsPerHost     = 200
	DefaultIdleConnTimeout     = 20 * time.Minute

	APIKey  = "your-api-key"
	BaseUrl = "https://dashscope.aliyuncs.com/compatible-mode/v1"
)

func main() {
	// Create a new client
	aiClient := openai.NewAIClient(APIKey, BaseUrl, openai.ClientDefaultParamOption{
		MaxToken: 1500,
	}, openai.ClientHTTPClientOption{
		Client: &http.Client{
			Transport: &http.Transport{
				MaxIdleConns:        DefaultMaxIdleConns,
				MaxIdleConnsPerHost: DefaultMaxIdleConnsPerHost,
				MaxConnsPerHost:     DefaultMaxConnsPerHost,
				IdleConnTimeout:     DefaultIdleConnTimeout,
			},
			Timeout: DefaultRequestTimeout,
		},
	})

	// Create a new context
	ctx := context.Background()

	// text/json
	// Create a new completion request
	chatParam := openai.OpenAIChatParam{
		Model: "qwen-vl-plus",
		Message: []openai.Message{
			{
				Role:    openai.RoleSystem,
				Content: openai.TextContent("你是一个图片解析助手"), // "You are an image-analysis assistant"
			},
			{
				Role: openai.RoleUser,
				Content: openai.UserArrContent{
					openai.UserImgContent{
						Type: "image_url",
						ImageURL: openai.ImgURL{
							URL: "https://wxls-cms.oss-cn-hangzhou.aliyuncs.com/online/2024-04-18/218da022-f4bf-456a-99af-5cb8e157f7b8.jpg",
						},
					},
					openai.UserImgContent{
						Type: "image_url",
						ImageURL: openai.ImgURL{
							URL: "https://dashscope.oss-cn-beijing.aliyuncs.com/images/dog_and_girl.jpeg",
						},
					},
					openai.UserTextContent{
						Type: "text",
						Text: `请问这些图片里边都是啥?`, // "What is in these images?"
					},
				},
			},
		},
		TopP: 0.1,
	}

	// make chat request
	aiChatReq, err := aiClient.MakeChatRequest("chat/completions", chatParam)
	if err != nil {
		log.Fatalf("Error creating request: %v", err)
		return
	}
	// send msg to ai
	aiChatResp, err := aiChatReq.GetResp(ctx)
	if err != nil {
		log.Fatalf("Error get resp: %v", err)
		return
	}

	// get ai response
	resChat, err := aiChatResp.Get()
	if err != nil {
		log.Fatalf("Error get msg from ai resp: %v", err)
		return
	}
	// Print the response content
	fmt.Println(resChat.Choices[0].Message.Content)

	// Streaming request (text/event-stream)
	chatParam.Stream = true

	// make chat request
	streamChatReq, err := aiClient.MakeChatRequest("chat/completions", chatParam)
	if err != nil {
		log.Fatalf("Error creating request: %v", err)
		return
	}
	// send msg to ai
	streamChatResp, err := streamChatReq.GetResp(ctx)
	if err != nil {
		log.Fatalf("Error get resp: %v", err)
		return
	}

	defer streamChatResp.Close()
	// Read the stream until EOF, accumulating the delta content of each chunk.
	var resGot *openai.RespAIChatStream
	res := ""
	for {
		resGot, err = streamChatResp.Recv()
		if err != nil || resGot.Error != nil {
			break
		}
		if resGot.Choices[0].Delta.Content == "" {
			continue
		}
		res += resGot.Choices[0].Delta.Content
	}
	if err != nil && err != io.EOF {
		log.Fatalf("Error reading from stream: %v", err)
		return
	}
	if resGot != nil && resGot.Error != nil {
		log.Fatalf("Error in stream response: %v", resGot.Error)
		return
	}
	// Print the response text
	fmt.Println(res)

	// File upload request: the sample document is Li Bai's poem 《侠客行》 ("Ode to Gallantry").
	fileText := bytes.NewReader([]byte(`《侠客行》 - 李白

	赵客缦胡缨,吴钩霜雪明。银鞍照白马,飒沓如流星。

	十步杀一人,千里不留行。事了拂衣去,深藏身与名。

	闲过信陵饮,脱剑膝前横。将炙啖朱亥,持觞劝侯嬴。

	三杯吐然诺,五岳倒为轻。眼花耳热后,意气素霓生。

	救赵挥金锤,邯郸先震惊。千秋二壮士,烜赫大梁城。

	纵死侠骨香,不惭世上英。谁能书阁下,白首太玄经。`))

	// upload the file for extraction
	fileInfo, err := aiClient.UpdateFile("files", openai.OpenAIFileCreateParam{
		File:     fileText,
		FileName: "test.txt",
		Purpose:  "file-extract",
	})
	if err != nil {
		log.Fatalf("Error get msg from ai resp: %v", err)
		return
	}
	fmt.Println(fileInfo)

	fileInfo2, err := aiClient.RetrieveFile("files", fileInfo.ID)
	if err != nil {
		log.Fatalf("Error get msg from ai resp: %v", err)
		return
	}
	fmt.Println(fileInfo2)

	// Ask the model about the uploaded document
	chatParam = openai.OpenAIChatParam{
		Model: "qwen-long",
		Message: []openai.Message{
			{
				Role:    openai.RoleSystem,
				Content: openai.TextContent("你是一个文档小助手,辅助用户完成文档解读"), // "You are a document assistant who helps the user interpret documents"
			},
			{
				Role:    openai.RoleSystem,
				Content: openai.TextContent("fileid://" + fileInfo.ID),
			},
			{
				Role:    openai.RoleUser,
				Content: openai.TextContent("这都写了点啥?给说说呗!"), // "What does this say? Tell me about it!"
			},
		},
		TopP: 0.3,
	}

	// make chat request
	aiChatReq, err = aiClient.MakeChatRequest("chat/completions", chatParam)
	if err != nil {
		log.Fatalf("Error creating request: %v", err)
		return
	}
	// send msg to ai
	aiChatResp, err = aiChatReq.GetResp(ctx)
	if err != nil {
		log.Fatalf("Error get resp: %v", err)
		return
	}

	// get ai response
	resChat, err = aiChatResp.Get()
	if err != nil {
		log.Fatalf("Error get msg from ai resp: %v", err)
		return
	}
	// Print the response content
	fmt.Println(resChat.Choices[0].Message.Content)
}

Documentation

Index

Constants

This section is empty.

Variables

This section is empty.

Functions

func SetArrMsgContentItemMatcher

func SetArrMsgContentItemMatcher(matcher ArrMsgContentItemMatcher)

Types

type AIClient

type AIClient struct {
	Key                string
	BaseURL            string
	DefaultMaxToken    int64
	DefaultTemperature float64
	// contains filtered or unexported fields
}

func NewAIClient

func NewAIClient(key string, baseURL string, opts ...ClientOption) *AIClient

func (*AIClient) DeleteFile added in v1.0.3

func (c *AIClient) DeleteFile(fileID string) (*DeleteFileResp, error)

func (*AIClient) ListFiles added in v1.0.3

func (c *AIClient) ListFiles(param OpenAIListFilesParam) (*ListFilesResp, error)
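
A minimal sketch of file housekeeping with ListFiles and DeleteFile, continuing from the aiClient created in the Usage example; the purpose filter and page size are illustrative:

// List up to 20 files uploaded for extraction, then delete each one.
files, err := aiClient.ListFiles(openai.OpenAIListFilesParam{
	Purpose: "file-extract", // illustrative filter
	Limit:   20,
})
if err != nil {
	log.Fatalf("Error listing files: %v", err)
}
for _, f := range files.Data {
	deleted, err := aiClient.DeleteFile(f.ID)
	if err != nil {
		log.Fatalf("Error deleting file %s: %v", f.ID, err)
	}
	fmt.Println(f.FileName, "deleted:", deleted.Deleted)
}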

func (*AIClient) MakeChatReqBytes added in v1.0.1

func (c *AIClient) MakeChatReqBytes(param OpenAIChatParam) (reqByts []byte, err error)
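
MakeChatReqBytes builds the JSON request body without sending it, which can be useful for logging or for dispatching the call through your own HTTP stack. A brief sketch, continuing from the aiClient and chatParam in the Usage example:

// Serialize the chat parameters into the JSON body the API would receive.
reqBytes, err := aiClient.MakeChatReqBytes(chatParam)
if err != nil {
	log.Fatalf("Error building request body: %v", err)
}
log.Printf("outgoing chat request: %s", reqBytes)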

func (*AIClient) MakeChatRequest

func (c *AIClient) MakeChatRequest(method string, param OpenAIChatParam) (*AIRequest, error)

func (*AIClient) RetrieveFile added in v1.0.1

func (c *AIClient) RetrieveFile(method string, fileID string) (*FileInfo, error)

func (*AIClient) UpdateFile added in v1.0.1

func (c *AIClient) UpdateFile(method string, param OpenAIFileCreateParam) (*FileInfo, error)

func (*AIClient) WithOptions

func (c *AIClient) WithOptions(opts ...ClientOption) *AIClient
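
WithOptions applies additional ClientOptions to an already constructed client, so defaults can be adjusted without repeating the base configuration. A sketch using ClientDefaultParamOption (documented below); note that the signature returns *AIClient, and whether the receiver itself is also modified is not stated here:

// Derive a client whose requests default to longer, more creative completions.
longFormClient := aiClient.WithOptions(openai.ClientDefaultParamOption{
	MaxToken:    4000, // illustrative values
	Temperature: 0.7,
})
_ = longFormClient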

type AIError

type AIError struct {
	Code    string      `json:"code"`
	Type    string      `json:"type"`
	Message string      `json:"message"`
	Param   interface{} `json:"param"`
}

func (*AIError) Error

func (e *AIError) Error() string
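
AIError satisfies the error interface and also appears as the Error field on the response types, so a failed call can be inspected for the API's error code and type. A sketch, continuing from the aiChatResp in the Usage example:

resChat, err := aiChatResp.Get()
if err != nil {
	log.Fatalf("Error getting response: %v", err)
}
if resChat.Error != nil {
	// The API reported a structured error in the response body.
	log.Fatalf("API error %s (%s): %s", resChat.Error.Code, resChat.Error.Type, resChat.Error.Message)
}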

type AIReqOption

type AIReqOption interface {
	Set(r *AIRequest)
}

type AIRequest

type AIRequest struct {
	IsStream bool
	// contains filtered or unexported fields
}

func (*AIRequest) GetResp

func (r *AIRequest) GetResp(ctx context.Context) (*RespOpenAI, error)

func (*AIRequest) WithOptions

func (r *AIRequest) WithOptions(opts ...AIReqOption) *AIRequest

type ArrMsgContentItemMatcher

type ArrMsgContentItemMatcher interface {
	MatchContentItem(keyword string) UserArrContentItem
}

Matcher for items of array-style message content: when an array-typed content field is parsed, it selects the appropriate concrete item type for each element.
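
A sketch of a custom matcher that recognizes one extra keyword and defers to DefaultArrMsgContentItemMatcher for everything else; the "input_text" keyword is purely illustrative, since the keywords the SDK passes to MatchContentItem are not documented here:

// textFallbackMatcher treats "input_text" items as plain text items and
// falls back to the default matcher for any other keyword.
type textFallbackMatcher struct {
	openai.DefaultArrMsgContentItemMatcher
}

func (m textFallbackMatcher) MatchContentItem(keyword string) openai.UserArrContentItem {
	if keyword == "input_text" {
		return openai.UserTextContent{Type: "input_text"}
	}
	return m.DefaultArrMsgContentItemMatcher.MatchContentItem(keyword)
}

func init() {
	// Register the matcher globally so array-style content is parsed with it.
	openai.SetArrMsgContentItemMatcher(textFallbackMatcher{})
}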

type ChatChoice

type ChatChoice struct {
	Index        int          `json:"index"`
	Message      Message      `json:"message"`
	FinishReason FinishReason `json:"finish_reason"`
}

https://platform.openai.com/docs/api-reference/chat/object

type ChatCompletionStreamChoiceDelta

type ChatCompletionStreamChoiceDelta struct {
	Content   string     `json:"content,omitempty"`
	Role      string     `json:"role,omitempty"`
	Refusal   string     `json:"refusal,omitempty"` // The refusal message generated by the model.
	ToolCalls []ToolFunc `json:"tool_calls,omitempty"`
}

type ChatUsage

type ChatUsage struct {
	PromptTokens     int `json:"prompt_tokens"`
	CompletionTokens int `json:"completion_tokens"`
	TotalTokens      int `json:"total_tokens"`
}

type ClientDefaultParamOption

type ClientDefaultParamOption struct {
	MaxToken    int64
	Temperature float64
}

func (ClientDefaultParamOption) Set

type ClientHTTPClientOption

type ClientHTTPClientOption struct {
	Client *http.Client
}

func (ClientHTTPClientOption) Set

func (o ClientHTTPClientOption) Set(c *AIClient)

type ClientOption

type ClientOption interface {
	Set(c *AIClient)
}

type ContentFilterResults

type ContentFilterResults struct {
	Hate     Hate     `json:"hate,omitempty"`
	SelfHarm SelfHarm `json:"self_harm,omitempty"`
	Sexual   Sexual   `json:"sexual,omitempty"`
	Violence Violence `json:"violence,omitempty"`
}

type DefaultArrMsgContentItemMatcher

type DefaultArrMsgContentItemMatcher struct{}

func (DefaultArrMsgContentItemMatcher) MatchContentItem

func (DefaultArrMsgContentItemMatcher) MatchContentItem(keyword string) UserArrContentItem

type DeleteFileResp added in v1.0.3

type DeleteFileResp struct {
	ID      string `json:"id"`
	Object  string `json:"object"`
	Deleted bool   `json:"deleted"`
	Error   *AIError
}

type FileInfo added in v1.0.1

type FileInfo struct {
	ID            string `json:"id"`             // The file identifier, which can be referenced in the API endpoints.
	Bytes         int    `json:"bytes"`          // The size of the file, in bytes.
	CreatedAt     int64  `json:"created_at"`     // The Unix timestamp (in seconds) for when the file was created.
	FileName      string `json:"filename"`       // The name of the file.
	Object        string `json:"object"`         // The object type, which is always file.
	Purpose       string `json:"purpose"`        // The intended purpose of the file. Supported values are assistants, assistants_output, batch, batch_output, fine-tune, fine-tune-results and vision.
	Status        string `json:"status"`         // Deprecated
	StatusDetails string `json:"status_details"` // Deprecated
}

https://platform.openai.com/docs/api-reference/files/object

type FileResp added in v1.0.1

type FileResp struct {
	FileInfo
	Error *AIError `json:"error,omitempty"`
}

type FinishReason

type FinishReason string
const (
	// The reason the model stopped generating tokens.
	// This will be stop if the model hit a natural stop point or a provided stop sequence,
	// length if the maximum number of tokens specified in the request was reached,
	// content_filter if content was omitted due to a flag from our content filters,
	// tool_calls if the model called a tool,
	// or function_call (deprecated) if the model called a function.
	FinishReasonStop          FinishReason = "stop"
	FinishReasonLength        FinishReason = "length"
	FinishReasonFunctionCall  FinishReason = "function_call"
	FinishReasonToolCalls     FinishReason = "tool_calls"
	FinishReasonContentFilter FinishReason = "content_filter"
	FinishReasonNull          FinishReason = "null"
)
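
A brief sketch of checking why generation stopped, continuing from the resChat in the Usage example:

// Warn when the completion was cut off rather than finishing naturally.
switch resChat.Choices[0].FinishReason {
case openai.FinishReasonStop:
	fmt.Println("model reached a natural stop")
case openai.FinishReasonLength:
	fmt.Println("output truncated: consider raising MaxTokens in OpenAIChatParam")
case openai.FinishReasonContentFilter:
	fmt.Println("content was omitted by the provider's content filter")
}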

type Hate

type Hate struct {
	Filtered bool   `json:"filtered"`
	Severity string `json:"severity,omitempty"`
}

type HeadOption

type HeadOption struct {
	Header map[string]string
}

func (HeadOption) Set

func (o HeadOption) Set(r *AIRequest)
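
HeadOption implements AIReqOption, so extra HTTP headers can be attached to a single request through AIRequest.WithOptions. A sketch, continuing from the aiClient, ctx, and chatParam in the Usage example; the header name and value are illustrative:

// Attach a per-request header (e.g. a trace ID) before sending.
tracedReq, err := aiClient.MakeChatRequest("chat/completions", chatParam)
if err != nil {
	log.Fatalf("Error creating request: %v", err)
}
tracedReq = tracedReq.WithOptions(openai.HeadOption{
	Header: map[string]string{"X-Request-Id": "demo-123"}, // illustrative header
})
tracedResp, err := tracedReq.GetResp(ctx)
if err != nil {
	log.Fatalf("Error getting response: %v", err)
}
defer tracedResp.Close()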

type ImgURL

type ImgURL struct {
	URL    string `json:"url"`
	Detail string `json:"detail,omitempty"`
}

type ListFilesResp added in v1.0.3

type ListFilesResp struct {
	Data     []FileInfo `json:"data"`
	Object   string     `json:"object"`
	NextPage int        `json:"next_page"`
	HasMore  bool       `json:"has_more"`
	Error    *AIError   `json:"error,omitempty"`
}

type Message

type Message struct {
	Role       Role       `json:"role"`
	Name       string     `json:"name,omitempty"`
	Content    MsgContent `json:"content"`
	Refusal    string     `json:"refusal,omitempty"` // The refusal message generated by the model.
	ToolCalls  []ToolFunc `json:"tool_calls,omitempty"`
	ToolCallID string     `json:"tool_call_id,omitempty"`
}

func (*Message) UnmarshalJSON

func (p *Message) UnmarshalJSON(input []byte) error

type MsgBase

type MsgBase struct {
	Role       Role       `json:"role"`
	Name       string     `json:"name,omitempty"`
	Refusal    string     `json:"refusal,omitempty"` // The refusal message generated by the model.
	ToolCalls  []ToolFunc `json:"tool_calls,omitempty"`
	ToolCallID string     `json:"tool_call_id,omitempty"`
}

type MsgContent

type MsgContent interface {
	ContentType() string
	MatchRole(role Role) bool
	Text() []string
	CastToTextContent() (TextContent, bool)
	CastToUserArrContent() (UserArrContent, bool)
}
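
MsgContent abstracts over plain string content (TextContent) and array content (UserArrContent). Text() flattens either form into its textual parts, and the Cast methods allow branching on the concrete kind. A brief sketch over the message content of a chat response such as the Usage example's resChat:

// Collect the textual parts of the reply regardless of the content shape.
content := resChat.Choices[0].Message.Content
for _, part := range content.Text() {
	fmt.Println(part)
}
// Or branch explicitly on the concrete content type.
if arr, ok := content.CastToUserArrContent(); ok {
	fmt.Println("array content with", len(arr), "items")
} else if txt, ok := content.CastToTextContent(); ok {
	fmt.Println("plain text content:", string(txt))
}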

type OpenAIChatParam

type OpenAIChatParam struct {
	Model       string    `json:"model"`
	Message     []Message `json:"messages"`
	Temperature float64   `json:"temperature,omitempty"`
	// An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the tokens with top_p probability mass.
	// So 0.1 means only the tokens comprising the top 10% probability mass are considered.
	// We generally recommend altering this or temperature but not both.
	TopP             float64  `json:"top_p,omitempty"`
	N                int      `json:"n,omitempty"`
	Stream           bool     `json:"stream"`
	Stop             []string `json:"stop,omitempty"`
	MaxTokens        int64    `json:"max_tokens,omitempty"`
	FrequencyPenalty int      `json:"frequency_penalty"`
	PresencePenalty  float64  `json:"presence_penalty"`
	Seed             uint64   `json:"seed,omitempty"`
	User             string   `json:"user,omitempty"`
}

https://platform.openai.com/docs/api-reference/chat/create

type OpenAIFileCreateParam added in v1.0.1

type OpenAIFileCreateParam struct {
	File     io.Reader `json:"file"`
	FileName string    `json:"file_name"`
	Purpose  string    `json:"purpose"`
	// contains filtered or unexported fields
}

type OpenAIListFilesParam added in v1.0.3

type OpenAIListFilesParam struct {
	Purpose string
	Limit   int
	Order   string
	After   string
}

func (OpenAIListFilesParam) ToQuery added in v1.0.3

func (o OpenAIListFilesParam) ToQuery() string

type PromptAnnotation

type PromptAnnotation struct {
	PromptIndex          int                  `json:"prompt_index,omitempty"`
	ContentFilterResults ContentFilterResults `json:"content_filter_results,omitempty"`
}

type PromptFilterResult

type PromptFilterResult struct {
	Index                int                  `json:"index"`
	ContentFilterResults ContentFilterResults `json:"content_filter_results,omitempty"`
}

type RespAIChat

type RespAIChat struct {
	ID                  string               `json:"id"`
	Object              string               `json:"object"`
	Created             int64                `json:"created"`
	Model               string               `json:"model"`
	Choices             []ChatChoice         `json:"choices"`
	SystemFingerprint   string               `json:"system_fingerprint"`
	PromptAnnotations   []PromptAnnotation   `json:"prompt_annotations,omitempty"`
	PromptFilterResults []PromptFilterResult `json:"prompt_filter_results,omitempty"`
	Usage               *ChatUsage           `json:"usage,omitempty"`
	Error               *AIError             `json:"error,omitempty"`
}

type RespAIChatStream

type RespAIChatStream struct {
	ID                  string               `json:"id"`
	Object              string               `json:"object"`
	Created             int64                `json:"created"`
	Model               string               `json:"model"`
	Choices             []StreamChatChoice   `json:"choices"`
	SystemFingerprint   string               `json:"system_fingerprint"`
	PromptAnnotations   []PromptAnnotation   `json:"prompt_annotations,omitempty"`
	PromptFilterResults []PromptFilterResult `json:"prompt_filter_results,omitempty"`
	Usage               *ChatUsage           `json:"usage,omitempty"`
	Error               *AIError             `json:"error,omitempty"`
}

type RespOpenAI

type RespOpenAI struct {
	IsStream          bool
	EmptyMsgLineLimit int
	// contains filtered or unexported fields
}

func (*RespOpenAI) Close

func (r *RespOpenAI) Close() error

func (*RespOpenAI) Get

func (r *RespOpenAI) Get() (*RespAIChat, error)

func (*RespOpenAI) HttpStatus added in v1.0.3

func (r *RespOpenAI) HttpStatus() int

func (*RespOpenAI) Recv

func (r *RespOpenAI) Recv() (*RespAIChatStream, error)
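
HttpStatus exposes the HTTP status code of the underlying response, which helps distinguish transport-level failures from API-level errors before decoding the body. A sketch, continuing from the aiChatResp in the Usage example:

// Bail out early on a non-200 status; otherwise decode the chat response.
if status := aiChatResp.HttpStatus(); status != http.StatusOK {
	log.Fatalf("unexpected HTTP status: %d", status)
}
resChat, err := aiChatResp.Get()
if err != nil {
	log.Fatalf("Error getting response: %v", err)
}
fmt.Println(resChat.Choices[0].Message.Content)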

type Role

type Role string
const (
	RoleSystem    Role = "system"
	RoleUser      Role = "user"
	RoleAssistant Role = "assistant"
)

type SelfHarm

type SelfHarm struct {
	Filtered bool   `json:"filtered"`
	Severity string `json:"severity,omitempty"`
}

type Sexual

type Sexual struct {
	Filtered bool   `json:"filtered"`
	Severity string `json:"severity,omitempty"`
}

type StreamChatChoice

type StreamChatChoice struct {
	Index        int                             `json:"index"`
	FinishReason FinishReason                    `json:"finish_reason"`
	Delta        ChatCompletionStreamChoiceDelta `json:"delta"` // A chat completion delta generated by streamed model responses.
}

https://platform.openai.com/docs/api-reference/chat/streaming

type TextContent

type TextContent string

Implements MsgContent.

func (TextContent) CastToTextContent

func (c TextContent) CastToTextContent() (TextContent, bool)

func (TextContent) CastToUserArrContent

func (c TextContent) CastToUserArrContent() (UserArrContent, bool)

func (TextContent) ContentType

func (c TextContent) ContentType() string

func (TextContent) MatchRole

func (c TextContent) MatchRole(role Role) bool

func (TextContent) Text

func (c TextContent) Text() []string

type ToolFunc

type ToolFunc struct {
	ID       string `json:"id"`
	Type     string `json:"type"`
	Function struct {
		Name      string `json:"name"`      // The name of the function to call.
		Arguments string `json:"arguments"` // The arguments to call the function with, as generated by the model in JSON format. Note that the model does not always generate valid JSON, and may hallucinate parameters not defined by your function schema. Validate the arguments in your code before calling your function.
	} `json:"function"`
}

type UserArrContent

type UserArrContent []UserArrContentItem

Implements MsgContent.

func (UserArrContent) CastToTextContent

func (c UserArrContent) CastToTextContent() (TextContent, bool)

func (UserArrContent) CastToUserArrContent

func (c UserArrContent) CastToUserArrContent() (UserArrContent, bool)

func (UserArrContent) ContentType

func (c UserArrContent) ContentType() string

func (UserArrContent) MatchRole

func (c UserArrContent) MatchRole(role Role) bool

func (UserArrContent) Text

func (c UserArrContent) Text() []string

type UserArrContentItem

type UserArrContentItem interface {
	GetType() string
	Keyword() string
	GetText() string
	CastToUserArrTextContent() (UserTextContent, bool)
	CastToUserArrImgContent() (UserImgContent, bool)
}

type UserImgContent

type UserImgContent struct {
	Type     string `json:"type,omitempty"`
	ImageURL ImgURL `json:"image_url"`
}

Implements UserArrContentItem.

func (UserImgContent) CastToUserArrImgContent

func (i UserImgContent) CastToUserArrImgContent() (UserImgContent, bool)

func (UserImgContent) CastToUserArrTextContent

func (i UserImgContent) CastToUserArrTextContent() (UserTextContent, bool)

func (UserImgContent) GetText

func (i UserImgContent) GetText() string

func (UserImgContent) GetType

func (i UserImgContent) GetType() string

func (UserImgContent) Keyword

func (i UserImgContent) Keyword() string

type UserTextContent

type UserTextContent struct {
	Type string `json:"type,omitempty"`
	Text string `json:"text"`
}

Implements UserArrContentItem.

func (UserTextContent) CastToUserArrImgContent

func (i UserTextContent) CastToUserArrImgContent() (UserImgContent, bool)

func (UserTextContent) CastToUserArrTextContent

func (i UserTextContent) CastToUserArrTextContent() (UserTextContent, bool)

func (UserTextContent) GetText

func (i UserTextContent) GetText() string

func (UserTextContent) GetType

func (i UserTextContent) GetType() string

func (UserTextContent) Keyword

func (i UserTextContent) Keyword() string

type Violence

type Violence struct {
	Filtered bool   `json:"filtered"`
	Severity string `json:"severity,omitempty"`
}

Directories

Path Synopsis
example
