Documentation
¶
Index ¶
- type Provider
- func NewProvider
- func (p *Provider) Chat(ctx context.Context, req *types.ChatRequest) (*types.ChatResponse, error)
- func (p *Provider) Complete(ctx context.Context, req *types.CompletionRequest) (*types.CompletionResponse, error)
- func (p *Provider) StreamChat(ctx context.Context, req *types.ChatRequest) (<-chan *types.ChatResponse, error)
- func (p *Provider) StreamComplete(ctx context.Context, req *types.CompletionRequest) (<-chan *types.CompletionResponse, error)
Constants ¶
This section is empty.
Variables ¶
This section is empty.
Functions ¶
This section is empty.
Types ¶
type Provider ¶
type Provider struct {
// contains filtered or unexported fields
}
Provider implements the LLM provider interface for OpenAI.
func NewProvider ¶
NewProvider creates a new OpenAI provider.
func (*Provider) Chat ¶
func (p *Provider) Chat(ctx context.Context, req *types.ChatRequest) (*types.ChatResponse, error)
Chat generates a chat completion for the given messages.
func (*Provider) Complete ¶
func (p *Provider) Complete(ctx context.Context, req *types.CompletionRequest) (*types.CompletionResponse, error)
Complete generates a completion for the given prompt.
func (*Provider) StreamChat ¶
func (p *Provider) StreamChat(ctx context.Context, req *types.ChatRequest) (<-chan *types.ChatResponse, error)
StreamChat streams a chat completion for the given messages.
func (*Provider) StreamComplete ¶
func (p *Provider) StreamComplete(ctx context.Context, req *types.CompletionRequest) (<-chan *types.CompletionResponse, error)
StreamComplete streams a completion for the given prompt.
Click to show internal directories.
Click to hide internal directories.