Documentation
¶
Index ¶
Constants ¶
const ( LLAMA3_0_8B_INSTRUCT = Llama3("meta.llama3-8b-instruct-v1:0") LLAMA3_0_70B_INSTRUCT = Llama3("meta.llama3-70b-instruct-v1:0") LLAMA3_1_8B_INSTRUCT = Llama3("meta.llama3-1-8b-instruct-v1:0") LLAMA3_1_70B_INSTRUCT = Llama3("meta.llama3-1-70b-instruct-v1:0") LLAMA3_1_405B_INSTRUCT = Llama3("meta.llama3-1-405b-instruct-v1:0") LLAMA3_2_1B_INSTRUCT = Llama3("meta.llama3-2-1b-instruct-v1:0") LLAMA3_2_3B_INSTRUCT = Llama3("meta.llama3-2-3b-instruct-v1:0") LLAMA3_2_11B_INSTRUCT = Llama3("meta.llama3-2-11b-instruct-v1:0") LLAMA3_2_90B_INSTRUCT = Llama3("meta.llama3-2-90b-instruct-v1:0") LLAMA3_3_70B_INSTRUCT = Llama3("meta.llama3-3-70b-instruct-v1:0") )
See the list of model IDs at https://docs.aws.amazon.com/bedrock/latest/userguide/model-ids.html#model-ids-arns
const ( TITAN_TEXT_LITE_V1 = Titan("amazon.titan-text-lite-v1") TITAN_TEXT_EXPRESS_V1 = Titan("amazon.titan-text-express-v1") TITAN_TEXT_PREMIER_V1 = Titan("amazon.titan-text-premier-v1:0") )
See https://docs.aws.amazon.com/bedrock/latest/userguide/model-ids.html
const Version = "llm/bedrock/v0.3.2"
Variables ¶
var ( // Set AWS Bedrock Foundational LLM // // This option is required. WithLLM = opts.ForType[Client, LLM]() // Use aws.Config to config the client WithConfig = opts.FMap(optsFromConfig) // Use region for aws.Config WithRegion = opts.FMap(optsFromRegion) // Set us-west-2 as default region WithDefaultRegion = WithRegion(defaultRegion) // Set AWS Bedrock Runtime WithBedrock = opts.ForType[Client, Bedrock]() )
Functions ¶
This section is empty.
Types ¶
type Bedrock ¶
type Bedrock interface {
InvokeModel(ctx context.Context, params *bedrockruntime.InvokeModelInput, optFns ...func(*bedrockruntime.Options)) (*bedrockruntime.InvokeModelOutput, error)
}
AWS Bedrock Runtime API
type Client ¶
type Client struct {
// contains filtered or unexported fields
}
AWS Bedrock client
func New ¶
Create a client to AWS Bedrock.
By default the `us-west-2` region is used (see WithDefaultRegion); use config options to alter this behavior.
func (*Client) Prompt ¶
func (c *Client) Prompt(ctx context.Context, prompt []fmt.Stringer, opts ...chatter.Opt) (chatter.Reply, error)
Prompt the model
func (*Client) UsedInputTokens ¶
func (*Client) UsedReplyTokens ¶
type FoundationModel ¶
type FoundationModel struct { constructs.Construct // contains filtered or unexported fields }
func NewFoundationModel ¶
func NewFoundationModel(scope constructs.Construct, id *string, foundationModelId awsbedrock.FoundationModelIdentifier) *FoundationModel
func (*FoundationModel) GrantAccess ¶
func (c *FoundationModel) GrantAccess(grantee awsiam.IGrantable)
func (*FoundationModel) GrantAccessIn ¶
func (c *FoundationModel) GrantAccessIn(grantee awsiam.IGrantable, region *string)
type InferenceProfile ¶ added in v0.3.2
type InferenceProfile struct { constructs.Construct // contains filtered or unexported fields }
func NewInferenceProfile ¶ added in v0.3.2
func NewInferenceProfile(scope constructs.Construct, id *string, profile *string) *InferenceProfile
func (*InferenceProfile) GrantAccessIn ¶ added in v0.3.2
func (c *InferenceProfile) GrantAccessIn(grantee awsiam.IGrantable, region *string)
type Llama3 ¶
type Llama3 string
Meta Llama3 model family
See: https://docs.aws.amazon.com/bedrock/latest/userguide/model-parameters-meta.html, https://www.llama.com/docs/model-cards-and-prompt-formats/llama-guard-3, and https://www.llama.com/docs/model-cards-and-prompt-formats/meta-llama-3/
type Titan ¶
type Titan string
Amazon Titan Text model family
See https://docs.aws.amazon.com/bedrock/latest/userguide/model-parameters-titan-text.html for model parameters, and https://docs.aws.amazon.com/bedrock/latest/userguide/prompt-templates-and-examples.html for prompt templates and examples.