Documentation
¶
Index ¶
- Constants
- Variables
- type ForwardingRule
- func (x *ForwardingRule) ClearTopicPrefix()
- func (x *ForwardingRule) ClearTrigger()
- func (x *ForwardingRule) GetTargetCluster() string
- func (x *ForwardingRule) GetTopicPrefix() string
- func (x *ForwardingRule) GetTrigger() isForwardingRule_Trigger
- func (x *ForwardingRule) HasTopicPrefix() bool
- func (x *ForwardingRule) HasTrigger() bool
- func (*ForwardingRule) ProtoMessage()
- func (x *ForwardingRule) ProtoReflect() protoreflect.Message
- func (x *ForwardingRule) Reset()
- func (x *ForwardingRule) SetTargetCluster(v string)
- func (x *ForwardingRule) SetTopicPrefix(v string)
- func (x *ForwardingRule) String() string
- func (x *ForwardingRule) WhichTrigger() case_ForwardingRule_Trigger
- type ForwardingRule_TopicPrefix
- type ForwardingRule_builder
- type KafkaClusterDefinition
- func (x *KafkaClusterDefinition) GetBootstrapServers() string
- func (x *KafkaClusterDefinition) GetClusterName() string
- func (x *KafkaClusterDefinition) GetConsumerConfig() map[string]string
- func (x *KafkaClusterDefinition) GetPartitionCount() int32
- func (x *KafkaClusterDefinition) GetProducerConfig() map[string]string
- func (*KafkaClusterDefinition) ProtoMessage()
- func (x *KafkaClusterDefinition) ProtoReflect() protoreflect.Message
- func (x *KafkaClusterDefinition) Reset()
- func (x *KafkaClusterDefinition) SetBootstrapServers(v string)
- func (x *KafkaClusterDefinition) SetClusterName(v string)
- func (x *KafkaClusterDefinition) SetConsumerConfig(v map[string]string)
- func (x *KafkaClusterDefinition) SetPartitionCount(v int32)
- func (x *KafkaClusterDefinition) SetProducerConfig(v map[string]string)
- func (x *KafkaClusterDefinition) String() string
- type KafkaClusterDefinition_builder
- type KafkaMesh
- func (x *KafkaMesh) GetAdvertisedHost() string
- func (x *KafkaMesh) GetAdvertisedPort() int32
- func (x *KafkaMesh) GetConsumerProxyMode() KafkaMesh_ConsumerProxyMode
- func (x *KafkaMesh) GetForwardingRules() []*ForwardingRule
- func (x *KafkaMesh) GetUpstreamClusters() []*KafkaClusterDefinition
- func (*KafkaMesh) ProtoMessage()
- func (x *KafkaMesh) ProtoReflect() protoreflect.Message
- func (x *KafkaMesh) Reset()
- func (x *KafkaMesh) SetAdvertisedHost(v string)
- func (x *KafkaMesh) SetAdvertisedPort(v int32)
- func (x *KafkaMesh) SetConsumerProxyMode(v KafkaMesh_ConsumerProxyMode)
- func (x *KafkaMesh) SetForwardingRules(v []*ForwardingRule)
- func (x *KafkaMesh) SetUpstreamClusters(v []*KafkaClusterDefinition)
- func (x *KafkaMesh) String() string
- type KafkaMesh_ConsumerProxyMode
- func (KafkaMesh_ConsumerProxyMode) Descriptor() protoreflect.EnumDescriptor
- func (x KafkaMesh_ConsumerProxyMode) Enum() *KafkaMesh_ConsumerProxyMode
- func (x KafkaMesh_ConsumerProxyMode) Number() protoreflect.EnumNumber
- func (x KafkaMesh_ConsumerProxyMode) String() string
- func (KafkaMesh_ConsumerProxyMode) Type() protoreflect.EnumType
- type KafkaMesh_builder
Constants ¶
View Source
const ForwardingRule_TopicPrefix_case case_ForwardingRule_Trigger = 2
View Source
const ForwardingRule_Trigger_not_set_case case_ForwardingRule_Trigger = 0
Variables ¶
View Source
var (
	KafkaMesh_ConsumerProxyMode_name = map[int32]string{
		0: "StatefulConsumerProxy",
	}
	KafkaMesh_ConsumerProxyMode_value = map[string]int32{
		"StatefulConsumerProxy": 0,
	}
)
Enum value maps for KafkaMesh_ConsumerProxyMode.
View Source
var File_contrib_envoy_extensions_filters_network_kafka_mesh_v3alpha_kafka_mesh_proto protoreflect.FileDescriptor
Functions ¶
This section is empty.
Types ¶
type ForwardingRule ¶
type ForwardingRule struct {
// Cluster name.
TargetCluster string `protobuf:"bytes,1,opt,name=target_cluster,json=targetCluster,proto3" json:"target_cluster,omitempty"`
// Types that are valid to be assigned to Trigger:
//
// *ForwardingRule_TopicPrefix
Trigger isForwardingRule_Trigger `protobuf_oneof:"trigger"`
// contains filtered or unexported fields
}
func (*ForwardingRule) ClearTopicPrefix ¶
func (x *ForwardingRule) ClearTopicPrefix()
func (*ForwardingRule) ClearTrigger ¶
func (x *ForwardingRule) ClearTrigger()
func (*ForwardingRule) GetTargetCluster ¶
func (x *ForwardingRule) GetTargetCluster() string
func (*ForwardingRule) GetTopicPrefix ¶
func (x *ForwardingRule) GetTopicPrefix() string
func (*ForwardingRule) GetTrigger ¶
func (x *ForwardingRule) GetTrigger() isForwardingRule_Trigger
func (*ForwardingRule) HasTopicPrefix ¶
func (x *ForwardingRule) HasTopicPrefix() bool
func (*ForwardingRule) HasTrigger ¶
func (x *ForwardingRule) HasTrigger() bool
func (*ForwardingRule) ProtoMessage ¶
func (*ForwardingRule) ProtoMessage()
func (*ForwardingRule) ProtoReflect ¶
func (x *ForwardingRule) ProtoReflect() protoreflect.Message
func (*ForwardingRule) Reset ¶
func (x *ForwardingRule) Reset()
func (*ForwardingRule) SetTargetCluster ¶
func (x *ForwardingRule) SetTargetCluster(v string)
func (*ForwardingRule) SetTopicPrefix ¶
func (x *ForwardingRule) SetTopicPrefix(v string)
func (*ForwardingRule) String ¶
func (x *ForwardingRule) String() string
func (*ForwardingRule) WhichTrigger ¶
func (x *ForwardingRule) WhichTrigger() case_ForwardingRule_Trigger
type ForwardingRule_TopicPrefix ¶
type ForwardingRule_TopicPrefix struct {
// Intended place for future types of forwarding rules.
TopicPrefix string `protobuf:"bytes,2,opt,name=topic_prefix,json=topicPrefix,proto3,oneof"`
}
type ForwardingRule_builder ¶
type ForwardingRule_builder struct {
// Cluster name.
TargetCluster string
// Fields of oneof Trigger:
// Intended place for future types of forwarding rules.
TopicPrefix *string
// contains filtered or unexported fields
}
func (ForwardingRule_builder) Build ¶
func (b0 ForwardingRule_builder) Build() *ForwardingRule
type KafkaClusterDefinition ¶
type KafkaClusterDefinition struct {
// Cluster name.
ClusterName string `protobuf:"bytes,1,opt,name=cluster_name,json=clusterName,proto3" json:"cluster_name,omitempty"`
// Kafka cluster address.
BootstrapServers string `protobuf:"bytes,2,opt,name=bootstrap_servers,json=bootstrapServers,proto3" json:"bootstrap_servers,omitempty"`
// Default number of partitions present in this cluster.
// This is especially important for clients that do not specify partition in their payloads and depend on this value for hashing.
// The same number of partitions is going to be used by upstream-pointing Kafka consumers for consumer proxying scenarios.
PartitionCount int32 `protobuf:"varint,3,opt,name=partition_count,json=partitionCount,proto3" json:"partition_count,omitempty"`
// Custom configuration passed to Kafka producer.
ProducerConfig map[string]string `` /* 177-byte string literal not displayed */
// Custom configuration passed to Kafka consumer.
ConsumerConfig map[string]string `` /* 177-byte string literal not displayed */
// contains filtered or unexported fields
}
[#next-free-field: 6]
func (*KafkaClusterDefinition) GetBootstrapServers ¶
func (x *KafkaClusterDefinition) GetBootstrapServers() string
func (*KafkaClusterDefinition) GetClusterName ¶
func (x *KafkaClusterDefinition) GetClusterName() string
func (*KafkaClusterDefinition) GetConsumerConfig ¶
func (x *KafkaClusterDefinition) GetConsumerConfig() map[string]string
func (*KafkaClusterDefinition) GetPartitionCount ¶
func (x *KafkaClusterDefinition) GetPartitionCount() int32
func (*KafkaClusterDefinition) GetProducerConfig ¶
func (x *KafkaClusterDefinition) GetProducerConfig() map[string]string
func (*KafkaClusterDefinition) ProtoMessage ¶
func (*KafkaClusterDefinition) ProtoMessage()
func (*KafkaClusterDefinition) ProtoReflect ¶
func (x *KafkaClusterDefinition) ProtoReflect() protoreflect.Message
func (*KafkaClusterDefinition) Reset ¶
func (x *KafkaClusterDefinition) Reset()
func (*KafkaClusterDefinition) SetBootstrapServers ¶
func (x *KafkaClusterDefinition) SetBootstrapServers(v string)
func (*KafkaClusterDefinition) SetClusterName ¶
func (x *KafkaClusterDefinition) SetClusterName(v string)
func (*KafkaClusterDefinition) SetConsumerConfig ¶
func (x *KafkaClusterDefinition) SetConsumerConfig(v map[string]string)
func (*KafkaClusterDefinition) SetPartitionCount ¶
func (x *KafkaClusterDefinition) SetPartitionCount(v int32)
func (*KafkaClusterDefinition) SetProducerConfig ¶
func (x *KafkaClusterDefinition) SetProducerConfig(v map[string]string)
func (*KafkaClusterDefinition) String ¶
func (x *KafkaClusterDefinition) String() string
type KafkaClusterDefinition_builder ¶
type KafkaClusterDefinition_builder struct {
// Cluster name.
ClusterName string
// Kafka cluster address.
BootstrapServers string
// Default number of partitions present in this cluster.
// This is especially important for clients that do not specify partition in their payloads and depend on this value for hashing.
// The same number of partitions is going to be used by upstream-pointing Kafka consumers for consumer proxying scenarios.
PartitionCount int32
// Custom configuration passed to Kafka producer.
ProducerConfig map[string]string
// Custom configuration passed to Kafka consumer.
ConsumerConfig map[string]string
// contains filtered or unexported fields
}
func (KafkaClusterDefinition_builder) Build ¶
func (b0 KafkaClusterDefinition_builder) Build() *KafkaClusterDefinition
type KafkaMesh ¶
type KafkaMesh struct {
// Envoy's host that's advertised to clients.
// Has the same meaning as corresponding Kafka broker properties.
// Usually equal to filter chain's listener config, but needs to be reachable by clients
// (so 0.0.0.0 will not work).
AdvertisedHost string `protobuf:"bytes,1,opt,name=advertised_host,json=advertisedHost,proto3" json:"advertised_host,omitempty"`
// Envoy's port that's advertised to clients.
AdvertisedPort int32 `protobuf:"varint,2,opt,name=advertised_port,json=advertisedPort,proto3" json:"advertised_port,omitempty"`
// Upstream clusters this filter will connect to.
UpstreamClusters []*KafkaClusterDefinition `protobuf:"bytes,3,rep,name=upstream_clusters,json=upstreamClusters,proto3" json:"upstream_clusters,omitempty"`
// Rules that will decide which cluster gets which request.
ForwardingRules []*ForwardingRule `protobuf:"bytes,4,rep,name=forwarding_rules,json=forwardingRules,proto3" json:"forwarding_rules,omitempty"`
// How the consumer proxying should behave - this relates mostly to Fetch request handling.
ConsumerProxyMode KafkaMesh_ConsumerProxyMode `` /* 200-byte string literal not displayed */
// contains filtered or unexported fields
}
[#next-free-field: 6]
func (*KafkaMesh) GetConsumerProxyMode ¶
func (x *KafkaMesh) GetConsumerProxyMode() KafkaMesh_ConsumerProxyMode
func (*KafkaMesh) GetForwardingRules ¶
func (x *KafkaMesh) GetForwardingRules() []*ForwardingRule
func (*KafkaMesh) GetUpstreamClusters ¶
func (x *KafkaMesh) GetUpstreamClusters() []*KafkaClusterDefinition
func (*KafkaMesh) ProtoReflect ¶
func (x *KafkaMesh) ProtoReflect() protoreflect.Message
func (*KafkaMesh) SetConsumerProxyMode ¶
func (x *KafkaMesh) SetConsumerProxyMode(v KafkaMesh_ConsumerProxyMode)
func (*KafkaMesh) SetForwardingRules ¶
func (x *KafkaMesh) SetForwardingRules(v []*ForwardingRule)
func (*KafkaMesh) SetUpstreamClusters ¶
func (x *KafkaMesh) SetUpstreamClusters(v []*KafkaClusterDefinition)
type KafkaMesh_ConsumerProxyMode ¶
type KafkaMesh_ConsumerProxyMode int32
const (
	// Records received are going to be distributed amongst downstream consumer connections.
	// In this mode Envoy uses librdkafka consumers pointing at upstream Kafka clusters, what means that these
	// consumers' position is meaningful and affects what records are received from upstream.
	// Users might want to take a look into these consumers' custom configuration to manage their auto-committing
	// capabilities, as it will impact Envoy's behaviour in case of restarts.
	KafkaMesh_StatefulConsumerProxy KafkaMesh_ConsumerProxyMode = 0
)
func (KafkaMesh_ConsumerProxyMode) Descriptor ¶
func (KafkaMesh_ConsumerProxyMode) Descriptor() protoreflect.EnumDescriptor
func (KafkaMesh_ConsumerProxyMode) Enum ¶
func (x KafkaMesh_ConsumerProxyMode) Enum() *KafkaMesh_ConsumerProxyMode
func (KafkaMesh_ConsumerProxyMode) Number ¶
func (x KafkaMesh_ConsumerProxyMode) Number() protoreflect.EnumNumber
func (KafkaMesh_ConsumerProxyMode) String ¶
func (x KafkaMesh_ConsumerProxyMode) String() string
func (KafkaMesh_ConsumerProxyMode) Type ¶
func (KafkaMesh_ConsumerProxyMode) Type() protoreflect.EnumType
type KafkaMesh_builder ¶
type KafkaMesh_builder struct {
// Envoy's host that's advertised to clients.
// Has the same meaning as corresponding Kafka broker properties.
// Usually equal to filter chain's listener config, but needs to be reachable by clients
// (so 0.0.0.0 will not work).
AdvertisedHost string
// Envoy's port that's advertised to clients.
AdvertisedPort int32
// Upstream clusters this filter will connect to.
UpstreamClusters []*KafkaClusterDefinition
// Rules that will decide which cluster gets which request.
ForwardingRules []*ForwardingRule
// How the consumer proxying should behave - this relates mostly to Fetch request handling.
ConsumerProxyMode KafkaMesh_ConsumerProxyMode
// contains filtered or unexported fields
}
func (KafkaMesh_builder) Build ¶
func (b0 KafkaMesh_builder) Build() *KafkaMesh
Source Files
¶
- kafka_mesh.pb.go
Click to show internal directories.
Click to hide internal directories.