config

package
v1.8.3
Published: Feb 24, 2021 License: Apache-2.0 Imports: 13 Imported by: 0

Documentation

Constants

This section is empty.

Variables

var (
	// SinkerReleaseVersion information.
	SinkerReleaseVersion = "None"
	SinkerEdition        = "None"
	SinkerGitHash        = "None"
	SinkerGitBranch      = "None"
	SinkerBuildTS        = "None"
)

Functions

func GetSinkerInfo added in v1.8.3

func GetSinkerInfo() string
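
GetSinkerInfo returns a build/version string, presumably assembled from the Sinker* variables above (which are normally injected at build time, e.g. via -ldflags). A minimal usage sketch; the import path assumes the housepower/clickhouse_sinker module:

package main

import (
	"fmt"

	"github.com/housepower/clickhouse_sinker/config" // assumed import path
)

func main() {
	// Print the sinker build information (version, edition, git hash, branch, build timestamp).
	fmt.Println(config.GetSinkerInfo())
}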

Types

type ClickHouseConfig

type ClickHouseConfig struct {
	DB    string
	Hosts [][]string
	Port  int

	Username   string
	Password   string
	DsnParams  string
	RetryTimes int // <= 0 means retry infinitely
}

ClickHouseConfig configuration parameters
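
A construction sketch, assuming the config package is imported as above. The hosts, port, and credentials are placeholders, and reading Hosts as a list of shards, each holding its replica addresses, is an assumption based on the [][]string shape:

chCfg := config.ClickHouseConfig{
	DB: "default",
	// Assumption: outer slice = shards, inner slice = replicas of that shard.
	Hosts: [][]string{
		{"ch-shard1-replica1", "ch-shard1-replica2"},
		{"ch-shard2-replica1", "ch-shard2-replica2"},
	},
	Port:       9000,
	Username:   "default",
	Password:   "",
	DsnParams:  "",
	RetryTimes: 0, // <= 0 means retry infinitely
}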

type Config

type Config struct {
	Kafka      KafkaConfig
	Clickhouse ClickHouseConfig
	Task       TaskConfig
	LogLevel   string
}

Config is the top-level sinker configuration, combining the Kafka, ClickHouse, and task settings.

func ParseLocalCfgFile

func ParseLocalCfgFile(cfgPath string) (cfg *Config, err error)

func (*Config) Normallize

func (cfg *Config) Normallize() (err error)

Normallize normalizes and validates the configuration.
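
A typical load-then-validate sequence. The file path below is a placeholder, and the on-disk format is whatever ParseLocalCfgFile expects:

package main

import (
	"log"

	"github.com/housepower/clickhouse_sinker/config" // assumed import path
)

func main() {
	cfg, err := config.ParseLocalCfgFile("/etc/clickhouse_sinker/config.json") // placeholder path
	if err != nil {
		log.Fatalf("failed to parse config: %v", err)
	}
	// Normallize fills defaults and validates the parsed configuration.
	if err := cfg.Normallize(); err != nil {
		log.Fatalf("invalid config: %v", err)
	}
	log.Printf("task %q, log level %s", cfg.Task.Name, cfg.LogLevel)
}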

type KafkaConfig

type KafkaConfig struct {
	Brokers string
	Version string
	TLS     struct {
		Enable         bool
		CaCertFiles    string // Required. The CA cert.pem with which the Kafka brokers' certs are signed.
		ClientCertFile string // Required for client authentication. The client cert.pem.
		ClientKeyFile  string // Required if and only if ClientCertFile is present. The client key.pem.

		TrustStoreLocation string // CA certificate in JKS format, used to extract the CA cert.pem.
		TrustStorePassword string
		KeystoreLocation   string // Client certificate and key in JKS format, used to extract the client cert.pem and key.pem.
		KeystorePassword   string
		EndpIdentAlgo      string
	}
	// Sasl is a simplified sarama.Config.Net.SASL that supports only SASL/PLAIN and SASL/GSSAPI (Kerberos).
	Sasl struct {
		// Whether or not to use SASL authentication when connecting to the broker
		// (defaults to false).
		Enable bool
		// Mechanism is the name of the enabled SASL mechanism.
		// Possible values: PLAIN, SCRAM-SHA-256, SCRAM-SHA-512, GSSAPI (defaults to PLAIN)
		Mechanism string
		// Username is the authentication identity (authcid) to present for
		// SASL/PLAIN or SASL/SCRAM authentication
		Username string
		// Password for SASL/PLAIN or SASL/SCRAM authentication
		Password string
		GSSAPI   struct {
			AuthType           int // 1: KRB5_USER_AUTH, 2: KRB5_KEYTAB_AUTH
			KeyTabPath         string
			KerberosConfigPath string
			ServiceName        string
			Username           string
			Password           string
			Realm              string
			DisablePAFXFAST    bool
		}
	}
}

KafkaConfig configuration parameters
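
Because TLS and Sasl are anonymous struct fields, it is easiest to fill them field by field rather than with a nested composite literal. A sketch with SASL/PLAIN enabled, assuming the config package is imported; brokers, version, and credentials are placeholders:

var kcfg config.KafkaConfig
kcfg.Brokers = "kafka-1:9092,kafka-2:9092"
kcfg.Version = "2.5.0"

// TLS disabled in this sketch; see the TLS struct fields above for cert options.
kcfg.TLS.Enable = false

// SASL/PLAIN authentication; Mechanism defaults to PLAIN when empty.
kcfg.Sasl.Enable = true
kcfg.Sasl.Mechanism = "PLAIN"
kcfg.Sasl.Username = "sinker"
kcfg.Sasl.Password = "secret"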

type NacosConfManager added in v1.7.1

type NacosConfManager struct {
	// contains filtered or unexported fields
}

func (*NacosConfManager) GetConfig added in v1.7.1

func (ncm *NacosConfManager) GetConfig() (conf *Config, err error)

func (*NacosConfManager) Init added in v1.7.1

func (ncm *NacosConfManager) Init(properties map[string]interface{}) (err error)

func (*NacosConfManager) PublishConfig added in v1.7.1

func (ncm *NacosConfManager) PublishConfig(conf *Config) (err error)
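
A usage sketch, assuming log and the config package are imported. The property keys in the map below are assumptions for illustration only and must be matched to what NacosConfManager.Init actually reads:

ncm := &config.NacosConfManager{}
props := map[string]interface{}{
	// Hypothetical keys; check the Init implementation for the real ones.
	"host":     "127.0.0.1",
	"port":     8848,
	"username": "nacos",
	"password": "nacos",
	"group":    "DEFAULT_GROUP",
	"dataId":   "clickhouse_sinker",
}
if err := ncm.Init(props); err != nil {
	log.Fatalf("nacos init: %v", err)
}
cfg, err := ncm.GetConfig()
if err != nil {
	log.Fatalf("get config: %v", err)
}
_ = cfg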

type RemoteConfManager added in v1.7.1

type RemoteConfManager interface {
	Init(properties map[string]interface{}) error
	// GetConfig fetches the config. The manager shall not reference the returned Config object after the call.
	GetConfig() (conf *Config, err error)
	// PublishConfig publishes the config.
	PublishConfig(conf *Config) (err error)
}

RemoteConfManager can be implemented by many backends: Nacos, Consul, etcd, ZooKeeper...
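
To illustrate the contract, here is a hypothetical file-backed implementation written outside the package; fileConfManager and its "path" property are invented for this sketch, which assumes fmt and the config package are imported:

type fileConfManager struct {
	path string
}

// Compile-time check that the sketch satisfies the interface.
var _ config.RemoteConfManager = (*fileConfManager)(nil)

func (m *fileConfManager) Init(properties map[string]interface{}) error {
	p, ok := properties["path"].(string)
	if !ok {
		return fmt.Errorf("fileConfManager: missing string property \"path\"")
	}
	m.path = p
	return nil
}

func (m *fileConfManager) GetConfig() (*config.Config, error) {
	// Delegate to the package's own local-file parser.
	return config.ParseLocalCfgFile(m.path)
}

func (m *fileConfManager) PublishConfig(conf *config.Config) error {
	return fmt.Errorf("fileConfManager: publishing is not supported")
}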

type TaskConfig

type TaskConfig struct {
	Name string

	KafkaClient   string
	Topic         string
	ConsumerGroup string

	// Earliest, when true, consumes messages from the oldest position.
	Earliest bool
	Parser   string
	// CsvFormat lists the CSV column titles; used when Parser is csv.
	CsvFormat []string
	Delimiter string

	TableName string

	// AutoSchema automatically fetches the table schema from ClickHouse.
	AutoSchema     bool
	ExcludeColumns []string
	Dims           []struct {
		Name       string
		Type       string
		SourceName string
	} `json:"dims"`
	// DynamicSchema adds columns present in the message to ClickHouse. Requires AutoSchema to be true.
	DynamicSchema struct {
		Enable        bool
		Cluster       string
		DistTblPrefix string
		MaxDims       int // upper limit on the number of dynamic columns; <= 0 means math.MaxInt16. Protects against dirty-data attacks.
	}

	// ShardingKey is the column name against which sharding is performed.
	ShardingKey string `json:"shardingKey,omitempty"`
	// ShardingPolicy is `stripe,<interval>` (requires ShardingKey to be numerical) or `hash` (requires ShardingKey to be a string).
	ShardingPolicy string `json:"shardingPolicy,omitempty"`

	FlushInterval    int    `json:"flushInterval,omitempty"`
	BufferSize       int    `json:"bufferSize,omitempty"`
	MinBufferSize    int    `json:"minBufferSize,omitempty"`
	MsgSizeHint      int    `json:"msgSizeHint,omitempty"`
	LayoutDate       string `json:"layoutDate,omitempty"`
	LayoutDateTime   string `json:"layoutDateTime,omitempty"`
	LayoutDateTime64 string `json:"layoutDateTime64,omitempty"`
	TimeZone         string `json:"timezone"`
}

Task configuration parameters
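
A minimal task sketch with a JSON parser and automatic schema, assuming the config package is imported. Names, topic, and sizes are placeholders, and the Dims / DynamicSchema fields are left at their zero values:

var task config.TaskConfig
task.Name = "nginx_access_log"
task.Topic = "nginx_access_log"
task.ConsumerGroup = "clickhouse_sinker"
task.Earliest = true
task.Parser = "json"
task.TableName = "nginx_access_log"
task.AutoSchema = true
task.ExcludeColumns = []string{"_raw"}

task.FlushInterval = 5 // flush interval; the unit is whatever the sinker expects (assumption: seconds)
task.BufferSize = 1 << 18
task.TimeZone = "UTC"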
