Documentation ¶
Index ¶
Constants ¶
This section is empty.
Variables ¶
This section is empty.
Functions ¶
func InitLoggerZap ¶
InitLoggerZap 初始化zap日志服务
会加入默认的一个模块空间,当不传参调用GetLogger()时, 就是使用默认的模块空间
当启用elk时,logger根据provider配置使用redis队列或nats publish等作为媒介,需要在logstash侧配置对应的pipeline。队列的key取决于日志文件名和appName的组合,如: 日志文件名=logs/app.log,appName=app,则队列名称为 => app:logs/app.log
func MarshalInterfaceValue ¶
func MarshalInterfaceValue(obj interface{}) string
MarshalInterfaceValue 将interface序列化成字符串
主要用于日志记录
Types ¶
type Conf ¶
type Conf struct {
	Env        string   `yaml:"env" toml:"env" json:"env" default:"prod"`                            //日志环境,prod:生产环境,dev:开发环境
	Level      string   `yaml:"level" toml:"level" json:"level" default:"info"`                      //日志级别,debug,info,warn,error
	Modules    []string `yaml:"modules" toml:"modules" json:"modules"`                               //模块名称(日志记录到不同的文件中)
	Filename   string   `yaml:"filename" toml:"filename" json:"filename" default:"logs/running.log"` //日志文件名称
	MaxSize    int      `yaml:"max_size" toml:"max_size" json:"max_size" default:"100"`              //日志大小限制,单位MB
	MaxBackups int      `yaml:"max_backups" toml:"max_backups" json:"max_backups" default:"10"`      //最大历史文件保留数量
	Compress   bool     `yaml:"compress" toml:"compress" json:"compress" default:"true"`             //是否压缩历史日志文件
	Exporter   struct {
		Provider string `yaml:"provider" toml:"provider" json:"provider" default:""` //导出器,目前支持redis、redis-cluster、nats和kafka
		Redis    struct {
			ListKey         string            `yaml:"list_key" toml:"list_key" json:"list_key"`                            //redis list的elk日志写入的key
			ConnConf        redis.Conf        `yaml:"conn_conf" toml:"conn_conf" json:"conn_conf"`                         //redis连接配置(单机)
			ClusterConnConf redis.ClusterConf `yaml:"cluster_conn_conf" toml:"cluster_conn_conf" json:"cluster_conn_conf"` //redis连接配置(集群)
		} `yaml:"redis" toml:"redis" json:"redis"`
		Nats struct {
			Subject  string    `yaml:"subject" toml:"subject" json:"subject"`       //nats的发布主题
			ConnConf nats.Conf `yaml:"conn_conf" toml:"conn_conf" json:"conn_conf"` //nats连接配置
		} `yaml:"nats" toml:"nats" json:"nats"`
		Kafka struct {
			Topic    string     `yaml:"topic" toml:"topic" json:"topic"`             //kafka的发布主题
			ConnConf kafka.Conf `yaml:"conn_conf" toml:"conn_conf" json:"conn_conf"` //kafka连接配置
		} `yaml:"kafka" toml:"kafka" json:"kafka"`
	} `yaml:"exporter" toml:"exporter" json:"exporter"` //导出器
}
Conf 日志配置
<yaml example>
logger:
  env: dev
  level: debug
  modules:
    - db
    - schedule
  filename: logs/user_running.log
  max_size: 100
  max_backups: 10
  compress: true
  exporter:
    provider: "redis-cluster"
    nats:
      subject: "logger"
      conn_conf:
        servers:
          - "nats://192.168.134.116:4222"
        username: admin
        password: changeme
    kafka:
      topic: "logger"
      conn_conf:
        addrList:
          - "localhost:9092"
        username: admin
        password: changeme
    redis:
      list_key: "go-sail-user:logger"
      conn_conf:
        addr:
          host: ""
          port: 0
          username: ""
          password: ""
        database: 0
        ssl_enable: false
      cluster_conn_conf:
        ssl_enable: false
        addr_list:
          - host: 192.168.224.114
            port: 6379
            username: ""
            password: 123456
          - host: 192.168.224.114
            port: 6380
            username: ""
            password: 123456
<toml example>
# ::zap日志组件配置 v2::
# 日志环境 dev,prod
env = "dev"
# 日志级别 debug,info,warn,error,dpanic,panic,fatal
level = "info"
# 模块名称
modules = ["db", "schedule"]
# 日志文件名称 需要跟上路径
filename = "logs/running.log"
# 单文件日志大小限制,单位MB
max_size = 100
# 最大历史文件保留数量
max_backups = 10
# 是否压缩历史文件
compress = true
# 日志导出器配置
[logger.exporter]
# 日志导出器介质
provider = "redis"
# nats导出器配置
[logger.exporter.nats]
# nats主题
subject = "logger"
# kafka导出器配置
[logger.exporter.kafka]
# kafka主题
topic = "logger"
# redis导出器配置
[logger.exporter.redis]
# list键名
list_key = "logger"
[logger.exporter.redis.conn_conf]
host = "localhost"
username = ""
port = 6379
password = ""
database = 0
ssl_enable = false
[logger.exporter.redis.cluster_conn_conf]
[[logger.exporter.redis.cluster_conn_conf.addr_list]]
host = "localhost"
username = ""
port = 6380
password = ""
[[logger.exporter.redis.cluster_conn_conf.addr_list]]
host = "localhost"
username = ""
port = 6381
password = ""