
Add Hostname Field @2020-06-11 14:12

zry, 3 years ago
parent commit b03f1d6a12

+ 101 - 0
hieda_ginutil/logger.go

@@ -0,0 +1,101 @@
+package hieda_ginutil
+
+import (
+	"fmt"
+	"git.swzry.com/zry/GoHiedaLogger/hiedalog"
+	"github.com/gin-gonic/gin"
+	"strconv"
+	"time"
+)
+
+type GinLoggerConfig struct {
+	Logger       *hiedalog.HiedaLogger
+	ModuleName   string
+	LevelMapFunc func(int) string
+}
+
+// GinLoggerWithStringLogger returns a gin middleware that logs each request as a single
+// formatted line (time, status, latency, client IP, method, path) through the configured HiedaLogger.
+func GinLoggerWithStringLogger(config GinLoggerConfig) gin.HandlerFunc {
+	return func(c *gin.Context) {
+		start := time.Now()
+		path := c.Request.URL.Path
+		raw := c.Request.URL.RawQuery
+		c.Next()
+		end := time.Now()
+		latency := end.Sub(start)
+		clientIP := c.ClientIP()
+		method := c.Request.Method
+		statusCode := c.Writer.Status()
+		lvs := config.LevelMapFunc(statusCode)
+		// comment := c.Errors.ByType(gin.ErrorTypePrivate).String()
+
+		if raw != "" {
+			path = path + "?" + raw
+		}
+
+		sv := fmt.Sprintf("%v | %3d | %13v | %15s | %-7s %s",
+			end.Format("2006/01/02 - 15:04:05"),
+			statusCode,
+			latency,
+			clientIP,
+			method,
+			path,
+		)
+
+		config.Logger.LogString(config.ModuleName, lvs, sv)
+	}
+}
+
+// GinLoggerWithComplexLogger returns a gin middleware that logs each request as a structured
+// map (time, status, latency, clientIP, method, path, comment) through the configured HiedaLogger.
+func GinLoggerWithComplexLogger(config GinLoggerConfig) gin.HandlerFunc {
+	return func(c *gin.Context) {
+		start := time.Now()
+		path := c.Request.URL.Path
+		raw := c.Request.URL.RawQuery
+		c.Next()
+		end := time.Now()
+		latency := end.Sub(start)
+		clientIP := c.ClientIP()
+		method := c.Request.Method
+		statusCode := c.Writer.Status()
+		lvs := config.LevelMapFunc(statusCode)
+		comment := c.Errors.ByType(gin.ErrorTypePrivate).String()
+
+		if raw != "" {
+			path = path + "?" + raw
+		}
+
+		mv := map[string]string{
+			"time":     end.Format("2006/01/02 - 15:04:05"),
+			"status":   strconv.Itoa(statusCode),
+			"latency":  fmt.Sprintf("%13v", latency),
+			"clientIP": clientIP,
+			"method":   method,
+			"path":     path,
+			"comment":  comment,
+		}
+
+		config.Logger.LogComplex(config.ModuleName, lvs, mv)
+	}
+}
+
+// GetSimpleLevelMapFunc builds a level-mapping function that returns the given level name
+// for each HTTP status class (1xx-5xx) and Default for any other code.
+func GetSimpleLevelMapFunc(Http1xx, Http2xx, Http3xx, Http4xx, Http5xx, Default string) func(int) string {
+	return func(code int) string {
+		switch {
+		case code >= 100 && code < 200:
+			return Http1xx
+		case code >= 200 && code < 300:
+			return Http2xx
+		case code >= 300 && code < 400:
+			return Http3xx
+		case code >= 400 && code < 500:
+			return Http4xx
+		case code >= 500 && code < 600:
+			return Http5xx
+		default:
+			return Default
+		}
+	}
+}
+
+// GetDefaultLevelMapFunc maps 1xx/2xx/3xx responses to INFO, 4xx to WARN,
+// and 5xx (and anything else) to ERROR.
+func GetDefaultLevelMapFunc() func(int) string {
+	return GetSimpleLevelMapFunc(hiedalog.DLN_INFO, hiedalog.DLN_INFO, hiedalog.DLN_INFO, hiedalog.DLN_WARN, hiedalog.DLN_ERROR, hiedalog.DLN_ERROR)
+}
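
Usage note (not part of this commit): a minimal sketch of wiring the new gin middleware into a gin.Engine. It assumes hieda_ginutil is importable as git.swzry.com/zry/GoHiedaLogger/hieda_ginutil, following the import-path pattern of the other packages in this diff; the route and listen address are placeholders.

package main

import (
	"os"

	"git.swzry.com/zry/GoHiedaLogger/hieda_ginutil"
	"git.swzry.com/zry/GoHiedaLogger/hiedabke_writer"
	"git.swzry.com/zry/GoHiedaLogger/hiedalog"
	"github.com/gin-gonic/gin"
)

func main() {
	// Log everything down to DEBUG to stdout via the plain writer backend.
	logger := hiedalog.NewHiedaLogger()
	logger.AddBackend(hiedabke_writer.NewHiedaBackendWriter(os.Stdout), logger.LevelFilter.NameToID("DEBUG"))

	r := gin.New()
	// With the default level map, 2xx/3xx requests are logged as INFO, 4xx as WARN, 5xx as ERROR.
	r.Use(hieda_ginutil.GinLoggerWithStringLogger(hieda_ginutil.GinLoggerConfig{
		Logger:       logger,
		ModuleName:   "gin",
		LevelMapFunc: hieda_ginutil.GetDefaultLevelMapFunc(),
	}))
	r.GET("/ping", func(c *gin.Context) { c.String(200, "pong") })
	_ = r.Run(":8080") // placeholder listen address
}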

+ 80 - 0
hieda_yamlutil/simple_console_backend.go

@@ -0,0 +1,80 @@
+package hieda_yamlutil
+
+import (
+	"fmt"
+	"git.swzry.com/zry/GoHiedaLogger/hiedalog"
+	"github.com/ttacon/chalk"
+	"io"
+	"strings"
+)
+
+type SimpleConsoleBackend struct {
+	wr            io.Writer
+	s_fc_json_key func(string) string
+	s_fc_json_val func(string) string
+	s_lvd         []func(string) string
+}
+
+// NewSimpleConsoleBackend creates a console backend that writes colorized log lines to wr.
+func NewSimpleConsoleBackend(wr io.Writer) *SimpleConsoleBackend {
+	scb := &SimpleConsoleBackend{
+		wr:            wr,
+		s_fc_json_key: chalk.White.NewStyle().WithTextStyle(chalk.Bold).Style,
+		s_fc_json_val: chalk.White.NewStyle().WithTextStyle(chalk.Italic).WithTextStyle(chalk.Underline).Style,
+		s_lvd: []func(string) string{
+			chalk.Red.NewStyle().WithTextStyle(chalk.Bold).WithTextStyle(chalk.Underline).Style,
+			chalk.Red.NewStyle().WithTextStyle(chalk.Bold).Style,
+			chalk.Red.NewStyle().Style,
+			chalk.Yellow.NewStyle().Style,
+			chalk.Green.NewStyle().Style,
+			chalk.Cyan.NewStyle().Style,
+			chalk.Blue.NewStyle().Style,
+		},
+	}
+	return scb
+}
+
+func (b *SimpleConsoleBackend) EmitStringLog(module string, level hiedalog.HiedaLogLevel, content string) {
+	var lv string
+	if level.LevelNumber < 7 {
+		lv = b.s_lvd[level.LevelNumber](level.Name)
+	} else {
+		lv = level.Name
+	}
+	_, _ = fmt.Fprintf(b.wr, "<%s> [%s] %s\n", lv, module, content)
+}
+
+func (b *SimpleConsoleBackend) EmitComplexLog(module string, level hiedalog.HiedaLogLevel, data map[string]string) {
+	b.EmitStringLog(module, level, b.prettyComplexToString(data))
+}
+
+// prettyComplexToString renders the map on a single "k:v, k:v" line when it has fewer than
+// five entries and stays under roughly 160 characters, otherwise one key per line.
+func (b *SimpleConsoleBackend) prettyComplexToString(data map[string]string) string {
+	// Guard: an empty map would otherwise make the single-line branch slice below its bounds.
+	if len(data) == 0 {
+		return ""
+	}
+	sb := strings.Builder{}
+	scn := 0
+	xr := false
+	if len(data) < 5 {
+		for k, v := range data {
+			scn += len(k) + len(v) + 2
+			if scn > 160 {
+				xr = true
+				break
+			}
+			sb.WriteString(b.s_fc_json_key(k))
+			sb.WriteRune(':')
+			sb.WriteString(b.s_fc_json_val(v))
+			sb.WriteString(", ")
+		}
+		if !xr {
+			so := sb.String()
+			return so[:len(so)-2]
+		}
+	}
+	sb = strings.Builder{}
+	sb.WriteRune('\n')
+	for k, v := range data {
+		sb.WriteString(b.s_fc_json_key(k))
+		sb.WriteRune(':')
+		sb.WriteString(b.s_fc_json_val(v))
+		sb.WriteRune('\n')
+	}
+	return sb.String()
+}
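
Usage note (not part of this commit): a minimal sketch of registering the colored console backend directly, without going through the YAML helper. The module names and messages are illustrative only.

package main

import (
	"os"

	"git.swzry.com/zry/GoHiedaLogger/hieda_yamlutil"
	"git.swzry.com/zry/GoHiedaLogger/hiedalog"
)

func main() {
	cb := hieda_yamlutil.NewSimpleConsoleBackend(os.Stdout)
	logger := hiedalog.NewHiedaLogger()
	logger.AddBackend(cb, logger.LevelFilter.NameToID("VERBOSE"))

	// Small complex logs are rendered on one line, larger ones one key per line.
	logger.LogString("demo", hiedalog.DLN_INFO, "colored console output")
	logger.LogComplex("demo", hiedalog.DLN_WARN, map[string]string{
		"event": "cache_miss",
		"key":   "user:42",
	})
}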

+ 70 - 0
hieda_yamlutil/yaml_def.go

@@ -0,0 +1,70 @@
+package hieda_yamlutil
+
+type CommonLogConfigYAML_BKE_AliSLS_ExtendCfg struct {
+	TotalSizeLnBytes      int64  `yaml:"total_size_ln_bytes"`
+	MaxIoWorkerCount      int64  `yaml:"max_io_worker_count"`
+	MaxBlockSec           int    `yaml:"max_block_sec"`
+	MaxBatchSize          int64  `yaml:"max_batch_size"`
+	MaxBatchCount         int    `yaml:"max_batch_count"`
+	LingerMs              int64  `yaml:"linger_ms"`
+	Retries               int    `yaml:"retries"`
+	MaxReservedAttempts   int    `yaml:"max_reserved_attempts"`
+	BaseRetryBackoffMs    int64  `yaml:"base_retry_backoff_ms"`
+	MaxRetryBackoffMs     int64  `yaml:"max_retry_backoff_ms"`
+	AdjustShargHash       bool   `yaml:"adjust_sharg_hash"`
+	Buckets               int    `yaml:"buckets"`
+	AllowLogLevel         string `yaml:"allow_log_level"`
+	LogFileName           string `yaml:"log_file_name"`
+	IsJsonType            bool   `yaml:"is_json_type"`
+	LogMaxSize            int    `yaml:"log_max_size"`
+	LogMaxBackups         int    `yaml:"log_max_backups"`
+	LogCompress           bool   `yaml:"log_compress"`
+	NoRetryStatusCodeList []int  `yaml:"no_retry_status_code_list"`
+}
+
+type CommonLogConfigYAML_BKE_AliSLS struct {
+	EnableAliSLSProducerDebug bool                                      `yaml:"alisls_producer_debug"`
+	Hostname                  string                                    `yaml:"hostname"`
+	AppName                   string                                    `yaml:"appname"`
+	Appkey                    string                                    `yaml:"ak_id"`
+	Secret                    string                                    `yaml:"ak_secret"`
+	Endpoint                  string                                    `yaml:"endpoint"`
+	Project                   string                                    `yaml:"project"`
+	Logstore                  string                                    `yaml:"logstore"`
+	Topic                     string                                    `yaml:"topic"`
+	EmitIP                    string                                    `yaml:"emit_ip"`
+	OverrideWithRealIP        bool                                      `yaml:"override_with_real_ip"`
+	ExtendAliSLSConfig        *CommonLogConfigYAML_BKE_AliSLS_ExtendCfg `yaml:"extend_alisls_config"`
+}
+
+type CommonLogConfigYAML_BKE_Console struct {
+	To string `yaml:"to"`
+}
+
+type CommonLogConfigYAML_BKE_File_RotateConfig struct {
+	MaxSize    int  `yaml:"maxsize"`
+	MaxAge     int  `yaml:"maxage"`
+	MaxBackups int  `yaml:"maxbackups"`
+	LocalTime  bool `yaml:"localtime"`
+	Compress   bool `yaml:"compress"`
+}
+
+type CommonLogConfigYAML_BKE_File struct {
+	Filename     string                                     `yaml:"filename"`
+	FVConsole    bool                                       `yaml:"fvconsole"`
+	Rotate       bool                                       `yaml:"rotate"`
+	RotateConfig *CommonLogConfigYAML_BKE_File_RotateConfig `yaml:"rotate_config"`
+}
+
+type CommonLogConfigYAML_Backend struct {
+	Type          string                           `yaml:"type"`
+	Level         string                           `yaml:"level"`
+	ConsoleConfig *CommonLogConfigYAML_BKE_Console `yaml:"console_config"`
+	FileConfig    *CommonLogConfigYAML_BKE_File    `yaml:"file_config"`
+	AliSLSConfig  *CommonLogConfigYAML_BKE_AliSLS  `yaml:"alisls_config"`
+}
+
+type CommonLogConfigYAML struct {
+	Enable   bool                                   `yaml:"enable"`
+	Backends map[string]CommonLogConfigYAML_Backend `yaml:"backends"`
+}

+ 184 - 0
hieda_yamlutil/yamlutil.go

@@ -0,0 +1,184 @@
+package hieda_yamlutil
+
+import (
+	"fmt"
+	"git.swzry.com/zry/GoHiedaLogger/hiedabke_alisls"
+	"git.swzry.com/zry/GoHiedaLogger/hiedabke_writer"
+	"git.swzry.com/zry/GoHiedaLogger/hiedalog"
+	"gopkg.in/natefinch/lumberjack.v2"
+	"io"
+	"os"
+)
+
+func (hyu *HiedaLogYamlUtil) parseConsoleBackend(backend CommonLogConfigYAML_Backend) (hiedalog.HiedaLogBackend, error) {
+	if backend.ConsoleConfig == nil {
+		return nil, fmt.Errorf("no field named 'console_config' for backend type 'console'")
+	}
+	switch backend.ConsoleConfig.To {
+	case "stdout":
+		return NewSimpleConsoleBackend(os.Stdout), nil
+	case "stderr":
+		return NewSimpleConsoleBackend(os.Stderr), nil
+	default:
+		return nil, fmt.Errorf("invalid target '%s' for 'to' field of 'console_config'", backend.ConsoleConfig.To)
+	}
+}
+
+func (hyu *HiedaLogYamlUtil) parseFileBackend(backend CommonLogConfigYAML_Backend) (hiedalog.HiedaLogBackend, error) {
+	if backend.FileConfig == nil {
+		return nil, fmt.Errorf("no field named 'file_config' for backend type 'file'")
+	}
+	var iof io.Writer
+	if backend.FileConfig.Rotate {
+		if backend.FileConfig.RotateConfig == nil {
+			return nil, fmt.Errorf("rotate enabled but no field named 'rotate_config'")
+		}
+		iof = &lumberjack.Logger{
+			Filename:   backend.FileConfig.Filename,
+			MaxSize:    backend.FileConfig.RotateConfig.MaxSize,
+			MaxAge:     backend.FileConfig.RotateConfig.MaxAge,
+			MaxBackups: backend.FileConfig.RotateConfig.MaxBackups,
+			LocalTime:  backend.FileConfig.RotateConfig.LocalTime,
+			Compress:   backend.FileConfig.RotateConfig.Compress,
+		}
+	} else {
+		var err error
+		iof, err = os.OpenFile(backend.FileConfig.Filename, os.O_WRONLY|os.O_APPEND|os.O_CREATE, 0644)
+		if err != nil {
+			return nil, err
+		}
+	}
+	if backend.FileConfig.FVConsole {
+		return NewSimpleConsoleBackend(iof), nil
+	}
+	return hiedabke_writer.NewHiedaBackendWriter(iof), nil
+}
+
+func (hyu *HiedaLogYamlUtil) parseAlislsBackend(backend CommonLogConfigYAML_Backend) (hiedalog.HiedaLogBackend, error) {
+	if backend.AliSLSConfig == nil {
+		return nil, fmt.Errorf("no field named 'alisls_config' for backend type 'alisls'")
+	}
+	bac := backend.AliSLSConfig
+	var srcip string
+	if bac.OverrideWithRealIP {
+		rip, err := getInternetIP()
+		if err != nil {
+			srcip = bac.EmitIP
+		} else {
+			srcip = rip
+		}
+	} else {
+		srcip = bac.EmitIP
+	}
+	alicfg := hiedabke_alisls.AliSLSConfig{
+		DebugMode:       bac.EnableAliSLSProducerDebug,
+		AccessKeyID:     bac.Appkey,
+		AccessKeySecret: bac.Secret,
+		Endpoint:        bac.Endpoint,
+		Project:         bac.Project,
+		Logstore:        bac.Logstore,
+		Topic:           bac.Topic,
+		Hostname:        bac.Hostname,
+		AppName:         bac.AppName,
+		SourceIP:        srcip,
+	}
+	var aliextcfg *hiedabke_alisls.AliSLSConfigExtend
+	if bac.ExtendAliSLSConfig == nil {
+		aliextcfg = nil
+	} else {
+		aec := bac.ExtendAliSLSConfig
+		aliextcfg = &hiedabke_alisls.AliSLSConfigExtend{
+			TotalSizeLnBytes:      aec.TotalSizeLnBytes,
+			MaxIoWorkerCount:      aec.MaxIoWorkerCount,
+			MaxBlockSec:           aec.MaxBlockSec,
+			MaxBatchSize:          aec.MaxBatchSize,
+			MaxBatchCount:         aec.MaxBatchCount,
+			LingerMs:              aec.LingerMs,
+			Retries:               aec.Retries,
+			MaxReservedAttempts:   aec.MaxReservedAttempts,
+			BaseRetryBackoffMs:    aec.BaseRetryBackoffMs,
+			MaxRetryBackoffMs:     aec.MaxRetryBackoffMs,
+			AdjustShargHash:       aec.AdjustShargHash,
+			Buckets:               aec.Buckets,
+			AllowLogLevel:         aec.AllowLogLevel,
+			LogFileName:           aec.LogFileName,
+			IsJsonType:            aec.IsJsonType,
+			LogMaxSize:            aec.LogMaxSize,
+			LogMaxBackups:         aec.LogMaxBackups,
+			LogCompress:           aec.LogCompress,
+			NoRetryStatusCodeList: aec.NoRetryStatusCodeList,
+		}
+	}
+	ah := hiedabke_alisls.NewHiedaBackendAliSLS(alicfg, aliextcfg)
+	hyu.alisls_bknlist = append(hyu.alisls_bknlist, ah)
+	return ah, nil
+}
+
+func getInternetIP() (string, error) {
+	return "", fmt.Errorf("not supported yet")
+}
+
+type HiedaLogYamlUtil struct {
+	alisls_bknlist []*hiedabke_alisls.HiedaBackendAliSLS
+	Logger         *hiedalog.HiedaLogger
+}
+
+func (hyu *HiedaLogYamlUtil) StartAliSLS() {
+	for _, v := range hyu.alisls_bknlist {
+		v.StartProducer()
+	}
+}
+
+func (hyu *HiedaLogYamlUtil) SafeShutdown() {
+	for _, v := range hyu.alisls_bknlist {
+		v.SafeStopProducer()
+	}
+}
+
+func (hyu *HiedaLogYamlUtil) Shutdown(timeout_ms int64) error {
+	for _, v := range hyu.alisls_bknlist {
+		err := v.StopProducer(timeout_ms)
+		if err != nil {
+			return err
+		}
+	}
+	return nil
+}
+
+// CreateHiedaLoggerFromYAMLData builds a HiedaLogger and its backends from the parsed YAML
+// configuration. Unknown level names (NameToID returns 7, i.e. UNKNOWN) or backend types are
+// rejected with an error; when autoStartAliSLS is true, AliSLS producers are started before returning.
+func CreateHiedaLoggerFromYAMLData(cfgdata CommonLogConfigYAML, autoStartAliSLS bool) (*HiedaLogYamlUtil, error) {
+	hyu := &HiedaLogYamlUtil{
+		Logger:         hiedalog.NewHiedaLogger(),
+		alisls_bknlist: make([]*hiedabke_alisls.HiedaBackendAliSLS, 0),
+	}
+	if cfgdata.Enable {
+		if cfgdata.Backends != nil {
+			for k, v := range cfgdata.Backends {
+				lvi := hyu.Logger.LevelFilter.NameToID(v.Level)
+				if lvi == 7 {
+					return nil, fmt.Errorf("unsupported log level '%s' in backend '%s'", v.Level, k)
+				}
+				var cins hiedalog.HiedaLogBackend
+				var err error
+				switch v.Type {
+				case "console":
+					cins, err = hyu.parseConsoleBackend(v)
+				case "file":
+					cins, err = hyu.parseFileBackend(v)
+				case "alisls":
+					cins, err = hyu.parseAlislsBackend(v)
+				default:
+					return nil, fmt.Errorf("unsupported log backend type '%s' in backend '%s'", v.Type, k)
+				}
+				if err != nil {
+					return nil, err
+				}
+				hyu.Logger.AddBackend(cins, lvi)
+			}
+		}
+	}
+	if autoStartAliSLS {
+		hyu.StartAliSLS()
+	}
+	return hyu, nil
+}

+ 36 - 0
hiedabke_alisls/config.go

@@ -0,0 +1,36 @@
+package hiedabke_alisls
+
+type AliSLSConfig struct {
+	DebugMode       bool
+	AccessKeyID     string
+	AccessKeySecret string
+	Endpoint        string
+	Project         string
+	Logstore        string
+	Topic           string
+	Hostname        string
+	AppName         string
+	SourceIP        string
+}
+
+type AliSLSConfigExtend struct {
+	TotalSizeLnBytes      int64
+	MaxIoWorkerCount      int64
+	MaxBlockSec           int
+	MaxBatchSize          int64
+	MaxBatchCount         int
+	LingerMs              int64
+	Retries               int
+	MaxReservedAttempts   int
+	BaseRetryBackoffMs    int64
+	MaxRetryBackoffMs     int64
+	AdjustShargHash       bool
+	Buckets               int
+	AllowLogLevel         string
+	LogFileName           string
+	IsJsonType            bool
+	LogMaxSize            int
+	LogMaxBackups         int
+	LogCompress           bool
+	NoRetryStatusCodeList []int
+}

+ 70 - 0
hiedabke_alisls/hiedabke_alisls.go

@@ -0,0 +1,70 @@
+package hiedabke_alisls
+
+import (
+	"git.swzry.com/zry/GoHiedaLogger/hiedalog"
+	"github.com/aliyun/aliyun-log-go-sdk/producer"
+	"strconv"
+	"time"
+)
+
+type HiedaBackendAliSLS struct {
+	prod     *producer.Producer
+	proj     string
+	logstore string
+	topic    string
+	hostname string
+	appname  string
+	srcip    string
+}
+
+func (b *HiedaBackendAliSLS) EmitStringLog(module string, level hiedalog.HiedaLogLevel, content string) {
+	e := b.generateStringLogEntity(module, level, content)
+	b.emitLogToSLS(e)
+}
+
+func (b *HiedaBackendAliSLS) EmitComplexLog(module string, level hiedalog.HiedaLogLevel, data map[string]string) {
+	e := b.generateComplexLogEntity(module, level, data)
+	b.emitLogToSLS(e)
+}
+
+func (l *HiedaBackendAliSLS) generateStringLogEntity(module string, level hiedalog.HiedaLogLevel, logcontent string) map[string]string {
+	return map[string]string{
+		"hostname": l.hostname,
+		"app":      l.appname,
+		"module":   module,
+		"level_id": strconv.Itoa(int(level.LevelNumber)),
+		"level":    level.Name,
+		"content":  logcontent,
+	}
+}
+
+func (l *HiedaBackendAliSLS) generateComplexLogEntity(module string, level hiedalog.HiedaLogLevel, data map[string]string) map[string]string {
+	m := map[string]string{
+		"hostname": l.hostname,
+		"app":      l.appname,
+		"module":   module,
+		"level_id": strconv.Itoa(int(level.LevelNumber)),
+		"level":    level.Name,
+	}
+	for k, v := range data {
+		m[k] = v
+	}
+	return m
+}
+
+func (l *HiedaBackendAliSLS) emitLogToSLS(entity map[string]string) {
+	log := producer.GenerateLog(uint32(time.Now().Unix()), entity)
+	_ = l.prod.SendLog(l.proj, l.logstore, l.topic, l.srcip, log)
+}
+
+func (l *HiedaBackendAliSLS) StartProducer() {
+	l.prod.Start()
+}
+
+func (l *HiedaBackendAliSLS) StopProducer(timeout_ms int64) error {
+	return l.prod.Close(timeout_ms)
+}
+
+func (l *HiedaBackendAliSLS) SafeStopProducer() {
+	l.prod.SafeClose()
+}

+ 47 - 0
hiedabke_alisls/newlogger.go

@@ -0,0 +1,47 @@
+package hiedabke_alisls
+
+import "github.com/aliyun/aliyun-log-go-sdk/producer"
+
+// NewHiedaBackendAliSLS creates an Aliyun SLS backend. If extraAliSLSConfig is nil, the SDK's
+// default producer config is used and the producer log level follows cfg.DebugMode ('debug' or 'error').
+func NewHiedaBackendAliSLS(cfg AliSLSConfig, extraAliSLSConfig *AliSLSConfigExtend) *HiedaBackendAliSLS {
+	o := &HiedaBackendAliSLS{
+		proj:     cfg.Project,
+		logstore: cfg.Logstore,
+		topic:    cfg.Topic,
+		hostname: cfg.Hostname,
+		appname:  cfg.AppName,
+		srcip:    cfg.SourceIP,
+	}
+	alicfg := producer.GetDefaultProducerConfig()
+	alicfg.Endpoint = cfg.Endpoint
+	alicfg.AccessKeyID = cfg.AccessKeyID
+	alicfg.AccessKeySecret = cfg.AccessKeySecret
+	if extraAliSLSConfig != nil {
+		alicfg.TotalSizeLnBytes = extraAliSLSConfig.TotalSizeLnBytes
+		alicfg.MaxIoWorkerCount = extraAliSLSConfig.MaxIoWorkerCount
+		alicfg.MaxBlockSec = extraAliSLSConfig.MaxBlockSec
+		alicfg.MaxBatchSize = extraAliSLSConfig.MaxBatchSize
+		alicfg.MaxBatchCount = extraAliSLSConfig.MaxBatchCount
+		alicfg.LingerMs = extraAliSLSConfig.LingerMs
+		alicfg.Retries = extraAliSLSConfig.Retries
+		alicfg.MaxReservedAttempts = extraAliSLSConfig.MaxReservedAttempts
+		alicfg.BaseRetryBackoffMs = extraAliSLSConfig.BaseRetryBackoffMs
+		alicfg.MaxRetryBackoffMs = extraAliSLSConfig.MaxRetryBackoffMs
+		alicfg.AdjustShargHash = extraAliSLSConfig.AdjustShargHash
+		alicfg.Buckets = extraAliSLSConfig.Buckets
+		alicfg.AllowLogLevel = extraAliSLSConfig.AllowLogLevel
+		alicfg.LogFileName = extraAliSLSConfig.LogFileName
+		alicfg.IsJsonType = extraAliSLSConfig.IsJsonType
+		alicfg.LogMaxSize = extraAliSLSConfig.LogMaxSize
+		alicfg.LogMaxBackups = extraAliSLSConfig.LogMaxBackups
+		alicfg.LogCompress = extraAliSLSConfig.LogCompress
+		alicfg.NoRetryStatusCodeList = extraAliSLSConfig.NoRetryStatusCodeList
+	} else {
+		if cfg.DebugMode {
+			alicfg.AllowLogLevel = "debug"
+		} else {
+			alicfg.AllowLogLevel = "error"
+		}
+	}
+	o.prod = producer.InitProducer(alicfg)
+	return o
+}

+ 33 - 0
hiedabke_writer/formatter.go

@@ -0,0 +1,33 @@
+package hiedabke_writer
+
+import (
+	"encoding/json"
+	"fmt"
+)
+
+type HiedaComplexLogFormatter interface {
+	FormatHiedaComplexLog(module string, level string, data map[string]string) string
+}
+
+type HiedaStringLogFormatter interface {
+	FormatHiedaStringLog(module string, level string, data string) string
+}
+
+type HiedaComplexLogJsonFormatter struct {
+}
+
+func (f *HiedaComplexLogJsonFormatter) FormatHiedaComplexLog(module string, level string, data map[string]string) string {
+	jd, err := json.Marshal(data)
+	if err != nil {
+		return "{}"
+	}
+	ps := string(jd)
+	return fmt.Sprintf("[%s] <%s> %s", module, level, ps)
+}
+
+type HiedaStringLogDefaultFormatter struct {
+}
+
+func (f *HiedaStringLogDefaultFormatter) FormatHiedaStringLog(module string, level string, data string) string {
+	return fmt.Sprintf("[%s] <%s> %s", module, level, data)
+}
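
Usage note (not part of this commit): a sketch of plugging a custom formatter into the writer backend via SetStringFormatter. TimestampedFormatter is a hypothetical type written for this example, not part of the package.

package main

import (
	"fmt"
	"os"
	"time"

	"git.swzry.com/zry/GoHiedaLogger/hiedabke_writer"
	"git.swzry.com/zry/GoHiedaLogger/hiedalog"
)

// TimestampedFormatter is a hypothetical formatter that prepends an RFC3339 timestamp.
type TimestampedFormatter struct{}

func (f *TimestampedFormatter) FormatHiedaStringLog(module string, level string, data string) string {
	return fmt.Sprintf("%s [%s] <%s> %s", time.Now().Format(time.RFC3339), module, level, data)
}

func main() {
	bw := hiedabke_writer.NewHiedaBackendWriter(os.Stdout)
	bw.SetStringFormatter(&TimestampedFormatter{})

	logger := hiedalog.NewHiedaLogger()
	logger.AddBackend(bw, logger.LevelFilter.NameToID("DEBUG"))
	logger.LogString("demo", hiedalog.DLN_INFO, "string logs now carry a timestamp")
}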

+ 37 - 0
hiedabke_writer/hiedabke_write.go

@@ -0,0 +1,37 @@
+package hiedabke_writer
+
+import (
+	"fmt"
+	"git.swzry.com/zry/GoHiedaLogger/hiedalog"
+	"io"
+)
+
+type HiedaBackendWriter struct {
+	writer           io.Writer
+	stringFormatter  HiedaStringLogFormatter
+	complexFormatter HiedaComplexLogFormatter
+}
+
+// NewHiedaBackendWriter creates a writer backend that formats string logs as "[module] <LEVEL> message"
+// and complex logs as JSON; both formatters can be replaced via the Set*Formatter methods.
+func NewHiedaBackendWriter(writer io.Writer) *HiedaBackendWriter {
+	return &HiedaBackendWriter{
+		writer:           writer,
+		complexFormatter: &HiedaComplexLogJsonFormatter{},
+		stringFormatter:  &HiedaStringLogDefaultFormatter{},
+	}
+}
+
+func (b *HiedaBackendWriter) SetStringFormatter(f HiedaStringLogFormatter) {
+	b.stringFormatter = f
+}
+
+func (b *HiedaBackendWriter) SetComplexFormatter(f HiedaComplexLogFormatter) {
+	b.complexFormatter = f
+}
+
+func (b *HiedaBackendWriter) EmitStringLog(module string, level hiedalog.HiedaLogLevel, content string) {
+	_, _ = fmt.Fprintln(b.writer, b.stringFormatter.FormatHiedaStringLog(module, level.Name, content))
+}
+
+func (b *HiedaBackendWriter) EmitComplexLog(module string, level hiedalog.HiedaLogLevel, data map[string]string) {
+	_, _ = fmt.Fprintln(b.writer, b.complexFormatter.FormatHiedaComplexLog(module, level.Name, data))
+}

+ 69 - 0
hiedalog/hiedalog.go

@@ -0,0 +1,69 @@
+package hiedalog
+
+import "fmt"
+
+type HiedaLogBackend interface {
+	EmitStringLog(module string, level HiedaLogLevel, content string)
+	EmitComplexLog(module string, level HiedaLogLevel, data map[string]string)
+}
+
+type HiedaLogBackendConfig struct {
+	Backend       HiedaLogBackend
+	FilterLevelID uint8
+}
+
+type HiedaLogger struct {
+	BackendConfigs []HiedaLogBackendConfig
+	LevelFilter    HiedaLogLevelFilter
+}
+
+func NewHiedaLogger() *HiedaLogger {
+	hl := &HiedaLogger{
+		BackendConfigs: make([]HiedaLogBackendConfig, 0),
+		LevelFilter:    NewDefaultLevelFilter(),
+	}
+	return hl
+}
+
+// AddBackend registers a backend together with its filter level; a record is emitted to this
+// backend only when its level number is not greater than filterLevelID (lower numbers are more severe).
+func (l *HiedaLogger) AddBackend(backend HiedaLogBackend, filterLevelID uint8) {
+	l.BackendConfigs = append(l.BackendConfigs, HiedaLogBackendConfig{
+		Backend:       backend,
+		FilterLevelID: filterLevelID,
+	})
+}
+
+func (l *HiedaLogger) LogString(module, levelName, content string) {
+	for _, v := range l.BackendConfigs {
+		ce, lv := l.LevelFilter.CanEmitEx(levelName, v.FilterLevelID)
+		if ce {
+			v.Backend.EmitStringLog(module, lv, content)
+		}
+	}
+}
+
+func (l *HiedaLogger) LogComplex(module, levelName string, data map[string]string) {
+	for _, v := range l.BackendConfigs {
+		ce, lv := l.LevelFilter.CanEmitEx(levelName, v.FilterLevelID)
+		if ce {
+			v.Backend.EmitComplexLog(module, lv, data)
+		}
+	}
+}
+
+func (l *HiedaLogger) EmitStringLog(module string, level HiedaLogLevel, content string) {
+	l.LogString(module, level.Name, content)
+}
+
+func (l *HiedaLogger) EmitComplexLog(module string, level HiedaLogLevel, data map[string]string) {
+	l.LogComplex(module, level.Name, data)
+}
+
+func (l *HiedaLogger) LogPrint(module, level string, d ...interface{}) {
+	s := fmt.Sprint(d...)
+	l.LogString(module, level, s)
+}
+
+func (l *HiedaLogger) LogPrintf(module, level, format string, d ...interface{}) {
+	s := fmt.Sprintf(format, d...)
+	l.LogString(module, level, s)
+}
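
Usage note (not part of this commit): a sketch of a custom backend satisfying the HiedaLogBackend interface. MemoryBackend is hypothetical and only meant to show the two Emit methods and the level filtering done by the logger.

package main

import (
	"fmt"

	"git.swzry.com/zry/GoHiedaLogger/hiedalog"
)

// MemoryBackend is a hypothetical backend that keeps rendered log lines in memory.
type MemoryBackend struct {
	Lines []string
}

func (m *MemoryBackend) EmitStringLog(module string, level hiedalog.HiedaLogLevel, content string) {
	m.Lines = append(m.Lines, fmt.Sprintf("[%s] <%s> %s", module, level.Name, content))
}

func (m *MemoryBackend) EmitComplexLog(module string, level hiedalog.HiedaLogLevel, data map[string]string) {
	m.Lines = append(m.Lines, fmt.Sprintf("[%s] <%s> %v", module, level.Name, data))
}

func main() {
	mb := &MemoryBackend{}
	logger := hiedalog.NewHiedaLogger()
	logger.AddBackend(mb, logger.LevelFilter.NameToID("INFO"))

	logger.LogString("demo", hiedalog.DLN_WARN, "kept: WARN (3) <= INFO (4)")
	logger.LogString("demo", hiedalog.DLN_DEBUG, "dropped: DEBUG (6) > INFO (4)")
	fmt.Println(mb.Lines) // only the WARN line is present
}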

+ 82 - 0
hiedalog/level.go

@@ -0,0 +1,82 @@
+package hiedalog
+
+const (
+	DLN_FATAL   = "FATAL"
+	DLN_PANIC   = "PANIC"
+	DLN_ERROR   = "ERROR"
+	DLN_WARN    = "WARN"
+	DLN_INFO    = "INFO"
+	DLN_VERBOSE = "VERBOSE"
+	DLN_DEBUG   = "DEBUG"
+)
+
+type HiedaLogLevel struct {
+	Name        string
+	LevelNumber uint8
+}
+
+type HiedaLogLevelFilter struct {
+	levelNameMap   map[string]HiedaLogLevel
+	defaultLevelID uint8
+}
+
+func (f *HiedaLogLevelFilter) NameToID(name string) uint8 {
+	v, ok := f.levelNameMap[name]
+	if ok {
+		return v.LevelNumber
+	} else {
+		return f.defaultLevelID
+	}
+}
+
+func (f *HiedaLogLevelFilter) NameToLevel(name string) HiedaLogLevel {
+	v, ok := f.levelNameMap[name]
+	if ok {
+		return v
+	} else {
+		return HiedaLogLevel{Name: name, LevelNumber: f.defaultLevelID}
+	}
+}
+
+// CanEmit reports whether a record with the given level name should be emitted to a backend
+// whose filter level is reflevel (emit when the level's number is <= reflevel).
+func (f *HiedaLogLevelFilter) CanEmit(name string, reflevel uint8) bool {
+	id := f.NameToID(name)
+	return id <= reflevel
+}
+
+func (f *HiedaLogLevelFilter) CanEmitEx(name string, reflevel uint8) (bool, HiedaLogLevel) {
+	l := f.NameToLevel(name)
+	return l.LevelNumber <= reflevel, l
+}
+
+func (f *HiedaLogLevelFilter) GetLevelList() []HiedaLogLevel {
+	o := make([]HiedaLogLevel, 0, len(f.levelNameMap))
+	for _, v := range f.levelNameMap {
+		o = append(o, v)
+	}
+	return o
+}
+
+func NewLevelFilter(levelList []HiedaLogLevel, defaultLevelID uint8) HiedaLogLevelFilter {
+	m := make(map[string]HiedaLogLevel)
+	for _, v := range levelList {
+		m[v.Name] = v
+	}
+	return HiedaLogLevelFilter{
+		levelNameMap:   m,
+		defaultLevelID: defaultLevelID,
+	}
+}
+
+func NewDefaultLevelFilter() HiedaLogLevelFilter {
+	ld := []HiedaLogLevel{
+		{Name: "FATAL", LevelNumber: 0},
+		{Name: "PANIC", LevelNumber: 1},
+		{Name: "ERROR", LevelNumber: 2},
+		{Name: "WARN", LevelNumber: 3},
+		{Name: "INFO", LevelNumber: 4},
+		{Name: "VERBOSE", LevelNumber: 5},
+		{Name: "DEBUG", LevelNumber: 6},
+		{Name: "UNKNOWN", LevelNumber: 7},
+	}
+	return NewLevelFilter(ld, 7)
+}
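
Usage note (not part of this commit): a sketch of swapping in a custom level set via NewLevelFilter. The three level names and numbers below are made up for illustration; LevelFilter is a public field on HiedaLogger, so the default filter can simply be replaced.

package main

import (
	"os"

	"git.swzry.com/zry/GoHiedaLogger/hiedabke_writer"
	"git.swzry.com/zry/GoHiedaLogger/hiedalog"
)

func main() {
	// Hypothetical three-level scheme; unknown names fall back to the default ID 2.
	custom := hiedalog.NewLevelFilter([]hiedalog.HiedaLogLevel{
		{Name: "CRIT", LevelNumber: 0},
		{Name: "NOTE", LevelNumber: 1},
		{Name: "TRACE", LevelNumber: 2},
	}, 2)

	logger := hiedalog.NewHiedaLogger()
	logger.LevelFilter = custom
	logger.AddBackend(hiedabke_writer.NewHiedaBackendWriter(os.Stdout), custom.NameToID("NOTE"))

	logger.LogString("demo", "CRIT", "emitted: CRIT (0) <= NOTE (1)")
	logger.LogString("demo", "TRACE", "suppressed: TRACE (2) > NOTE (1)")
}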

+ 1 - 0
tests/alisls_test/.gitignore

@@ -0,0 +1 @@
+config.secret.yaml

+ 64 - 0
tests/alisls_test/main.go

@@ -0,0 +1,64 @@
+package main
+
+import (
+	"fmt"
+	"git.swzry.com/zry/GoHiedaLogger/hiedabke_alisls"
+	"git.swzry.com/zry/GoHiedaLogger/hiedalog"
+	"gopkg.in/yaml.v2"
+	"io/ioutil"
+	"strconv"
+	"time"
+)
+
+type YAML_AliSLSCfg struct {
+	Appkey   string `yaml:"appkey"`
+	Secret   string `yaml:"secret"`
+	Endpoint string `yaml:"endpoint"`
+	Project  string `yaml:"project"`
+	Logstore string `yaml:"logstore"`
+	Topic    string `yaml:"topic"`
+}
+
+var Config YAML_AliSLSCfg
+
+func main() {
+	yb, err := ioutil.ReadFile("config.secret.yaml")
+	if err != nil {
+		fmt.Println("Unable to read config.secret.yaml:", err.Error())
+		return
+	}
+	err = yaml.Unmarshal(yb, &Config)
+	if err != nil {
+		fmt.Println("Unable to parse config.secret.yaml:", err.Error())
+		return
+	}
+	alicfg := hiedabke_alisls.AliSLSConfig{
+		DebugMode:       true,
+		AccessKeyID:     Config.Appkey,
+		AccessKeySecret: Config.Secret,
+		Endpoint:        Config.Endpoint,
+		Project:         Config.Project,
+		Logstore:        Config.Logstore,
+		Topic:           Config.Topic,
+		AppName:         "HiedaLoggerAliSLSTest2",
+		SourceIP:        "10.2.2.2",
+	}
+	lbas := hiedabke_alisls.NewHiedaBackendAliSLS(alicfg, nil)
+	lbas.StartProducer()
+	fmt.Println(time.Now().String(), "Producer Ready.")
+	logger := hiedalog.NewHiedaLogger()
+	logger.AddBackend(lbas, logger.LevelFilter.NameToID("VERBOSE"))
+	logger.AddBackend(lbas, logger.LevelFilter.NameToID("WARN"))
+	logger.LogPrint("app", hiedalog.DLN_INFO, "hello", "world")
+	for _, v := range logger.LevelFilter.GetLevelList() {
+		logger.LogPrintf("logtest", v.Name, "LogLevelFilterTest: name=%s, lvid=%d", v.Name, v.LevelNumber)
+		logger.LogComplex("cltest", v.Name, map[string]string{
+			"name":  v.Name,
+			"id":    strconv.Itoa(int(v.LevelNumber)),
+			"extra": "hello",
+		})
+	}
+	fmt.Println(time.Now().String(), "Log Task Over.")
+	lbas.SafeStopProducer()
+	fmt.Println(time.Now().String(), "Producer Stop.")
+}

+ 6 - 0
tests/alisls_test/sample.config.secret.yaml

@@ -0,0 +1,6 @@
+appkey: "<YourAccessKeyID>"
+secret: "<YourAccessKeySecret>"
+endpoint: "<Region>.log.aliyuncs.com"
+project: "<YourProject>"
+logstore: "<Logstore Name>"
+topic: "<Log Topic>"

+ 25 - 0
tests/stdout_test/main.go

@@ -0,0 +1,25 @@
+package main
+
+import (
+	"git.swzry.com/zry/GoHiedaLogger/hiedabke_writer"
+	"git.swzry.com/zry/GoHiedaLogger/hiedalog"
+	"os"
+	"strconv"
+)
+
+func main() {
+	lbw := hiedabke_writer.NewHiedaBackendWriter(os.Stdout)
+	logger := hiedalog.NewHiedaLogger()
+	logger.AddBackend(lbw, logger.LevelFilter.NameToID("VERBOSE"))
+	logger.AddBackend(lbw, logger.LevelFilter.NameToID("WARN"))
+	logger.LogPrint("app", hiedalog.DLN_INFO, "hello", "world")
+	for _, v := range logger.LevelFilter.GetLevelList() {
+		logger.LogPrintf("logtest", v.Name, "LogLevelFilterTest: name=%s, lvid=%d", v.Name, v.LevelNumber)
+		logger.LogComplex("cltest", v.Name, map[string]string{
+			"name":  v.Name,
+			"id":    strconv.Itoa(int(v.LevelNumber)),
+			"extra": "hello",
+		})
+	}
+
+}

+ 2 - 0
tests/yamlutil_test/.gitignore

@@ -0,0 +1,2 @@
+config.yaml
+*.log

+ 114 - 0
tests/yamlutil_test/config.sample.yaml

@@ -0,0 +1,114 @@
+yaml_util_test:
+  # Enable logger
+  enable: true
+  # Backends
+  backends:
+    # A sample of stdout backend.
+    sample_backend_console_stdout:
+      # Should be one of 'console', 'file', 'alisls'
+      type: "console"
+      # Minimum emit level, should be one of 'DEBUG', 'VERBOSE', 'INFO', 'WARN', 'ERROR', 'PANIC', 'FATAL'
+      level: "DEBUG"
+      # This field should be defined for type 'console'
+      console_config:
+        # Should be one of 'stdout', 'stderr'
+        to: "stdout"
+    # A sample of a file backend without log rotation
+    sample_backend_no_rotate_file:
+      # Should be one of 'console', 'file', 'alisls'
+      type: "file"
+      # Minimum emit level, should be one of 'DEBUG', 'VERBOSE', 'INFO', 'WARN', 'ERROR', 'PANIC', 'FATAL'
+      level: "INFO"
+      # This field should be defined for type 'file'
+      file_config:
+        # The path to log file
+        filename: "no-rotate-info.log"
+        # Use 'true' to apply the console backend formatter to this file backend
+        fvconsole: false
+        # Use 'false' to disable log rotation
+        rotate: false
+    # A sample of a file backend with log rotation
+    sample_backend_rotate_file:
+      # Should be one of 'console', 'file', 'alisls'
+      type: "file"
+      # Minimum emit level, should be one of 'DEBUG', 'VERBOSE', 'INFO', 'WARN', 'ERROR', 'PANIC', 'FATAL'
+      level: "VERBOSE"
+      # This field should exist for type 'file'
+      file_config:
+        # The path to log file
+        filename: "rotate-verbose.log"
+        # Use 'true' to apply the console backend formatter to this file backend
+        fvconsole: false
+        # Use 'true' to enable log rotation
+        rotate: true
+        # This field should exist when rotation is enabled
+        rotate_config:
+          # The maximum size in megabytes of the log file before it gets rotated
+          maxsize: 10
+          # The maximum number of days to retain old log files based on the timestamp encoded in their filename.
+          # Note that a day is defined as 24 hours and may not exactly correspond to calendar days due to daylight
+          # savings, leap seconds, etc. '0' means old log files are never removed based on age.
+          maxage: 0
+          # The maximum number of old log files to retain. '0' retains all old log files
+          # (though MaxAge may still cause them to get deleted.)
+          maxbackups: 0
+          # This determines if the time used for formatting the timestamps in backup files is the computer's local time.
+          # 'false' to use UTC time, 'true' to use local time.
+          localtime: true
+          # This determines if the rotated log files should be compressed using gzip.
+          # 'true' to compress rotated files with gzip, 'false' to leave them uncompressed
+          compress: true
+    sample_backend_alisls:
+      # Should be one of 'console', 'file', 'alisls'
+      type: "alisls"
+      # Minimum emit level, should be one of 'DEBUG', 'VERBOSE', 'INFO', 'WARN', 'ERROR', 'PANIC', 'FATAL'
+      level: "DEBUG"
+      # This field should be defined for type 'alisls'
+      alisls_config:
+        # Use 'true' to enable the AliSLS producer debug log.
+        # The AliSLS producer prints its own log to stdout and cannot be silenced entirely,
+        # but its level can be chosen: 'true' selects level 'debug', 'false' selects level 'error'.
+        alisls_producer_debug: false
+        # Your Hieda Logger hostname
+        hostname: "<Your HiedaLogger Hostname>"
+        # Your Hieda Logger app name
+        appname: "<Your HiedaLogger App Name>"
+        # Your AliSLS appkey id
+        ak_id: "<Your AliSLS Appkey ID>"
+        # Your AliSLS appkey secret
+        ak_secret: "<Your AliSLS Appkey Secret>"
+        # Your AliSLS endpoint
+        endpoint: "<Your AliSLS Endpoint>"
+        # Your AliSLS project
+        project: "<Your AliSLS Project>"
+        # Your AliSLS logstore
+        logstore: "<Your AliSLS Logstore>"
+        # Your AliSLS topic
+        topic: "<Your AliSLS Topic>"
+        # The IP address for AliSLS emit parameter 'source'
+        emit_ip: "0.0.0.0"
+        # Use 'true' to override 'emit_ip' above with your real Internet IP address if possible
+        override_with_real_ip: false
+        # Do not specify this in most situations. Refer to the Aliyun SLS documentation.
+#        extend_alisls_config:
+#          total_size_ln_bytes: 104857600
+#          max_io_worker_count: 50
+#          max_block_sec: 60
+#          max_batch_size: 524288
+#          max_batch_count: 4096
+#          linger_ms: 2000
+#          retries: 10
+#          max_reserved_attempts: 11
+#          base_retry_backoff_ms: 100
+#          max_retry_backoff_ms: 50000
+#          adjust_sharg_hash: true
+#          buckets: 64
+#          allow_log_level: ""
+#          log_file_name: ""
+#          is_json_type: false
+#          log_max_size: 0
+#          log_max_backups: 0
+#          log_compress: false
+#          no_retry_status_code_list:
+#            - 400
+#            - 404

+ 47 - 0
tests/yamlutil_test/main.go

@@ -0,0 +1,47 @@
+package main
+
+import (
+	"fmt"
+	"git.swzry.com/zry/GoHiedaLogger/hieda_yamlutil"
+	"git.swzry.com/zry/GoHiedaLogger/hiedalog"
+	"gopkg.in/yaml.v2"
+	"io/ioutil"
+	"strconv"
+	"time"
+)
+
+type Config struct {
+	HLCfg hieda_yamlutil.CommonLogConfigYAML `yaml:"yaml_util_test"`
+}
+
+func main() {
+	ycfd, err := ioutil.ReadFile("config.yaml")
+	if err != nil {
+		fmt.Println("Failed to read 'config.yaml':", err)
+		return
+	}
+	var ycd Config
+	err = yaml.Unmarshal(ycfd, &ycd)
+	if err != nil {
+		fmt.Println("Failed to parse 'config.yaml':", err)
+		return
+	}
+	hyu, err := hieda_yamlutil.CreateHiedaLoggerFromYAMLData(ycd.HLCfg, true)
+	if err != nil {
+		fmt.Println("Failed to initialize logger:", err)
+		return
+	}
+	logger := hyu.Logger
+	logger.LogPrint("app", hiedalog.DLN_INFO, "hello", "world")
+	for _, v := range logger.LevelFilter.GetLevelList() {
+		logger.LogPrintf("logtest", v.Name, "LogLevelFilterTest: name=%s, lvid=%d", v.Name, v.LevelNumber)
+		logger.LogComplex("cltest", v.Name, map[string]string{
+			"name":  v.Name,
+			"id":    strconv.Itoa(int(v.LevelNumber)),
+			"extra": "hello",
+		})
+	}
+	fmt.Println(time.Now().String(), "Log Task Over.")
+	hyu.SafeShutdown()
+	fmt.Println(time.Now().String(), "Producer Stop.")
+}