Golang in Action: A Massive Log Collection System (7) logTransfer: Fetching Log Messages from Kafka

Contents:

GitHub project: https://github.com/PlutoaCharon/Golang_logCollect

Golang in Action: A Massive Log Collection System (1) Project Background

Golang in Action: A Massive Log Collection System (2) Collecting Application Logs into Kafka

Golang in Action: A Massive Log Collection System (3) A Simple Version of logAgent

Golang in Action: A Massive Log Collection System (4) Introduction to etcd and Fetching Configuration from etcd

Golang in Action: A Massive Log Collection System (5) Creating Multiple tailTasks from etcd Configuration Items

Golang in Action: A Massive Log Collection System (6) Watching for Changes to etcd Configuration Items

Golang in Action: A Massive Log Collection System (7) logTransfer: Fetching Log Messages from Kafka

Golang in Action: A Massive Log Collection System (8) logTransfer: Storing Logs in Elasticsearch and Displaying Them with Kibana

In the first six parts we completed the logAgent side of log collection, which writes the log data into Kafka. Next we need to take that data out of Kafka and store it in Elasticsearch.
Project architecture diagram: (figure omitted)

Project logic diagram: (figure omitted)

Downloading and Installing Elasticsearch and Kibana

For downloading and installing Elasticsearch and Kibana, refer to my earlier blog post on the subject.

Using Golang to Work with Elasticsearch

Environment versions:

go 1.14
elasticsearch-7.6.1
kibana-7.6.1-windows-x86_64

We use the third-party library github.com/olivere/elastic.

For Elasticsearch 6.x the import path is github.com/olivere/elastic; for 7.x it is github.com/olivere/elastic/v7. Pick the path that matches the Elasticsearch version you have installed.
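Since this series runs elasticsearch-7.6.1, the examples below use the v7 path. As a quick reference (assuming Go modules are in use, as they are with go 1.14):

// Elasticsearch 6.x:
//   go get github.com/olivere/elastic
// Elasticsearch 7.x (used in this series):
//   go get github.com/olivere/elastic/v7

import "github.com/olivere/elastic/v7"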

The elastic library's official documentation at https://olivere.github.io/elastic/ has more detailed examples.

package main

import (
	"context"
	"fmt"

	"github.com/olivere/elastic/v7"
)

// Tweet is the document we index into Elasticsearch.
type Tweet struct {
	User    string
	Message string
}

func main() {
	// Disable sniffing so the client talks to the single local node directly.
	client, err := elastic.NewClient(elastic.SetSniff(false), elastic.SetURL("http://localhost:9200/"))
	if err != nil {
		fmt.Println("connect es error", err)
		return
	}

	fmt.Println("conn es succ")

	// Index one document with ID 1 into the "twitter" index.
	tweet := Tweet{User: "haohan", Message: "This is a test"}
	_, err = client.Index().
		Index("twitter").
		Id("1").
		BodyJson(tweet).
		Do(context.Background())
	if err != nil {
		panic(err)
	}

	fmt.Println("insert succ")
}

The insert succeeds:

conn es succ
insert succ
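As a quick sanity check (a minimal sketch, not part of the original project; it continues inside the same main function and the variable name got is illustrative), the document can be read back with the same client:

	// read the document back to verify the insert
	got, err := client.Get().
		Index("twitter").
		Id("1").
		Do(context.Background())
	if err != nil {
		panic(err)
	}
	fmt.Printf("got document %s: %s\n", got.Id, got.Source)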


Kafka Consumer Example

Earlier we covered writing data into Kafka. That data now has to be read back out of the message queue and eventually stored in ES, so here is a brief look at consuming data from Kafka.

package main

import (
	"fmt"
	"strings"
	"time"

	"github.com/Shopify/sarama"
)

func main() {

	// Connect to the local Kafka broker.
	consumer, err := sarama.NewConsumer(strings.Split("localhost:9092", ","), nil)
	if err != nil {
		fmt.Printf("Failed to start consumer: %s\n", err)
		return
	}
	partitionList, err := consumer.Partitions("nginx_log")
	if err != nil {
		fmt.Println("Failed to get the list of partitions: ", err)
		return
	}
	fmt.Println(partitionList)

	// Consume each partition in its own goroutine, starting from the newest offset.
	for _, partition := range partitionList {
		pc, err := consumer.ConsumePartition("nginx_log", partition, sarama.OffsetNewest)
		if err != nil {
			fmt.Printf("Failed to start consumer for partition %d: %s\n", partition, err)
			return
		}
		defer pc.AsyncClose()
		go func(pc sarama.PartitionConsumer) {
			for msg := range pc.Messages() {
				fmt.Printf("Partition:%d, Offset:%d, Key:%s, Value:%s", msg.Partition, msg.Offset, string(msg.Key), string(msg.Value))
				fmt.Println()
			}
		}(pc)
	}
	time.Sleep(time.Hour)
	_ = consumer.Close()
}
Partition:0, Offset:34208, Key:, Value:This is a test!
Partition:0, Offset:34209, Key:, Value:

We can now read log messages from Kafka.

Next we start developing the logTransfer service. The data is already in Kafka; logTransfer's job is to consume that data from Kafka and write it into ES.

Initializing the logTransfer Configuration

Project layout:

├─config
│      logTransfer.conf
│
├─es
│      elasticsearch.go
│
├─kafka
│      kafka.go
│
├─logs
│      my.log
│
└─main
        config.go
        log.go
        main.go

main.go contains the initialization logic:

package main

import (
	"github.com/astaxie/beego/logs"
	"logCollect/logTransfer/kafka"
)

func main() {
	// Initialize the configuration
	err := InitConfig("ini", "E:\\Go\\logCollect\\logTransfer\\config\\logTransfer.conf")
	if err != nil {
		panic(err)
	}
	logs.Debug("initialized configuration successfully")

	// Initialize the logging module
	err = initLogger(logConfig.LogPath, logConfig.LogLevel)
	if err != nil {
		panic(err)
	}
	logs.Debug("initialized log module successfully")

	// Initialize Kafka
	err = kafka.InitKafka(logConfig.KafkaAddr, logConfig.KafkaTopic)
	if err != nil {
		logs.Error("failed to initialize Kafka, err:", err)
		return
	}
	logs.Debug("initialized Kafka successfully")
}

logTransfer.conf

[logs]
log_level = debug
log_path = "E:\\Go\\logCollect\\logTransfer\\logs\\my.log"

[kafka]
server_addr = localhost:9092
topic = nginx_log

[elasticsearch]
addr = http://localhost:9200/

config.go

package main

import (
	"fmt"

	"github.com/astaxie/beego/config"
)

type LogConfig struct {
	KafkaAddr  string
	KafkaTopic string
	EsAddr     string
	LogPath    string
	LogLevel   string
}

var (
	logConfig *LogConfig
)

func InitConfig(confType string, filename string) (err error) {
	conf, err := config.NewConfig(confType, filename)
	if err != nil {
		fmt.Printf("failed to initialize config file: %v\n", err)
		return
	}
	// Load the configuration values
	logConfig = &LogConfig{}
	// Log level
	logConfig.LogLevel = conf.String("logs::log_level")
	if len(logConfig.LogLevel) == 0 {
		logConfig.LogLevel = "debug"
	}
	// Log output path
	logConfig.LogPath = conf.String("logs::log_path")
	if len(logConfig.LogPath) == 0 {
		logConfig.LogPath = "E:\\Go\\logCollect\\logTransfer\\logs\\my.log"
	}

	// Kafka
	logConfig.KafkaAddr = conf.String("kafka::server_addr")
	if len(logConfig.KafkaAddr) == 0 {
		err = fmt.Errorf("failed to initialize Kafka addr")
		return
	}
	logConfig.KafkaTopic = conf.String("kafka::topic")
	if len(logConfig.KafkaTopic) == 0 {
		err = fmt.Errorf("failed to initialize Kafka topic")
		return
	}

	// Es
	logConfig.EsAddr = conf.String("elasticsearch::addr")
	if len(logConfig.EsAddr) == 0 {
		err = fmt.Errorf("failed to initialize Es addr")
		return
	}
	return
}

log.go

package main

import (
	"encoding/json"
	"fmt"

	"github.com/astaxie/beego/logs"
)

// convertLogLevel maps a level name from the config file to a beego logs level.
func convertLogLevel(level string) int {

	switch level {
	case "debug":
		return logs.LevelDebug
	case "warn":
		return logs.LevelWarn
	case "info":
		return logs.LevelInfo
	case "trace":
		return logs.LevelTrace
	}
	return logs.LevelDebug
}

// initLogger configures the beego file logger with the given path and level.
func initLogger(logPath string, logLevel string) (err error) {

	config := make(map[string]interface{})
	config["filename"] = logPath
	config["level"] = convertLogLevel(logLevel)
	configStr, err := json.Marshal(config)
	if err != nil {
		fmt.Println("failed to marshal logger config:", err)
		return
	}
	_ = logs.SetLogger(logs.AdapterFile, string(configStr))

	return
}

kafka.go

package kafka

import (
	"strings"

	"github.com/Shopify/sarama"
	"github.com/astaxie/beego/logs"
)

// KafkaClient wraps the sarama consumer together with its address and topic.
type KafkaClient struct {
	client sarama.Consumer
	addr   string
	topic  string
}

var (
	kafkaClient *KafkaClient
)

func InitKafka(addr string, topic string) (err error) {

	kafkaClient = &KafkaClient{}
	consumer, err := sarama.NewConsumer(strings.Split(addr, ","), nil)
	if err != nil {
		logs.Error("failed to start Kafka consumer: %s", err)
		return err
	}
	kafkaClient.client = consumer
	kafkaClient.addr = addr
	kafkaClient.topic = topic
	return
}
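InitKafka only creates the consumer; the actual consumption loop is built in the next part. As a preview, here is a minimal sketch of what a Run function in the same kafka package might look like, mirroring the standalone consumer example above (the Run function and its message handling are illustrative, not the final implementation):

// Run consumes every partition of the configured topic and, for now,
// just logs each message; the next part forwards them to Elasticsearch.
func Run() (err error) {
	partitions, err := kafkaClient.client.Partitions(kafkaClient.topic)
	if err != nil {
		logs.Error("failed to get partition list: %s", err)
		return err
	}
	for _, partition := range partitions {
		pc, errRet := kafkaClient.client.ConsumePartition(kafkaClient.topic, partition, sarama.OffsetNewest)
		if errRet != nil {
			logs.Error("failed to consume partition %d: %s", partition, errRet)
			return errRet
		}
		go func(pc sarama.PartitionConsumer) {
			for msg := range pc.Messages() {
				logs.Debug("read message from kafka: %s", string(msg.Value))
			}
		}(pc)
	}
	return
}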

Run the main function to test it.
The output log is written to logs/my.log:

2020/03/28 17:30:02.744 [D]  initialized log module successfully
2020/03/28 17:30:02.778 [D]  initialized Kafka successfully

Initialization succeeded. Next we will consume data from Kafka and store the retrieved data in ES.
