環境
# Install OpenJDK 8 (required by Elasticsearch/Logstash); fixed typo: "isntall" -> "install"
yum install -y java-1.8.0-openjdk
下載
- elasticsearch
https://www.elastic.co/cn/downloads/elasticsearch
- kibana
https://www.elastic.co/cn/downloads/kibana
- logstash
https://www.elastic.co/cn/downloads/logstash
安裝
本文 centos7.5 elasticsearch-7.3.1 kibana-7.3.2-linux-x86_64 logstash-7.3.2
cd /opt
# Extract Elasticsearch
tar -zxf elasticsearch-7.3.1-linux-x86_64.tar.gz
# Extract Kibana
tar -zxf kibana-7.3.2-linux-x86_64.tar.gz
# Extract Logstash (-z for gzip, consistent with the two archives above; original used -a auto-detect)
tar -zxf logstash-7.3.2.tar.gz
(本文下載到/opt下)
配置
- kibana
# Confirm the Elasticsearch address that Kibana connects to in its config file
cat /opt/kibana-7.3.2-linux-x86_64/config/kibana.yml
# The URLs of the Elasticsearch instances to use for all your queries.
elasticsearch.hosts: ["http://localhost:9200"] # around line 28 of kibana.yml
- logstash
# 新建logstash.conf 處理文件
input {
redis { # read events from a Redis list
type => "common_api_access_log" # type tag applied to each event
host => "localhost" # Redis host
password => '1234' # Redis password
port => "6379" # Redis port
data_type => "list" # Redis key type
key => "common_api_access_log" # Redis list key to consume
}
}
filter {
json {
source => "message" # field holding the raw JSON string
target => "msg" # field the parsed JSON is stored under
skip_on_invalid_json => true
}
}
output {
elasticsearch { # ship events to Elasticsearch
hosts => ["127.0.0.1:9200"]
index => "%{[tags]}-%{+YYYY.MM.dd}" # daily index named from the event's "tags" field
}
}
vim /opt/logstash-7.3.2/config/pipelines.yml
# around line 13 of pipelines.yml
- pipeline.id: another_test
queue.type: persisted
path.config: "/opt/logstash-7.3.2/config/logstash.conf" # load the logstash.conf created above
啓動
# Start Elasticsearch — path fixed: archives were extracted under /opt (original was missing the /opt prefix)
cd /opt/elasticsearch-7.3.1/bin
# Elasticsearch refuses to run as root; `su www` opens a shell as that user — run the next command inside it
su www
./elasticsearch &
# Start Kibana
cd /opt/kibana-7.3.2-linux-x86_64/bin
su www
./kibana &
# Start Logstash
cd /opt/logstash-7.3.2/bin
./logstash &
業務
結合logstash.conf 配置 日誌格式如下
# tags 和 message 爲必須欄位,值可自定義;message 的值最好爲 JSON,這樣可以解析到索引,否則整個 message 將作爲一個字符串
log := `{"tags": "ceshi", "message": {"request_path": "/hello/world", "params": {"a": "b", "c": "d"}}}`
redis := libs.Redis{}
#Lpush爲封裝的redis方法,參數1爲redis key,參數2爲redis value
redis.Lpush("common_api_access_log", log)
查詢
打開kibana dashboard
http://localhost:5601
點擊左側 Management
點擊 Kibana 下的 Index Patterns
點擊 create index pattern
輸入 ceshi*
點擊 next step
選擇 Time Filter field name 爲 @timestamp
點擊 create index pattern
添加完成就可以到 Discover中查詢我們剛寫入的日誌了