logstash模式(pattern)GitHub地址:https://github.com/logstash-plugins/logstash-patterns-core/tree/master/patterns
logstash grok調試地址:http://grok.qiexun.net/
ELK下載地址:https://www.elastic.co/cn/downloads
filebeat 部分配置
filebeat.inputs:
  # Each - is an input. Most options can be set at the input level, so
  # you can use different inputs for various configurations.
  # Below are the input specific configurations.
  - type: log
    # Change to true to enable this input configuration.
    enabled: true
    # Paths that should be crawled and fetched. Glob based paths.
    paths:
      - /data/log/mylog.log
      #- c:\programdata\elasticsearch\logs\*
    # Multiline merging: a new event starts on a line matching
    # multiline.pattern (a YYYY-MM-DD style date prefix); lines that do
    # NOT match (negate: true) are appended after the previous event.
    # NOTE(review): the day-of-month alternation caps February at 28,
    # so Feb 29 log lines will be merged into the previous event —
    # confirm whether leap days matter for these logs.
    multiline.pattern: '^([0-9]?[1-9]|[0-9]{2}[1-9][0-9]{1}|[0-9]{1}[1-9][0-9]{2}|[1-9][0-9]{3})-(((0[13578]|1[02])-(0[1-9]|[12][0-9]|3[01]))|((0[469]|11)-(0[1-9]|[12][0-9]|30))|(02-(0[1-9]|[1][0-9]|2[0-8])))'
    multiline.negate: true
    multiline.match: after
    # Tag used by the logstash filter to select the grok branch.
    tags: ["my_app1"]
    # Custom fields; referenced by the elasticsearch index template
    # as %{[fields][env]} / %{[fields][app_id]} in the logstash output.
    fields:
      app_id: my_app
      env: prod
logstash部分配置
# Logstash pipeline: receive beats events, normalize and parse them,
# then index into Elasticsearch.
input {
  beats {
    # Listening port for filebeat connections; customizable.
    port => 5000
    type => "log"
  }
}

filter {
  # Strip ANSI escape sequences (colors, fonts, etc.) from the line.
  mutate {
    gsub => ["message", "\u001B\[[;\d]*m", ""]
  }

  # Dispatch parsing on the source application's tag.
  if "my_app1" in [tags] or "my_app2" in [tags] {
    grok {
      # Directory of custom patterns (defines MYDATE and GREEDYDATAALL).
      patterns_dir => ["/usr/local/logstash-6.4.3/config/pattern"]
      match => {
        # MYDATE and GREEDYDATAALL are custom patterns.
        message => "%{MYDATE:time}\s*\[%{DATA:thread}\]\s*%{LOGLEVEL:logLevel}\s*%{JAVACLASS:classInfo}\.\s*%{DATA:method}\s*-%{GREEDYDATAALL:message}"
      }
      # Replace the raw message with the captured remainder.
      overwrite => ["message"]
    }
  } else if "my_app3" in [tags] {
    dissect {
      mapping => {
        "message" => "%{time} %{logLevel} %{?useless} --- [%{thread}] %{classInfo} %{?space}: %{message}"
      }
    }
  }

  mutate {
    # Can only remove fields living under _source in the output JSON.
    remove_field => ["beat", "kafka", "offset", "prospector"]
  }
}

output {
  elasticsearch {
    hosts => ["http://127.0.0.1:9200"]
    # Index name built from the custom filebeat fields plus the date.
    index => "logstash-%{[fields][env]}-%{[fields][app_id]}-%{+YYYY.MM.dd}"
    # NOTE(review): document_type is deprecated (removed for ES 7+);
    # fine on the 6.4.x stack this config targets — revisit on upgrade.
    document_type => "%{type}"
  }
}