ELK-5.0.1 Production Practice - Rough Notes

[root@centos7 ~]# cat /etc/redhat-release  (in practice, ELK 5.0.1 needs a kernel of version 3.x or later)

CentOS Linux release 7.2.1511 (Core)

IP: local 192.168.1.73

[root@centos7 ~]# cat /etc/hosts

127.0.0.1   localhost localhost.localdomain localhost4 localhost4.localdomain4

::1         localhost localhost.localdomain localhost6 localhost6.localdomain6

192.168.1.73 centos7.spring.study


[root@centos7 src]# tar zxf elasticsearch-5.0.1.tar.gz
[root@centos7 src]# mv  elasticsearch-5.0.1 /usr/local/
[root@centos7 local]# ln -s elasticsearch-5.0.1 /usr/local/elasticsearch
             
[root@centos7 config]# vim elasticsearch.yml
cluster.name: ranruichun
node.name: "linux-node1"
path.data: /usr/local/elasticsearch/data
path.logs: /usr/local/elasticsearch/logs
bootstrap.memory_lock: true
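
Note: Elasticsearch 5.x binds to localhost by default; for the curl test against 192.168.1.73:9200 further down to work, the node also has to listen on that address. An assumed addition to elasticsearch.yml (not in the original notes):

network.host: 192.168.1.73
http.port: 9200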

#groupadd  elk
#useradd  elk -g elk

#  su elk /usr/local/services/elk/elasticsearch-5.0.1/bin/elasticsearch


Write a startup script
[root@centos7 elasticsearch]# cat  /usr/local/elasticsearch/run.sh
su elk -l -c "nohup /usr/local/elasticsearch/bin/elasticsearch >  /usr/local/elasticsearch/log.out &"

[root@centos7 elasticsearch]# vim /etc/security/limits.conf
* soft nofile 65536
* hard nofile 131072
* soft nproc 2048
* hard nproc 4096
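
Because bootstrap.memory_lock: true is set in elasticsearch.yml above, the elk user also needs its memlock limit raised, otherwise ES 5.x cannot lock memory and fails that bootstrap check. An assumed addition to /etc/security/limits.conf (not in the original notes):

elk soft memlock unlimited
elk hard memlock unlimited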

[root@iZ25mh87ol1Z elasticsearch-5.0.1]# vim /etc/sysctl.conf
vm.max_map_count=655360
vm.swappiness = 0
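
Reload the kernel parameters so they take effect:

# sysctl -p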


[root@centos7 elasticsearch]# curl  http://192.168.1.73:9200
{
 "name" : "linux-node1",
 "cluster_name" : "xxxxxx",
 "cluster_uuid" : "IRmR9sPtSBqIqj5gA7oUiw",
 "version" : {
   "number" : "5.0.1",
   "build_hash" : "080bb47",
   "build_date" : "2016-11-11T22:08:49.812Z",
   "build_snapshot" : false,
   "lucene_version" : "6.2.1"
 },
 "tagline" : "You Know, for Search"
}


# su elk -l -c  "/usr/local/elasticsearch/bin/elasticsearch -d"  (start in the background)

# cd /usr/local/src/
[root@centos7 src]# git clone https://github.com/elastic/elasticsearch-servicewrapper.git


[root@centos7 src]# mv elasticsearch-servicewrapper/service/ /usr/local/elasticsearch/bin/

[root@centos7 service]# /usr/local/elasticsearch/bin/service/elasticsearch  install
Detected RHEL or Fedora:
Installing the Elasticsearch daemon..

[root@centos7 service]# ls /etc/init.d/elasticsearch
/etc/init.d/elasticsearch


[root@centos7 elasticsearch]# curl -i  -XGET 'http://192.168.1.73:9200/_count?pretty' -d '
{
   "query":{
         "match_all":{}
 }
}
'
HTTP/1.1 200 OK
content-type: application/json; charset=UTF-8
content-length: 95

{
 "count" : 0,
 "_shards" : {
   "total" : 0,
   "successful" : 0,
   "failed" : 0
 }
}



Install the head cluster-management plugin
[root@centos7 src]# git clone git://github.com/mobz/elasticsearch-head.git

Reference articles:
http://blog.csdn.net/reblue520/article/details/53909409

http://blog.csdn.net/sulei12341/article/details/52935271?locationNum=4&fps=1
http://hnr520.blog.51cto.com/4484939/1867033

[root@centos7 node_modules]# /usr/local/elasticsearch-head/node_modules/grunt/bin/grunt server


[root@centos7 elasticsearch-head]# npm install grunt --save
npm WARN package.json [email protected] license should be a valid SPDX license expression
[email protected] node_modules/grunt
├── [email protected]
... head still needs quite a few parameter changes before it can access ES (see the articles above)
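
One of those changes: for the standalone head UI to query Elasticsearch 5.x, CORS has to be enabled on the ES side in elasticsearch.yml (then restart ES):

http.cors.enabled: true
http.cors.allow-origin: "*"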


Logstash

# cd  /usr/local/src
# wget https://download.elastic.co/logstash/logstash/logstash-1.5.4.tar.gz
#  tar  zxf  logstash-1.5.4.tar.gz
#  mv logstash-1.5.4  /usr/local/logstash
[root@rui local]# java -version  (verify the Java environment)
java version "1.7.0_09-icedtea"

-- For production, installing via yum is recommended (so say people online who know this better than I do); here is that method:
https://www.elastic.co/guide/en/logstash/current/installing-logstash.html
rpm --import https://artifacts.elastic.co/GPG-KEY-elasticsearch
[root@centos7 local]# cat /etc/yum.repos.d/logstash.repo
[logstash-5.x]
name=Elastic repository for 5.x packages
baseurl=https://artifacts.elastic.co/packages/5.x/yum
gpgcheck=1
gpgkey=https://artifacts.elastic.co/GPG-KEY-elasticsearch
enabled=1
autorefresh=1
type=rpm-md
[root@centos7 ~]# yum install logstash


Start it
/usr/local/logstash/bin/logstash -e 'input { stdin{} } output { stdout{} }'

[root@rui local]# /usr/local/logstash/bin/logstash -e 'input { stdin{} } output { stdout{codec => rubydebug} }'
Logstash startup completed
hehe
{
      "message" => "hehe",
     "@version" => "1",
   "@timestamp" => "2016-12-13T21:50:51.837Z",
         "host" => "rui.study.com"
}

[root@rui local]# /usr/local/logstash/bin/logstash -e 'input { stdin{} } output { elasticsearch { host => "192.168.1.104" protocol => "http" } }'
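
Note this is the old Logstash 1.5 syntax; in the Logstash 5.x elasticsearch output the protocol option is gone and host became hosts, so the 5.x equivalent should look roughly like:

/usr/local/logstash/bin/logstash -e 'input { stdin{} } output { elasticsearch { hosts => ["192.168.1.104:9200"] } }'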

Configuration file, official documentation:
https://www.elastic.co/guide/en/logstash/current/configuration.html


For production use

Grok pattern debugging, really handy!

https://grokdebug.herokuapp.com/
https://grokdebug.herokuapp.com/patterns#

http://www.open-open.com/lib/view/open1453623562651.html

Apache logs --> standard format

Sample of the custom Tomcat log format
[root@iZ2535e0vgsZ filebeat-5.0.1-linux-x86_64]# tail -1  /root/tomcat/tomcat1/logs/fblive-web-www.log.2017-01-22.log
[iZ2535e0vgsZ|10.24.190.246|[fblive-web-www]|2017-01-22 18:44:04.665|[pool-6-thread-1]|WARN |org.hibernate.internal.util.xml.DTDEntityResolver|DTDEntityResolver.java|org.hibernate.internal.util.xml.DTDEntityResolver|resolveEntity|75|1485078461818|HHH000223: Recognized obsolete hibernate namespace http://hibernate.sourceforge.net/. Use namespace http://www.hibernate.org/dtd/ instead. Refer to Hibernate 3.6 Migration Guide!||||


Modified grok patterns for the Tomcat logs

# Log Levels

LOGLEVEL ([Aa]lert|ALERT|[Tt]race|TRACE|[Dd]ebug|DEBUG|[Nn]otice|NOTICE|[Ii]nfo |INFO |[Ww]arn?(?:ing)?|WARN?(?:ING)? |[Ee]rr?(?:or)?|ERR?(?:OR)?|[Cc]rit?(?:ical)?|CRIT?(?:ICAL)?|[Ff]atal|FATAL|[Ss]evere|SEVERE|EMERG(?:ENCY)?|[Ee]merg(?:ency)?)


TOMCAT_DATESTAMP 20%{YEAR}-%{MONTHNUM}-%{MONTHDAY} %{HOUR}:?%{MINUTE}(?::?%{SECOND})

TOMCATLOG %{TOMCAT_DATESTAMP:timestamp} \| %{LOGLEVEL:level} \| %{JAVACLASS:class} - %{JAVALOGMESSAGE:logmessage}
TOMCATFBLOG \[%{IPORHOST:hostname}\|%{IP:serverip}\|%{SYSLOG5424SD:application}\|%{TOMCAT_DATESTAMP:timestamp}\|\[%{DATA:thread}\]\|%{LOGLEVEL:level}\|%{JAVACLASS:logger}\|%{JAVACLASS:file}\|%{JAVACLASS:class}\|%{HOSTNAME:method}\|%{NUMBER:line}\|%{NUMBER:lsn}\|%{GREEDYDATA:msg}
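
Roughly, the sample log line above should break down under TOMCATFBLOG into fields like the following (my reading of the pattern, not captured output):

hostname    => iZ2535e0vgsZ
serverip    => 10.24.190.246
application => [fblive-web-www]
timestamp   => 2017-01-22 18:44:04.665
thread      => pool-6-thread-1
level       => WARN
logger      => org.hibernate.internal.util.xml.DTDEntityResolver
file        => DTDEntityResolver.java
class       => org.hibernate.internal.util.xml.DTDEntityResolver
method      => resolveEntity
line        => 75
lsn         => 1485078461818
msg         => HHH000223: Recognized obsolete hibernate namespace ... (rest of the line)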


Sample Logstash configuration (alerting is done with Linux mail: errors are written to files and a cron job mails them every 5 minutes if any exist; I'll study a more proper approach when I have time, for now errors get sent straight out)

input {
    beats {
        port => 9500
        #mode => "server"
        ssl => false
    }
}

filter {
    if [type] == "apache-accesslog" {
        grok {
            patterns_dir => "/usr/local/services/elk/logstash-5.0.1/logstash-patterns-core/patterns"
            match => { "message" => "%{COMMONAPACHELOG}" }
        }
        geoip {
            source => "clientip"
            add_tag => [ "geoip" ]
        }
        if [clientip] =~ "^100.109" {
            drop {}
        }
        if [request] =~ "server-status" {
            drop {}
        }
        mutate {
            split => ["request", "?"]
        }
        mutate {
            add_field => {
                "requesturl"    => "%{[request][0]}"
                "requestparams" => "%{[request][1]}"
            }
        }
        mutate {
            join => ["request", "?"]
        }
        date {
            match => ["timestamp", "dd/MMM/yyyy:HH:mm:ss Z"]
        }
    }
    if [type] == "tomcat-accesslog" {
        grok {
            patterns_dir => ["/usr/local/services/elk/logstash-5.0.1/logstash-patterns-core/patterns"]
            match => { "message" => "%{TOMCATFBLOG}" }
        }
        if "fblive-api-web" in [application] {
            mutate { replace => { type => "tomcat-fblive-api-web-accesslog" } }
        }
        else if "fblive-web-www" in [application] {
            mutate { replace => { type => "tomcat-fblive-web-www-accesslog" } }
        }
    }
}

output {
    if [type] == "apache-accesslog" {
        elasticsearch {
            template_overwrite => "true"
            hosts => ["127.0.0.1:9200"]
            index => "logstash-apache-accesslog-%{+YYYY.MM.dd}"
        }
    }

    if [type] == "tomcat-fblive-api-web-accesslog" {
        elasticsearch {
            hosts => ["127.0.0.1:9200"]
            index => "tomcat-fblive-api-web-accesslog-%{+YYYY.MM.dd}"
        }
        if [level] == "ERROR" {
            file {
                path => "/root/elk/error_mail/%{+yyyyMMdd}/fblive-api-web%{+HH}.log"
            }
        }
    }

    if [type] == "tomcat-fblive-web-www-accesslog" {
        elasticsearch {
            hosts => ["127.0.0.1:9200"]
            index => "tomcat-fblive-web-www-accesslog-%{+YYYY.MM.dd}"
        }
        if [level] == "ERROR" {
            file {
                path => "/root/elk/error_mail/%{+yyyyMMdd}/fblive-web-www%{+HH}.log"
            }
        }
    }
}
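
Before reloading this in production it is worth a syntax check. Assuming the config is saved as, say, /etc/logstash/conf.d/beats.conf (the path is my placeholder):

/usr/local/services/elk/logstash-5.0.1/bin/logstash -f /etc/logstash/conf.d/beats.conf --config.test_and_exit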


[root@iZ25mh87ol1Z error_mail]# cat sendmail.sh
#!/bin/bash
# Send the collected error-log files to someone by mail.
# The '-8 hour' offset converts local time (UTC+8) to the UTC-based
# date/hour that Logstash used in the %{+yyyyMMdd}/...%{+HH} file path.

function sendErrorMail(){
    file=/root/elk/error_mail/$(date -d '-8 hour' +%Y%m%d/$1%H.log)
    # echo $file

    if [ -f "$file" ]; then
        echo 'send mail'$file
        mail -s '[error]'$1 [email protected],[email protected] < "$file"
        mv "$file" "$file.send"
    else
        echo 'no file:'$file
    fi
}

#end
sendErrorMail fblive-api-web
sendErrorMail fblive-web-www


[root@iZ25mh87ol1Z ~]# crontab  -l

00 10 * * *  /root/elk/elasticsearch-5.0.1/rm_es_tomcat_7_day_ago.sh delete_tomcat

*/5 * * * * /root/elk/error_mail/sendmail.sh

*/30 * * * * /root/elk/error_mail/stat15m.sh
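
rm_es_tomcat_7_day_ago.sh and stat15m.sh are not included in these notes. As a rough idea only, a cleanup script that deletes the Tomcat indices from 7 days ago might look like this (my sketch, index names taken from the Logstash output section above):

#!/bin/bash
# Delete the tomcat access-log indices created 7 days ago (sketch, not the original script).
day=$(date -d '-7 day' +%Y.%m.%d)
for prefix in tomcat-fblive-api-web-accesslog tomcat-fblive-web-www-accesslog; do
    curl -s -XDELETE "http://127.0.0.1:9200/${prefix}-${day}"
    echo
done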



Kibana
http://kibana.logstash.es/content/index.html   Chinese-language guide
https://github.com/chenryn/ELKstack-guide-cn/releases/tag/ELK     download the Chinese guide here to read offline
[root@centos7 src]# tar zxf kibana-5.0.1-linux-x86_64.tar.gz
[root@centos7 src]# mv kibana-5.0.1-linux-x86_64 /usr/local/kibana

[root@centos7 src]# cd /usr/local/kibana/config/
[root@centos7 config]# ll
total 8
-rw-rw-r--. 1 spring spring 4426 Dec 30 10:01 kibana.yml
[root@centos7 config]# vim kibana.yml
elasticsearch.url: "http://192.168.1.73:9200"

elasticsearch.username: "elastic"
elasticsearch.password: "changeme"


For production I put an nginx reverse proxy in front of Kibana to handle access authentication.
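
A minimal sketch of such an nginx reverse proxy with basic auth (server_name and the htpasswd path are placeholders of mine; Kibana listens on 5601 by default):

server {
    listen 80;
    server_name kibana.example.com;                       # placeholder
    location / {
        auth_basic           "Kibana";
        auth_basic_user_file /etc/nginx/kibana.htpasswd;  # create with htpasswd
        proxy_pass           http://127.0.0.1:5601;
        proxy_set_header     Host $host;
        proxy_set_header     X-Real-IP $remote_addr;
    }
}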


Filebeat configuration (the Tomcat logs use a multiline config)
- input_type: log

 # Paths that should be crawled and fetched. Glob based paths.
 paths:
   - /root/apache/logs/access*
 document_type: apache-accesslog

-
 paths:
   - /root/tomcat/tomcat1/logs/fblive*
   - /root/data/../bin/logs/fblive*        
 document_type: tomcat-accesslog
 multiline:
   pattern: ^\[
   negate: true
   match: after
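
The output side of filebeat.yml is not shown above; to ship to the beats input listening on port 9500 in the Logstash config earlier, it should be roughly:

output.logstash:
  hosts: ["logstash-host:9500"]   # placeholder for the Logstash server address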



A real request came in, and ELK proved its worth!!!
elasticdump: migrating and exporting ES data


Export the documents whose request matches "/redenv_AfterShare.ss":
/root/software/node_modules/elasticdump/bin/elasticdump   --input=http://127.0.0.1:9200/logstash-apache-accesslog-2016.12.13   --output=logstash-apache-accesslog-2016.12.13.json   --searchBody '{"query": {"match":{"request":"/redenv_AfterShare.ss"}}}'   --type=data   --sourceOnly


Export only the message field
/root/software/node_modules/elasticdump/bin/elasticdump   --input=http://127.0.0.1:9200/logstash-apache-accesslog-2016.12.04  \
  --output=/root/software/node_modules/elasticdump/bin/apache_accesslog/logstash-apache-accesslog-2016-12-04.json \
--searchBody='{ "_source": "message", "query": {"match_all": {}} }'   --type=data
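
For the migration direction, elasticdump can also load a JSON dump (one taken without --sourceOnly) back into a cluster, roughly like this, with the target index as my placeholder:

/root/software/node_modules/elasticdump/bin/elasticdump \
  --input=logstash-apache-accesslog-2016.12.13.json \
  --output=http://127.0.0.1:9200/logstash-apache-accesslog-restored \
  --type=data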

