How to write log data to Elasticsearch from Spark using the logback logging system

    Our Spark jobs run on YARN, and that environment ships log4j as its logging system. The application we deploy, however, uses logback, which conflicts with log4j, and we cannot touch the jars in the cluster environment. So what can we do? See the code below.

Step 1: Add the Maven dependencies for the required jars

<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
  xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
  <modelVersion>4.0.0</modelVersion>

  <groupId>com.suncreate</groupId>
  <artifactId>logback-adapter</artifactId>
  <version>1.0.0-SNAPSHOT</version>
  <packaging>jar</packaging>

  <name>logback-adapter</name>
  <url>http://maven.apache.org</url>

  <properties>
    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
  </properties>

  <dependencies>
        <dependency>
            <groupId>net.logstash.logback</groupId>
            <artifactId>logstash-logback-encoder</artifactId>
            <version>4.8</version>
            <scope>runtime</scope>
        </dependency>
        <dependency>
            <groupId>com.internetitem</groupId>
            <artifactId>logback-elasticsearch-appender</artifactId>
            <version>1.6</version>
            <scope>compile</scope>
        </dependency>
        <dependency>
            <groupId>org.slf4j</groupId>
            <artifactId>slf4j-api</artifactId>
            <version>1.7.5</version>
            <scope>compile</scope>
        </dependency>
        <dependency>
            <groupId>org.slf4j</groupId>
            <artifactId>log4j-over-slf4j</artifactId>
            <version>1.7.6</version>
            <scope>compile</scope>
        </dependency>
        <dependency>
            <groupId>ch.qos.logback</groupId>
            <artifactId>logback-core</artifactId>
            <version>1.0.11</version>
            <scope>compile</scope>
        </dependency>
        <dependency>
            <groupId>ch.qos.logback</groupId>
            <artifactId>logback-classic</artifactId>
            <version>1.0.11</version>
            <scope>compile</scope>
        </dependency>
    <dependency>
      <groupId>junit</groupId>
      <artifactId>junit</artifactId>
      <version>3.8.1</version>
      <scope>test</scope>
    </dependency>
  </dependencies>
</project>
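The log4j-over-slf4j dependency above ships drop-in replacements for the log4j API classes that delegate to SLF4J, so log4j calls made by libraries bundled inside the application jar are redirected to logback. Below is a minimal sketch of that redirection, assuming the bridge classes are the ones resolved from the application classpath; the class and logger names are only illustrative.

public class BridgeCheck {
    public static void main(String[] args) {
        // With log4j-over-slf4j on the classpath, org.apache.log4j.Logger is the
        // bridge implementation, so this call is routed through SLF4J to logback.
        org.apache.log4j.Logger legacy = org.apache.log4j.Logger.getLogger("bridge-check");
        legacy.info("this message is handled by logback, not log4j");
    }
}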

Step 2: Copy the following code into your own project

package com.suncreate.bigdata.util;

import java.net.MalformedURLException;
import java.util.HashMap;
import java.util.Map;

import com.internetitem.logback.elasticsearch.ElasticsearchAppender;
import com.internetitem.logback.elasticsearch.config.ElasticsearchProperties;
import com.internetitem.logback.elasticsearch.config.HttpRequestHeader;
import com.internetitem.logback.elasticsearch.config.HttpRequestHeaders;
import com.internetitem.logback.elasticsearch.config.Property;

import ch.qos.logback.classic.Level;
import ch.qos.logback.classic.Logger;
import ch.qos.logback.classic.LoggerContext;

/**
 * 
 * Builds a logback logger whose output is written to an Elasticsearch cluster.
 * 
 * @author mgguan
 *
 */
public class LogUtil {

    /** Cache of loggers keyed by class name, so repeated calls reuse the same instance. */
    private static final Map<String, Logger> map = new HashMap<String, Logger>();

    /**
     * 
     * @param clazz     the class doing the logging; must not be null
     * @param esAddress Elasticsearch address as ip:port; must not be null
     * @return a logback Logger with an Elasticsearch appender attached
     */
    public static Logger getEsLogger(Class<?> clazz, String esAddress) {
        return getEsLogger(clazz, esAddress, "logs-%date{yyyy-MM-dd}");
    }

    /**
     * 
     * @param clazz     the class doing the logging; must not be null
     * @param esAddress Elasticsearch address as ip:port; must not be null
     * @param index     Elasticsearch index to write to, defaults to logs-%date{yyyy-MM-dd}; must not be null
     * @return a logback Logger with an Elasticsearch appender attached
     */
    public static Logger getEsLogger(Class<?> clazz, String esAddress, String index) {
        // Return the cached logger if one was already built for this class
        Logger cached = map.get(clazz.getName());
        if (cached != null) {
            return cached;
        }

        LoggerContext logCtx = new LoggerContext();
        ElasticsearchAppender esAppender = new ElasticsearchAppender();
        esAppender.setContext(logCtx);
        esAppender.setName(clazz.getName());
        esAppender.setIndex(index);

        esAppender.setType("doc");
        esAppender.setLoggerName("es-logger");
        esAppender.setErrorLoggerName("errorLoggerName");
        esAppender.setMaxMessageSize(100);
        
        ElasticsearchProperties elasticsearchProperties = new ElasticsearchProperties();
        Property p1 = new Property();
        p1.setName("level");
        p1.setValue("%level");
        Property p2 = new Property();
        p2.setName("logger");
        p2.setValue("%logger");
        Property p3 = new Property();
        p3.setName("thread");
        p3.setValue("%thread");
        Property p4 = new Property();
        p4.setName("stacktrace");
        p4.setValue("%ex");
        elasticsearchProperties.addProperty(p1);
        elasticsearchProperties.addProperty(p2);
        elasticsearchProperties.addProperty(p3);
        elasticsearchProperties.addProperty(p4);
        esAppender.setProperties(elasticsearchProperties);

        try {
            esAppender.setUrl(String.format("http://%s/_bulk", esAddress));
        } catch (MalformedURLException e) {
            e.printStackTrace();
        }
        HttpRequestHeaders headers = new HttpRequestHeaders();
        HttpRequestHeader header = new HttpRequestHeader();
        header.setName("Content-Type");
        header.setValue("application/json");
        headers.addHeader(header);
        esAppender.setHeaders(headers);
        esAppender.start();

        Logger log = logCtx.getLogger(clazz);
        log.setAdditive(false);
        log.setLevel(Level.ALL);
        log.addAppender(esAppender);
        map.put(clazz.getName(), log);
        return log;
    }


}
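A logger built this way writes only to Elasticsearch. If the same entries should also show up in the YARN container stdout, a console appender can be attached to the same logger context. The helper below is an optional sketch, not part of the original utility, and its class and method names are only illustrative.

import ch.qos.logback.classic.Logger;
import ch.qos.logback.classic.LoggerContext;
import ch.qos.logback.classic.encoder.PatternLayoutEncoder;
import ch.qos.logback.classic.spi.ILoggingEvent;
import ch.qos.logback.core.ConsoleAppender;

public class ConsoleAppenderUtil {

    /** Attaches a console appender so entries also reach the container stdout. */
    public static void addConsoleAppender(Logger log, LoggerContext logCtx) {
        PatternLayoutEncoder encoder = new PatternLayoutEncoder();
        encoder.setContext(logCtx);
        encoder.setPattern("%d{HH:mm:ss.SSS} [%thread] %-5level %logger - %msg%n");
        encoder.start();

        ConsoleAppender<ILoggingEvent> console = new ConsoleAppender<ILoggingEvent>();
        console.setContext(logCtx);
        console.setEncoder(encoder);
        console.start();

        log.addAppender(console);
    }
}

After obtaining the logger, something like ConsoleAppenderUtil.addConsoleAppender(log, log.getLoggerContext()) would wire it up.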

Step 3: Obtain the logger with the code below; everything logged through this logger is also written to Elasticsearch

private static final ch.qos.logback.classic.Logger log = LogUtil.getEsLogger(SparkStreaming.class, "192.168.8.8:9200", "logs-test2-%date{yyyy-MM-dd}");
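For reference, here is a minimal driver-side sketch of how such a logger might be used; the SparkStreaming class name, address, and index are placeholders carried over from the line above. Because the logback Logger is not serializable, keep it in a static field on the driver or obtain it again inside executor code instead of capturing it in a closure.

import com.suncreate.bigdata.util.LogUtil;

public class SparkStreaming {

    // Logger whose output is shipped to Elasticsearch (address and index are placeholders)
    private static final ch.qos.logback.classic.Logger log =
            LogUtil.getEsLogger(SparkStreaming.class, "192.168.8.8:9200", "logs-test2-%date{yyyy-MM-dd}");

    public static void main(String[] args) {
        log.info("streaming job starting");
        try {
            // ... build and start the StreamingContext here ...
        } catch (Exception e) {
            // the %ex property configured on the appender captures the stack trace
            log.error("streaming job failed", e);
        }
    }
}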
