將日誌發送到 Kafka 日誌中心

通過集成 Logback 並重寫 Layout 實現。

XML 配置如下：

 

   <!-- Rolling file appender: rolls by date and by size -->
    <appender name="syn" class="ch.qos.logback.core.rolling.RollingFileAppender">
        <!-- Path and name of the log file currently being written -->
        <file>${syn}/log_syn.log</file>
        <!-- Rolling policy: archive by date, split by size within a day -->
        <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
            <!-- Archived-file naming. The file currently being written is named by the
                 <file> element above (may live in a different directory); archived files
                 (e.g. for 2017-09-21) are named by fileNamePattern below.
                 %d{yyyy-MM-dd} is the date part, %i the index within that day. -->
            <fileNamePattern>${syn}/log-syn-%d{yyyy-MM-dd}.%i.log</fileNamePattern>
            <!-- Besides daily rolling, a single file may not exceed maxFileSize (10MB).
                 When exceeded, a new file starts with index 0, e.g. log-syn-2017-09-21.0.log -->
            <timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
                <maxFileSize>10MB</maxFileSize>
            </timeBasedFileNamingAndTriggeringPolicy>
        </rollingPolicy>
        <!-- Append to the existing file on startup instead of truncating -->
        <append>true</append>
        <!-- Delegate line formatting to the custom layout that adds trace info -->
        <encoder class="ch.qos.logback.core.encoder.LayoutWrappingEncoder">
            <layout class="com.rhineetam.ag.config.SynMessageLayout" />
        </encoder>
    </appender>

 


import ch.qos.logback.classic.spi.ILoggingEvent;
import ch.qos.logback.core.LayoutBase;
import cn.hutool.core.thread.ThreadUtil;
import cn.hutool.http.HttpUtil;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.rhineetam.ag.constant.CommonConstant;
import com.rhineetam.ag.core.domain.TLInfo;
import com.rhineetam.ag.entity.LogEntity;
import com.rhineetam.ag.utils.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.BeansException;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import org.springframework.core.env.Environment;
import org.springframework.stereotype.Component;

import java.text.MessageFormat;
import java.time.LocalDateTime;
import java.time.ZoneOffset;
import java.time.format.DateTimeFormatter;
import java.util.HashMap;
import java.util.Map;

/**
 * Custom Logback layout that decorates every log line with per-request trace
 * information (ip, project name, request url, params, uuid) pulled from
 * {@link TLInfo}, and forwards each entry to the Kafka log center through an
 * HTTP bridge whose address comes from the {@code log.http.url} property.
 *
 * NOTE(review): emitting through {@code logger} from inside a layout can
 * recurse back into this layout if that logger is routed to the same
 * appender — confirm the "syn" appender excludes this class's own logger.
 *
 * @author LYL
 * @since 2022/3/30
 */
@Component
public class SynMessageLayout  extends LayoutBase<ILoggingEvent> implements ApplicationContextAware {


    final Logger logger = LoggerFactory.getLogger(SynMessageLayout.class);

    // Populated by setApplicationContext(); static because logback instantiates
    // this class itself while Spring injects the context into its own bean.
    static  ApplicationContext applicationContext;

    // DateTimeFormatter is immutable and thread-safe; one shared instance is fine.
    DateTimeFormatter dtf = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss:SSS");

    /**
     * Formats a logging event. Falls back to the raw formatted message when
     * decoration fails for any reason, so logging itself never throws.
     *
     * @param event the event to lay out
     * @return the decorated log line, or the raw message on failure
     */
    @Override
    public String doLayout(ILoggingEvent event) {
        // Raw message after logback has substituted its {} placeholders.
        String oriLogMsg = event.getFormattedMessage();
        try {
            return insertUUID(event,oriLogMsg);
        } catch (Exception e) {
            // Deliberately stderr rather than logger: logging from inside the
            // layout's own failure path could recurse into this layout.
            e.printStackTrace();
            return oriLogMsg;
        }
    }


    /**
     * Prefixes the message with timestamp, caller, level and per-request trace
     * info, then ships the entry to Kafka as a side effect.
     *
     * @param event     the logging event being laid out
     * @param oriLogMsg the event's formatted message
     * @return the decorated log line, terminated with CRLF
     */
    public String insertUUID(ILoggingEvent event,String oriLogMsg) {
        String uuid = TLInfo.getUUID();
        // getCallerData() may return an empty array when caller data is
        // unavailable — guard against ArrayIndexOutOfBoundsException.
        StackTraceElement[] callers = event.getCallerData();
        boolean hasCaller = callers != null && callers.length > 0;
        String callerClass = hasCaller ? callers[0].getClassName() : "unknown";
        // Render the line number as a String ourselves: MessageFormat applies
        // grouping separators to a raw int argument (1234 -> "1,234").
        String callerLine = hasCaller ? Integer.toString(callers[0].getLineNumber()) : "0";
        oriLogMsg = MessageFormat.format("{0} {1} {2} - {3} : [ip:{4} 項目:{5}, url:{6}, 參數:{7}, uuid:{8}]: {9}",
                dtf.format(LocalDateTime.now()), callerClass,
                callerLine, event.getLevel(),
                TLInfo.getIp(),TLInfo.getProjectName(),
                TLInfo.getRequestUrl(),TLInfo.getParam(),
                uuid, oriLogMsg)+"\r\n";
        sendKafka(event.getLevel().levelStr,event.getFormattedMessage());
        return oriLogMsg;
    }

    /**
     * Sends one log entry to the Kafka log center via the HTTP bridge
     * configured by the {@code log.http.url} property. Never throws.
     *
     * @param level level name of the event (e.g. "INFO")
     * @param msg   the event's raw formatted message
     */
    public void sendKafka(String level,String msg){
        // NOTE(review): ZoneOffset "+8" hard-codes Beijing time — confirm all
        // deployments run in (or expect) UTC+8.
        LogEntity log = new LogEntity(TLInfo.getRequestUrl(),
                TLInfo.getParam(),TLInfo.getUUID(),TLInfo.getProjectName(), level,msg,
                TLInfo.get().getExpand(), LocalDateTime.now().toInstant(ZoneOffset.of("+8")).toEpochMilli(),
                TLInfo.getIp(),System.getProperty("spring.profiles.active"),
                TLInfo.getType());
        try{
            Environment env = applicationContext.getBean(Environment.class);
            String httpUrl = env.getProperty("log.http.url");
            if(StringUtils.isNotEmpty(httpUrl)){
                Map<String,Object> map = new HashMap<>();
                map.put("topic",CommonConstant.KAFKA_LOG_TOPIC);
                map.put("message",log);
                // TODO(review): debug leftover — the async HTTP post below is
                // commented out, so entries are only printed, never shipped.
                System.out.println( JSONObject.toJSONString(map));
//                ThreadUtil.execAsync(()->HttpUtil.post(httpUrl, JSONObject.toJSONString(map)),false);
            }else{
                logger.error("log.http.url 尚未配置 日誌發送失敗");
            }
        }catch (Exception e){
            // Pass the Throwable as the final argument with no "{}" placeholder:
            // SLF4J then prints the full stack trace (the old "{}" was never
            // substituted because the Throwable is consumed separately).
            logger.error("發送日誌記錄出錯", e);
        }
    }

    @Override
    public void setApplicationContext(ApplicationContext applicationContext) throws BeansException {
        // Qualify the static field explicitly; "this." on a static field is misleading.
        SynMessageLayout.applicationContext = applicationContext;
    }
}

 

發表評論
所有評論
還沒有人評論,想成為第一個評論的人麼? 請在上方評論欄輸入並且點擊發布.
相關文章