Sending logs to a Kafka log center

The integration hooks into Logback by overriding its Layout: logback.xml wires a RollingFileAppender to a custom layout, and the layout both rewrites each log line and ships a copy to the log center.

XML

    <!-- Rolling file appender: rolls over by date and by size -->
    <appender name="syn" class="ch.qos.logback.core.rolling.RollingFileAppender">
        <!-- Path and name of the log file currently being written -->
        <file>${syn}/log_syn.log</file>
        <!-- Rolling policy: archive by date and by size -->
        <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
            <!-- Path of the archived log files. Say today is 2017-09-21: the file currently being
                 written is the one named by the file element above, which may point to a different
                 directory than fileNamePattern, so the active log and the archives can live in
                 separate places. The archived 2017-09-21 file is named by fileNamePattern:
                 %d{yyyy-MM-dd} is the date, %i is the index -->
            <fileNamePattern>${syn}/log-syn-%d{yyyy-MM-dd}.%i.log</fileNamePattern>
            <!-- Besides rolling by date, a log file may not exceed 10MB; once it does, a new file is
                 started with an index beginning at 0, e.g. log-syn-2017-09-21.0.log -->
            <timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
                <maxFileSize>10MB</maxFileSize>
            </timeBasedFileNamingAndTriggeringPolicy>
        </rollingPolicy>
        <!-- Append to the existing file instead of truncating it on startup -->
        <append>true</append>
        <!-- Wrap the custom layout in an encoder -->
        <encoder class="ch.qos.logback.core.encoder.LayoutWrappingEncoder">
            <layout class="com.rhineetam.ag.config.SynMessageLayout" />
        </encoder>
    </appender>
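
For the layout to actually run, the appender must be attached to a logger. That wiring is not shown above; a minimal sketch, assuming ${syn} resolves to the log directory and an INFO root level (both values are assumptions, not from the original config):

XML

    <property name="syn" value="/data/logs/syn" />

    <root level="INFO">
        <appender-ref ref="syn" />
    </root>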

Java

import ch.qos.logback.classic.spi.ILoggingEvent;
import ch.qos.logback.core.LayoutBase;
import cn.hutool.core.thread.ThreadUtil;
import cn.hutool.http.HttpUtil;
import com.alibaba.fastjson.JSONObject;
import com.rhineetam.ag.constant.CommonConstant;
import com.rhineetam.ag.core.domain.TLInfo;
import com.rhineetam.ag.entity.LogEntity;
import com.rhineetam.ag.utils.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.BeansException;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import org.springframework.core.env.Environment;
import org.springframework.stereotype.Component;

import java.text.MessageFormat;
import java.time.LocalDateTime;
import java.time.ZoneOffset;
import java.time.format.DateTimeFormatter;
import java.util.HashMap;
import java.util.Map;

/**
 * Custom Logback layout: decorates every log line with the per-request
 * context held in TLInfo and forwards a copy to the Kafka log center over HTTP.
 * @Author LYL
 * @Date 2022/3/30 14:43
 */
@Component
public class SynMessageLayout extends LayoutBase<ILoggingEvent> implements ApplicationContextAware {


    final Logger logger = LoggerFactory.getLogger(SynMessageLayout.class);

    // Set once via ApplicationContextAware; used to look up the Environment lazily.
    static ApplicationContext applicationContext;

    DateTimeFormatter dtf = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss:SSS");

    @Override
    public String doLayout(ILoggingEvent event) {
        // the original formatted log message
        String oriLogMsg = event.getFormattedMessage();
        try {
            return insertUUID(event, oriLogMsg);
        } catch (Exception e) {
            // Fall back to the raw message; don't log here, which could recurse into this layout.
            e.printStackTrace();
            return oriLogMsg;
        }
    }


    /**
     * Prepend the request context and a unique id to the log line.
     */
    public String insertUUID(ILoggingEvent event, String oriLogMsg) {
        String uuid = TLInfo.getUUID();
        oriLogMsg = MessageFormat.format("{0} {1} {2} - {3} : [ip:{4} project:{5}, url:{6}, params:{7}, uuid:{8}]: {9}",
                dtf.format(LocalDateTime.now()), event.getCallerData()[0].getClassName(),
                event.getCallerData()[0].getLineNumber(), event.getLevel(),
                TLInfo.getIp(), TLInfo.getProjectName(),
                TLInfo.getRequestUrl(), TLInfo.getParam(),
                uuid, oriLogMsg) + "\r\n";
        sendKafka(event.getLevel().levelStr, event.getFormattedMessage());
        return oriLogMsg;
    }
    }

    /**
     * Ship the log record to Kafka through the HTTP bridge.
     * @param level log level name
     * @param msg   the formatted log message
     */
    public void sendKafka(String level, String msg) {
        LogEntity log = new LogEntity(TLInfo.getRequestUrl(),
                TLInfo.getParam(), TLInfo.getUUID(), TLInfo.getProjectName(), level, msg,
                TLInfo.get().getExpand(), LocalDateTime.now().toInstant(ZoneOffset.of("+8")).toEpochMilli(),
                TLInfo.getIp(), System.getProperty("spring.profiles.active"),
                TLInfo.getType());
        try {
            Environment env = applicationContext.getBean(Environment.class);
            String httpUrl = env.getProperty("log.http.url");
            if (StringUtils.isNotEmpty(httpUrl)) {
                Map<String, Object> map = new HashMap<>();
                map.put("topic", CommonConstant.KAFKA_LOG_TOPIC);
                map.put("message", log);
                // Post asynchronously so logging never blocks the business thread.
                ThreadUtil.execAsync(() -> HttpUtil.post(httpUrl, JSONObject.toJSONString(map)), false);
            } else {
                logger.error("log.http.url is not configured; the log record was not sent");
            }
        } catch (Exception e) {
            logger.error("Failed to send the log record", e);
        }
    }

    @Override
    public void setApplicationContext(ApplicationContext applicationContext) throws BeansException {
        // Assign through the class, not `this`, since the field is static.
        SynMessageLayout.applicationContext = applicationContext;
    }
}
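
The layout leans on TLInfo, a thread-local holder for per-request context, and on LogEntity, the payload posted to the bridge; neither is shown above. A minimal sketch of what TLInfo might look like, with field names inferred from the calls in SynMessageLayout (the whole class is an assumption, not the author's actual code):

Java

// Hypothetical sketch of the thread-local request-context holder used above.
// Field and method names are inferred from the calls in SynMessageLayout.
public class TLInfo {
    private static final ThreadLocal<TLInfo> HOLDER = ThreadLocal.withInitial(TLInfo::new);

    private String uuid;        // unique id for the current request
    private String ip;
    private String projectName;
    private String requestUrl;
    private String param;
    private String expand;
    private String type;

    public static TLInfo get() { return HOLDER.get(); }
    public static void clear() { HOLDER.remove(); } // call after each request completes

    public static String getUUID()        { return get().uuid; }
    public static String getIp()          { return get().ip; }
    public static String getProjectName() { return get().projectName; }
    public static String getRequestUrl()  { return get().requestUrl; }
    public static String getParam()       { return get().param; }
    public static String getType()        { return get().type; }
    public String getExpand()             { return expand; }
}

A servlet filter or interceptor would populate the holder at the start of each request and call clear() at the end, so values never leak across pooled threads.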


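On the receiving side of log.http.url there must be a service that unwraps the {topic, message} payload and publishes it to Kafka. The post does not show it; a minimal Spring Boot sketch, assuming spring-kafka is on the classpath and a KafkaTemplate<String, String> bean is configured (the /log path and class name are hypothetical):

Java

import com.alibaba.fastjson.JSONObject;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RestController;

// Hypothetical receiving side of log.http.url: unwraps {topic, message}
// and republishes the message onto the named Kafka topic.
@RestController
public class LogBridgeController {

    private final KafkaTemplate<String, String> kafkaTemplate;

    public LogBridgeController(KafkaTemplate<String, String> kafkaTemplate) {
        this.kafkaTemplate = kafkaTemplate;
    }

    @PostMapping("/log")
    public String receive(@RequestBody String body) {
        JSONObject json = JSONObject.parseObject(body);
        String topic = json.getString("topic");
        String message = json.getString("message"); // the LogEntity serialized as JSON
        kafkaTemplate.send(topic, message);
        return "ok";
    }
}

Going through an HTTP bridge keeps the Kafka client dependency out of every service; the trade-off is an extra hop and a second place where log records can be dropped.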