- log4j2,kafka異步輸出的優勢這裏不過多贅述
- 準備步驟
- 去掉 Spring Boot 默認的日誌依賴,引入 Log4j2
<!-- Web starter with the default logging (Logback) excluded so Log4j2 can take over -->
<dependency>
    <groupId>org.springframework.boot</groupId>
    <artifactId>spring-boot-starter-web</artifactId>
    <exclusions>
        <!-- spring-boot-starter-logging pulls in Logback; it must be excluded
             to avoid two competing SLF4J bindings on the classpath -->
        <exclusion>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-logging</artifactId>
        </exclusion>
    </exclusions>
</dependency>
<!-- Log4j2 starter that replaces the excluded default logging -->
<dependency>
    <groupId>org.springframework.boot</groupId>
    <artifactId>spring-boot-starter-log4j2</artifactId>
</dependency>
- 項目中引入 Kafka 相關依賴
<!-- Kafka client used by the Log4j2 KafkaAppender -->
<dependency>
    <groupId>org.apache.kafka</groupId>
    <artifactId>kafka-clients</artifactId>
    <version>2.5.0</version>
    <exclusions>
        <!-- Keep legacy Log4j 1.x (and its SLF4J bridge) off the classpath
             so it cannot clash with Log4j2 -->
        <exclusion>
            <groupId>org.slf4j</groupId>
            <artifactId>slf4j-log4j12</artifactId>
        </exclusion>
        <exclusion>
            <groupId>log4j</groupId>
            <artifactId>log4j</artifactId>
        </exclusion>
    </exclusions>
</dependency>
<!-- NOTE(review): kafka-clients is a pure-Java artifact and does not require
     scala-library; presumably this is needed by other project code — confirm
     before keeping -->
<dependency>
    <groupId>org.scala-lang</groupId>
    <artifactId>scala-library</artifactId>
    <version>2.13.1</version>
</dependency>
- 編寫日誌配置文件,springboot默認識別的配置文件名:log4j2-spring.xml,若更改文件名,需在application.yaml文件中配置
logging:
  path: logs/
  config: log4j2-dev.xml
<?xml version="1.0" encoding="UTF-8"?>
<!--
  Log4j2 configuration: colorized console output, per-level rolling files,
  and an asynchronous Kafka appender that fails over to a local file while
  the broker is unreachable.
-->
<Configuration>
    <Properties>
        <!-- LOG_PATH is injected by Spring Boot from the logging.path property -->
        <Property name="LOG_HOME">${LOG_PATH}</Property>
        <Property name="KAFKA_SERVERS">10.210.100.17:9092</Property>
        <!-- one JSON object per line, no ANSI escapes: used for files and Kafka -->
        <Property name="FILE_PATTERN">{"timeMillis":"%d","thread":"%t","threadId":"%T","traceId":"%X{X-B3-TraceId}","level":"%-5p","className":"%c{1.}.%M(%L)","message":"%m","exception":"%rEx"}%n</Property>
        <!-- colorized, human-readable pattern for the console only -->
        <Property name="CONSOLE_PATTERN">%d %style{[%t]-[%T]}{Cyan} %style{[%X{X-B3-TraceId}]}{Magenta}--%highlight{%-5p}-%c{1.}.%M(%L)-%m-%highlight{%rEx}%n</Property>
    </Properties>
    <Appenders>
        <!-- FIX(review): the two patterns were swapped — the console used
             FILE_PATTERN and Kafka used CONSOLE_PATTERN, which shipped ANSI
             color codes into the Kafka topic. Each now uses the pattern its
             name indicates. -->
        <Console name="Console" target="SYSTEM_OUT" ignoreExceptions="true">
            <PatternLayout pattern="${CONSOLE_PATTERN}" disableAnsi="false" noConsoleNoAnsi="false"/>
        </Console>
        <!-- ignoreExceptions="false" is mandatory here: the send failure must
             propagate so the Failover appender below can take over -->
        <Kafka name="Kafka" topic="log-topic" ignoreExceptions="false">
            <PatternLayout pattern="${FILE_PATTERN}"/>
            <Property name="bootstrap.servers">${KAFKA_SERVERS}</Property>
            <!-- how long a send may block before a broker outage surfaces as an
                 exception (producer default is 30 s); kept small so failover
                 triggers quickly -->
            <Property name="timeout.ms">4000</Property>
        </Kafka>
        <!-- NOTE(review): ${fileName} is never defined in <Properties>; it is
             presumably supplied as a system property or lookup elsewhere —
             confirm, otherwise the literal text "${fileName}" lands in paths. -->
        <RollingFile name="INFO_ROLLING_FILE"
                     fileName="${LOG_HOME}/${fileName}-${date:yyyy-MM-dd}-info.log"
                     filePattern="${LOG_HOME}/${fileName}/${date:yyyy-MM}/info-%d{yyyy-MM-dd}-%i.log.gz"
                     ignoreExceptions="true">
            <!-- accept events at INFO and above, deny everything below -->
            <ThresholdFilter level="info" onMatch="ACCEPT" onMismatch="DENY"/>
            <PatternLayout pattern="${FILE_PATTERN}"/>
            <Policies>
                <TimeBasedTriggeringPolicy interval="4" modulate="true"/>
                <SizeBasedTriggeringPolicy size="100 MB"/>
            </Policies>
            <!-- default keeps only 7 archives per directory; raised to 20 -->
            <DefaultRolloverStrategy max="20"/>
        </RollingFile>
        <RollingFile name="WARN_ROLLING_FILE"
                     fileName="${LOG_HOME}/${fileName}-${date:yyyy-MM-dd}-warn.log"
                     filePattern="${LOG_HOME}/${fileName}/${date:yyyy-MM}/warn-%d{yyyy-MM-dd}-%i.log.gz"
                     ignoreExceptions="true">
            <ThresholdFilter level="warn" onMatch="ACCEPT" onMismatch="DENY"/>
            <!-- <JsonLayout compact="true" eventEol="true"/> would emit JSON but
                 overrides PatternLayout; a hand-built JSON pattern is used instead -->
            <PatternLayout pattern="${FILE_PATTERN}"/>
            <Policies>
                <TimeBasedTriggeringPolicy interval="4" modulate="true"/>
                <SizeBasedTriggeringPolicy size="100 MB"/>
            </Policies>
            <DefaultRolloverStrategy max="20"/>
        </RollingFile>
        <RollingFile name="ERROR_ROLLING_FILE"
                     fileName="${LOG_HOME}/${fileName}-${date:yyyy-MM-dd}-error.log"
                     filePattern="${LOG_HOME}/${fileName}/${date:yyyy-MM}/error-%d{yyyy-MM-dd}-%i.log.gz"
                     ignoreExceptions="true">
            <ThresholdFilter level="error" onMatch="ACCEPT" onMismatch="DENY"/>
            <PatternLayout pattern="${FILE_PATTERN}"/>
            <Policies>
                <TimeBasedTriggeringPolicy interval="4" modulate="true"/>
                <SizeBasedTriggeringPolicy size="100 MB"/>
            </Policies>
            <DefaultRolloverStrategy max="20"/>
        </RollingFile>
        <!-- local sink for events that failed to reach Kafka -->
        <RollingFile name="FAIL_OVER_KAFKA_FILE"
                     fileName="${LOG_HOME}/${fileName}-${date:yyyy-MM-dd}-kafka.log"
                     filePattern="${LOG_HOME}/${fileName}/${date:yyyy-MM}/kafka-%d{yyyy-MM-dd}-%i.log.gz"
                     ignoreExceptions="true">
            <ThresholdFilter level="info" onMatch="ACCEPT" onMismatch="DENY"/>
            <PatternLayout pattern="${FILE_PATTERN}"/>
            <Policies>
                <TimeBasedTriggeringPolicy interval="4" modulate="true"/>
                <SizeBasedTriggeringPolicy size="100 MB"/>
            </Policies>
            <DefaultRolloverStrategy max="20"/>
        </RollingFile>
        <!-- route to Kafka first; on failure, write locally and retry the
             primary every 600 s -->
        <Failover name="Failover" primary="Kafka" retryIntervalSeconds="600">
            <Failovers>
                <AppenderRef ref="FAIL_OVER_KAFKA_FILE"/>
            </Failovers>
        </Failover>
        <!-- decouples Kafka send latency from application threads -->
        <Async name="AsyncKafka">
            <AppenderRef ref="Failover"/>
        </Async>
    </Appenders>
    <Loggers>
        <!-- quieten framework noise to INFO -->
        <Logger name="org.springframework" level="INFO"/>
        <Logger name="org.mybatis" level="INFO"/>
        <!-- keep the Kafka client's own logging tame to avoid recursive
             logging through the Kafka appender -->
        <Logger name="org.apache.kafka" level="INFO"/>
        <Root level="info">
            <AppenderRef ref="Console"/>
            <AppenderRef ref="AsyncKafka"/>
            <AppenderRef ref="WARN_ROLLING_FILE"/>
            <AppenderRef ref="INFO_ROLLING_FILE"/>
            <AppenderRef ref="ERROR_ROLLING_FILE"/>
        </Root>
    </Loggers>
</Configuration>
說明:
timeout.ms 爲 Kafka 宕機後發送端返回異常的等待時間,返回異常即觸發 Failover 機制,默認爲 30 秒。返回異常前會阻塞當前請求,所以此時間可以設置得小一點。
<JsonLayout compact="true" eventEol = "true"/>
能夠將輸出的日誌格式化成JSON,但是會導致PatternLayout失效。不好自定義輸出內容,所以此處自定義JSON格式輸出。