ELK 日誌收集模板

日誌配置文件 (logback-spring.xml):

<?xml version="1.0" encoding="UTF-8"?>
<!-- scan/scanPeriod: logback re-reads this file every 10 seconds.
     debug="true" makes logback print its own internal status messages at
     startup — useful while tuning this template, noisy in production. -->
<configuration scan="true" scanPeriod="10 seconds" debug="true">
	<include resource="org/springframework/boot/logging/logback/defaults.xml"/>

	<!-- Registers %ip in patterns below; resolved by the custom converter. -->
	<conversionRule conversionWord="ip" converterClass="com.collmall.gaia.gfs.core.utils.IPAddressConverter" />
	<springProperty scope="context" name="spring_application_name" source="spring.application.name" />
	<springProperty scope="context" name="server_port" source="server.port" />

	<!-- appID | module |  dateTime | level | requestID | traceID | requestIP | userIP | serverIP | serverPort | processID | thread | location | detailInfo-->
	<property name="CONSOLE_LOG_PATTERN" value="%clr(${spring_application_name}){cyan}||%clr(%d{ISO8601}){faint}|%clr(%p)|%X{requestId}|%X{X-B3-TraceId:-}|%X{requestIp}|%X{userIp}|%ip|${server_port}|${PID}|%clr(%t){faint}|%clr(%.40logger{39}){cyan}.%clr(%method){cyan}:%L|%m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}"/>
	<property name="FILE_LOG_PATTERN" value="${spring_application_name}||%d{ISO8601}|%p|%X{requestId}|%X{X-B3-TraceId:-}|%X{requestIp}|%X{userIp}|%ip|${server_port}|${PID}|%t|%.40logger{39}.%method:%L|%m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}"/>
	<include resource="org/springframework/boot/logging/logback/console-appender.xml"/>

	<!-- Kafka shipping switches, read from Spring Environment. The duplicate
	     spring_application_name declaration that used to sit here was removed;
	     it is already declared above. -->
	<springProperty scope="context" name="kafka_enabled" source="collmall.web.logging.kafka.enabled"/>
	<springProperty scope="context" name="kafka_broker" source="collmall.web.logging.kafka.broker"/>
	<springProperty scope="context" name="kafka_env" source="collmall.web.logging.kafka.env"/>

	<!-- NOTE(review): ${APP_ID} is not defined anywhere in this file —
	     presumably supplied as a system property or env var by the launcher;
	     confirm, otherwise logs land in "../logs/APP_ID_IS_UNDEFINED". -->
	<property name="LOG_DIR" value="../logs/${APP_ID}" />
	<!-- Application log: everything under com.collmall (see logger below).
	     Rolls daily, gzip-compresses rotated files, keeps 30 days.
	     NOTE(review): no <totalSizeCap>, so total disk usage is unbounded
	     within the 30-day window — confirm that is acceptable. -->
	<appender name="APP" class="ch.qos.logback.core.rolling.RollingFileAppender">
		<file>${LOG_DIR}/app.log</file>
		<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
			<fileNamePattern>${LOG_DIR}/app.%d{yyyy-MM-dd}.log.gz</fileNamePattern>
			<maxHistory>30</maxHistory>
		</rollingPolicy>
		<encoder>
			<charset>UTF-8</charset>
			<pattern>${FILE_LOG_PATTERN}</pattern>
		</encoder>
	</appender>

	<!-- Catch-all file for the root logger (all non-com.collmall logging,
	     since the app loggers below set additivity="false").
	     Daily gzip rotation, 10 days retained. -->
	<appender name="EXTERNAL" class="ch.qos.logback.core.rolling.RollingFileAppender">
		<file>${LOG_DIR}/external.log</file>
		<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
			<fileNamePattern>${LOG_DIR}/external.%d{yyyy-MM-dd}.log.gz</fileNamePattern>
			<maxHistory>10</maxHistory>
		</rollingPolicy>
		<encoder>
			<charset>UTF-8</charset>
			<pattern>${FILE_LOG_PATTERN}</pattern>
		</encoder>
	</appender>

	<!-- Dedicated file for mapper/SQL logging (wired to the mapper logger
	     below). Daily gzip rotation, 10 days retained. -->
	<appender name="SQL" class="ch.qos.logback.core.rolling.RollingFileAppender">
		<file>${LOG_DIR}/sql.log</file>
		<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
			<fileNamePattern>${LOG_DIR}/sql.%d{yyyy-MM-dd}.log.gz</fileNamePattern>
			<maxHistory>10</maxHistory>
		</rollingPolicy>
		<encoder>
			<charset>UTF-8</charset>
			<pattern>${FILE_LOG_PATTERN}</pattern>
		</encoder>
	</appender>

	<!-- Ships log lines to Kafka (danielwegener logback-kafka-appender),
	     topic "<env>applog_<app>", keyed by host name so one host's lines
	     stay in one partition. Producer is tuned fire-and-forget. -->
	<appender name="KAFKA" class="com.github.danielwegener.logback.kafka.KafkaAppender">
		<encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
			<!-- Explicit UTF-8 for consistency with the file appenders;
			     otherwise encoding depends on the JVM default charset. -->
			<charset>UTF-8</charset>
			<pattern>${FILE_LOG_PATTERN}</pattern>
		</encoder>
		<topic>${kafka_env}applog_${spring_application_name}</topic>
		<keyingStrategy class="com.github.danielwegener.logback.kafka.keying.HostNameKeyingStrategy" />
		<deliveryStrategy class="com.github.danielwegener.logback.kafka.delivery.AsynchronousDeliveryStrategy" />
		<producerConfig>bootstrap.servers=${kafka_broker}</producerConfig>
		<!-- don't wait for a broker to ack the reception of a batch.  -->
		<producerConfig>acks=0</producerConfig>
		<!-- wait up to 1000ms and collect log messages before sending them as a batch -->
		<producerConfig>linger.ms=1000</producerConfig>
		<!-- even if the producer buffer runs full, do not block the application but start to drop messages -->
		<producerConfig>max.block.ms=0</producerConfig>
	</appender>

	<!-- Decouples app threads from Kafka I/O. neverBlock=true makes the
	     appender drop events instead of blocking when its queue is full —
	     matching the non-blocking intent of max.block.ms=0 above (the
	     AsyncAppender default is to block the calling thread). -->
	<appender name="KAFKA_ASYNC" class="ch.qos.logback.classic.AsyncAppender">
		<neverBlock>true</neverBlock>
		<appender-ref ref="KAFKA" />
	</appender>

	<!-- Root at DEBUG is very verbose: all framework/third-party logging
	     goes to external.log and the console. -->
	<root level="DEBUG">
		<appender-ref ref="EXTERNAL"/>
		<appender-ref ref="CONSOLE"/>
	</root>

	<!-- Mapper/SQL logging goes only to sql.log (additivity="false").
	     NOTE(review): the trailing ".java" in this package name looks
	     suspicious for a mapper package — confirm it matches the real
	     package, otherwise this logger never fires. -->
	<logger name="com.collmall.evaluation.mapper.java" level="DEBUG" additivity="false">
		<appender-ref ref="SQL"/>
	</logger>

	<!-- Application logging goes to app.log, plus Kafka when
	     collmall.web.logging.kafka.enabled=true.
	     NOTE(review): the <if> element requires the Janino library on the
	     classpath; without it this condition is ignored with an error. -->
	<logger name="com.collmall" level="DEBUG" additivity="false">
		<appender-ref ref="APP"/>
		<if condition='"true".equals(property("kafka_enabled"))'>
			<then>
				<appender-ref ref="KAFKA_ASYNC"/>
			</then>
		</if>
	</logger>



</configuration>

配置文件

spring.application.name = evaluation_schedule
collmall.web.logging.kafka.enabled = true
collmall.web.logging.kafka.broker = kafka11.paas.net:9092,kafka12.paas.net:9092,kafka13.paas.net:9092
collmall.web.logging.kafka.env = prod

 

發表評論
所有評論
還沒有人評論,想成為第一個評論的人麼? 請在上方評論欄輸入並且點擊發布.
相關文章