1. Collection scheme

Spring produces logs through Logback; one of Logback's TCP appenders serializes each event as structured JSON and sends it directly to the TCP port that Logstash listens on. Logstash receives the data and forwards it to Elasticsearch.

2. Hands-on practice

2.1 SpringBoot and Logback configuration

2.1.1 Core Dependencies

implementation "net.logstash.logback:logstash-logback-encoder:6.2"
compileOnly "org.projectlombok:lombok"
annotationProcessor "org.projectlombok:lombok"

2.1.2 application.yml

spring:
  profiles:
    # Active profile; overridable via the SPRING_ACTIVE environment variable (defaults to dev)
    active: ${SPRING_ACTIVE:dev}

  application:
    # Used by logback-spring.xml as the "appName" field shipped to Logstash
    name: elk-server

custom:
  # host:port of the TCP input that Logstash listens on
  logstash_uri: 127.0.0.1:4660
  # Name of the Logback appender that the "prod" profile attaches to the root logger
  appender_ref: Tcp_Logstash
Copy the code

2.1.3 logback-spring.xml

  • See this link for detailed configuration instructions of the Logstash appender

      
<configuration>
    <!-- Converter classes used to render colored console logs -->
    <conversionRule conversionWord="clr" converterClass="org.springframework.boot.logging.logback.ColorConverter"/>
    <conversionRule conversionWord="wex" converterClass="org.springframework.boot.logging.logback.WhitespaceThrowableProxyConverter"/>
    <conversionRule conversionWord="wEx" converterClass="org.springframework.boot.logging.logback.ExtendedWhitespaceThrowableProxyConverter"/>

    <!-- Custom properties. The console pattern is Spring Boot's default console
         pattern with the MDC traceId prepended in yellow. -->
    <property name="CONSOLE_LOG_PATTERN"
              value="[%clr(%X{traceId:-}){yellow}] %clr(%d{${LOG_DATEFORMAT_PATTERN:-yyyy-MM-dd HH:mm:ss.SSS}}){faint} %clr(${LOG_LEVEL_PATTERN:-%5p}) %clr(${PID:- }){magenta} %clr(---){faint} %clr([%15.15t]){faint} %clr(%-40.40logger{39}){cyan} %clr(:){faint} %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}"/>

    <!-- Properties resolved from the Spring Environment (application.yml) -->
    <springProperty scope="local" name="App_Name" source="spring.application.name"
                    defaultValue="demo-server"/>
    <springProperty scope="local" name="Appender_Ref" source="custom.appender_ref"
                    defaultValue="Console"/>
    <springProperty scope="local" name="Logstash_Uri" source="custom.logstash_uri"/>

    <!-- Console appender -->
    <appender name="Console" class="ch.qos.logback.core.ConsoleAppender">
        <encoder>
            <pattern>${CONSOLE_LOG_PATTERN}</pattern>
            <charset>UTF-8</charset>
        </encoder>
    </appender>

    <!-- Logstash TCP appender -->
    <appender name="Tcp_Logstash" class="net.logstash.logback.appender.LogstashTcpSocketAppender">
        <keepAliveDuration>5 minutes</keepAliveDuration>
        <!-- Logstash address (host:port), injected from custom.logstash_uri -->
        <destination>${Logstash_Uri}</destination>
        <!-- An encoder is mandatory for this appender -->
        <encoder charset="UTF-8" class="net.logstash.logback.encoder.LogstashEncoder">
            <!-- Custom timestamp format: yyyy-MM-dd HH:mm:ss.SSS -->
            <timestampPattern>yyyy-MM-dd HH:mm:ss.SSS</timestampPattern>
            <!-- Rename the standard JSON fields -->
            <fieldNames>
                <timestamp>time</timestamp>
                <message>msg</message>
                <logger>class</logger>
                <!-- Setting a name to [ignore] drops that standard field from the output -->
                <version>[ignore]</version>
                <thread>[ignore]</thread>
                <levelValue>[ignore]</levelValue>
            </fieldNames>
            <!-- MDC keys to include as top-level JSON fields -->
            <includeMdcKeyName>traceId</includeMdcKeyName>
            <!-- Extra static fields added to every event -->
            <customFields>{"appName": "${App_Name}"}</customFields>
            <!-- Maximum (shortened) logger-name length -->
            <shortenedLoggerNameLength>40</shortenedLoggerNameLength>
        </encoder>
    </appender>

    <!-- Framework log levels -->
    <logger name="org.slf4j" level="INFO"/>
    <logger name="springfox" level="INFO"/>
    <logger name="io.swagger" level="INFO"/>
    <logger name="org.springframework" level="INFO"/>
    <logger name="org.hibernate.validator" level="INFO"/>

    <!-- Log level for the dev (test) environment -->
    <springProfile name="dev">
        <root level="INFO">
            <appender-ref ref="Console"/>
        </root>
    </springProfile>

    <!-- Log level for the prod (online) environment -->
    <springProfile name="prod">
        <root level="INFO">
            <appender-ref ref="${Appender_Ref}"/>
            <appender-ref ref="Console"/>
        </root>
    </springProfile>

</configuration>
Copy the code

2.2 Logstash configuration

1. Create a conf file: vim tcp_to_es.conf

# Listen on TCP port 4660 and decode each incoming line as a JSON event
input {
  tcp {
    host => "localhost"
    port => 4660
    mode => "server"
    tags => ["burukeyou"]
    codec => json_lines
  }
}

output {
  # Ship events to Elasticsearch
  elasticsearch {
    # Elasticsearch address
    hosts => ["http://localhost:9200"]
    # Index "log-<appName>-<current date>"; created automatically if it does not exist
    index => "log-%{[appName]}-%{+YYYY.MM.dd}"
    #user => "elastic"
    #password => "changeme"
  }

  # Also print events to the Logstash console in rubydebug format
  stdout { codec => rubydebug }
}
Copy the code

2. Start the logstash

  • After startup, a `logstash-*` index template is created in ES by default
bin/logstash -f ../config/tcp_to_es.conf
Copy the code

3. Start SpringBoot and print logs

@Slf4j
@Component
public class LogCreateListener implements ApplicationListener<ApplicationStartedEvent> {

    @Override
    public void onApplicationEvent(ApplicationStartedEvent event) {
        for (int i = 0; i < 10; i++) {
            log.info("Log message: {}", i); }}}Copy the code

2. Kibana observation log

2.1 Creating an Index Mode

Set the index schema name

This section describes how to set the time field so that you can view logs by time

Click Discover and select the index pattern `log-elk-server-*`; you can then see all the index log data matched by this index pattern

3. Kibana Visualization

1. Click the Visualize Library list, then click Create Visualization

2. Select the diagram to create

3. Configuration diagram

  • Click save to save the configuration