filebeat.yml
 
filebeat.inputs:
 - type: log 
   paths:
    - /root/logs/*/*.log
   multiline.pattern: '^\d{4}\-\d{2}\-\d{2}' 
   multiline.negate: true 
   multiline.match: after 
   fields:
    index: 'server_log'
setup.ilm.enabled: false
setup.template.name: "java_logback_service_index_template"
setup.template.pattern: "java_logback_service_index_template-*"
setup.template.overwrite: true
setup.template.settings:
  index.number_of_shards: 1
output.elasticsearch:
  hosts: ["110.238.107.151:9001"]
  indices:
    - index: "server_log-%{+yyyy.MM.dd}"
      when.contains:
        fields:
          index: "server_log"
  pipeline: "test_java_log_pipeline"
  document_type: log 
  max_retries: 3 
processors: 
  - drop_fields:
      fields: ["log","host","input","agent","ecs","start_time"]
 
docker run
 
docker run -itd \
  --privileged=true \
  --user=root \
  --name=filebeat \
  --restart always \
  --network=my_network \
  -v /root/filebeat/filebeat.yml:/usr/share/filebeat/filebeat.yml:rw \
  -v /root/filebeat/data/:/usr/share/filebeat/data/:rw  \
  -v /root/logs/:/root/logs/:rw  \
  -v /root/xiaoye_worker/logs/:/root/xiaoye_worker/logs/:rw  \
  docker.elastic.co/beats/filebeat:6.4.2
 
Elasticsearch template and pipeline setup
 
GET _template/java_logback_service_index_template
DELETE _template/java_logback_service_index_template
PUT _template/java_logback_service_index_template
{
  "order": 1,
  "index_patterns": [
    "java_log-*"
  ],
  "settings": {
    "number_of_shards": 1,
    "number_of_replicas": 1
  },
  "mappings": {
    "type_name":{
      "properties": {
      "app_name": {
        "type": "keyword"
      },
      "trance_id": {
        "type": "keyword"
      },
      "log_level": {
        "type": "keyword"
      },
      "thread": {
        "type": "keyword"
      },
      "class_line": {
        "type": "keyword"
      },
      "message": {
        "type": "text",
        "analyzer": "ik_max_word",
        "search_analyzer": "ik_smart",
        "norms": false
      },
      "timestamp": {
        "type": "date"
      }
    }
    }
  },
  "aliases": {}
}
GET _ingest/pipeline/test_java_log_pipeline
DELETE /_ingest/pipeline/test_java_log_pipeline
PUT /_ingest/pipeline/test_java_log_pipeline
{
  "description": "test_java_log_pipeline",
  "processors": [
    {
      "grok": {
        "field": "message",
        "patterns": [
          """%{TIMESTAMP_ISO8601:timestamp} %{DATA:app_name} %{LOGLEVEL:log_level} %{DATA:thread} \[%{DATA:trance_id}\] %{DATA:class_line} %{GREEDYDATA:message}"""
        ],
        "pattern_definitions": {
          "ALL_CODE": "(\n)*"
        }
      }
    },
    {
      "date": {
        "field": "timestamp",
        "formats": [
          "yyyy-MM-dd HH:mm:ss.SSS"
        ],
        "timezone": "Asia/Shanghai",
        "target_field": "timestamp"
      }
    },
    {
      "remove": {
        "field": "@timestamp"
      }
    }
  ]
}
 
Testing grok
 
POST _ingest/pipeline/_simulate
{
  "pipeline": {
    "description": "timestamp pipeline",
    "processors": [
      {
        "grok": {
          "field": "message",
          "patterns": [
            """%{TIMESTAMP_ISO8601:timestamp} %{DATA:app_name} %{LOGLEVEL:log_level} %{DATA:thread} \[%{DATA:trance_id}\] %{DATA:class_line} %{GREEDYDATA:message}"""
          ]
        }
      },
      {
        "date": {
          "field": "timestamp",
          "formats": [
            "yyyy-MM-dd HH:mm:ss.SSS"
          ],
          "timezone": "Asia/Shanghai",
          "target_field": "create_time"
        }
      },
      {
        "remove": {
          "field": "timestamp"
        }
      }
    ]
  },
  "docs": [
    {
      "_index": "syne_sys_log",
      "_id": "id",
      "_source": {
        "message": "2024-10-02 21:11:20.083 xiaoye-scheduler INFO  scheduling-1 [] com.xiaoye.orion.scheduler.service.SchedulerService:83 now: 2024-10-02T21:11:20.083, size: 0, id:1727874680011, startTime:1727874680011"
      }
    }
  ]
}
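
The simulate call above is the authoritative check. As a quick offline approximation, the same line format can also be matched with a plain Java regex; the class below is only an illustrative sketch (the class name and regex are not part of the original setup, and grok's DATA/GREEDYDATA patterns are looser than this expression).

import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class GrokSanityCheck {

    // Rough Java-regex approximation of the grok pattern used by test_java_log_pipeline
    private static final Pattern LOG_LINE = Pattern.compile(
            "^(?<timestamp>\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2}\\.\\d{3}) "
          + "(?<appName>\\S+) (?<logLevel>[A-Z]+)\\s+(?<thread>\\S+) "
          + "\\[(?<tranceId>[^\\]]*)\\] (?<classLine>\\S+) (?<message>.*)$",
            Pattern.DOTALL);

    public static void main(String[] args) {
        String line = "2024-10-02 21:11:20.083 xiaoye-scheduler INFO  scheduling-1 [] "
                + "com.xiaoye.orion.scheduler.service.SchedulerService:83 now: 2024-10-02T21:11:20.083, size: 0";
        Matcher m = LOG_LINE.matcher(line);
        if (m.matches()) {
            // Prints the pieces that the ingest pipeline would place into separate fields
            System.out.println(m.group("timestamp") + " | " + m.group("appName") + " | "
                    + m.group("logLevel") + " | " + m.group("message"));
        }
    }
}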
 
Entity
 
import com.fasterxml.jackson.annotation.JsonFormat;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.experimental.SuperBuilder;
import org.springframework.data.elasticsearch.annotations.Document;
import org.springframework.data.elasticsearch.annotations.Field;
import org.springframework.data.elasticsearch.annotations.FieldType;
import org.springframework.format.annotation.DateTimeFormat;

import java.util.Date;

@Data
@SuperBuilder
@NoArgsConstructor
@AllArgsConstructor
@JsonInclude(JsonInclude.Include.NON_EMPTY)
@JsonIgnoreProperties(ignoreUnknown = true)
@Document(indexName = "server_log-*")
public class ServerLogEs {
    @Field(name = "log_level", type = FieldType.Keyword)
    @JsonProperty("log_level")
    private String logLevel;
    
    @Field(name = "thread", type = FieldType.Keyword)
    private String thread;
    
    @Field(name = "app_name", type = FieldType.Keyword)
    @JsonProperty("app_name")
    private String appName;
    
    @Field(name = "trance_id", type = FieldType.Keyword)
    @JsonProperty("trance_id")
    private String tranceId;
    
    @Field(name = "class_line", type = FieldType.Keyword)
    @JsonProperty("class_line")
    private String classLine;

    @Field(type = FieldType.Text, analyzer = "ik_smart", searchAnalyzer = "ik_smart")
    private String message;
    
    @Field(name = "@create_time")
    @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8")
    @DateTimeFormat(pattern = "yyyy-MM-dd HH:mm:ss")
    private Date createTime;
}
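
To read these documents back from the application, a Spring Data Elasticsearch repository over the entity is sufficient for simple queries. The sketch below is illustrative only: it assumes spring-boot-starter-data-elasticsearch is on the classpath and that the entity also carries an @Id-annotated identifier property (which the class above does not declare); the repository and method names are made up.

import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.data.elasticsearch.repository.ElasticsearchRepository;

public interface ServerLogEsRepository extends ElasticsearchRepository<ServerLogEs, String> {

    // Derived query over the keyword fields mapped above (app_name / log_level);
    // paging and sorting come from the Pageable argument
    Page<ServerLogEs> findByAppNameAndLogLevel(String appName, String logLevel, Pageable pageable);
}

A call such as findByAppNameAndLogLevel("xiaoye-scheduler", "ERROR", PageRequest.of(0, 20)) then returns one page of error logs for a single service.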
 
logback.xml
 
<?xml version="1.0" encoding="UTF-8" ?>
<configuration>
    <springProperty scope="context" name="logPath" source="log.path" defaultValue="/root/logs/${APP_NAME}/"/>
     
    <springProperty scope="context" name="APP_NAME" source="spring.application.name" defaultValue="xiaoye-admin"/>
    <include resource="org/springframework/boot/logging/logback/defaults.xml"/>
    <jmxConfigurator/>
    <appender name="consoleLog" class="ch.qos.logback.core.ConsoleAppender">
        <layout class="ch.qos.logback.classic.PatternLayout">
            <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} ${APP_NAME} %-5level %thread [%X{traceId}] %class:%line %msg%n
            </pattern>
        </layout>
    </appender>
    
    <appender name="fileInfoLog" class="ch.qos.logback.core.rolling.RollingFileAppender">
        <filter class="ch.qos.logback.classic.filter.LevelFilter">
            <level>ERROR</level>
            <onMatch>DENY</onMatch>
            <onMismatch>ACCEPT</onMismatch>
        </filter>
        <encoder>
            <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} ${APP_NAME} %-5level %thread [%X{traceId}] %class:%line %msg%n
            </pattern>
            <immediateFlush>false</immediateFlush>
        </encoder>
        <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
            <fileNamePattern>${logPath}/info.%d{yyyy-MM-dd}.log</fileNamePattern>
            <maxHistory>7</maxHistory>
        </rollingPolicy>
    </appender>
    <appender name="fileErrorLog" class="ch.qos.logback.core.rolling.RollingFileAppender">
        <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
            <level>ERROR</level>
        </filter>
        <encoder>
            <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} ${APP_NAME} %-5level %thread [%X{traceId}] %class:%line %msg%n
            </pattern>
        </encoder>
        <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
            <fileNamePattern>${logPath}/error.%d{yyyy-MM-dd}.log</fileNamePattern>
            <maxHistory>7</maxHistory>
        </rollingPolicy>
    </appender>
    <logger name="org.springframework" level="ERROR"/>
    <logger name="org.xnio" level="ERROR"/>
    <logger name="io.undertow" level="ERROR"/>
    <logger name="com.netflix.discovery" level="ERROR"/>
    <root level="info">
        <appender-ref ref="consoleLog"/>
        <appender-ref ref="fileInfoLog"/>
        <appender-ref ref="fileErrorLog"/>
    </root>
</configuration>
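
The [%X{traceId}] placeholder in the patterns above only prints something if the application puts a traceId into the SLF4J MDC. If no tracing library does that already, a simple servlet filter is enough; the class below is an illustrative sketch (the filter name is made up, and the javax.servlet imports assume a pre-Jakarta Spring Boot version).

import org.slf4j.MDC;
import org.springframework.stereotype.Component;

import javax.servlet.Filter;
import javax.servlet.FilterChain;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import java.io.IOException;
import java.util.UUID;

@Component
public class TraceIdFilter implements Filter {

    @Override
    public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain)
            throws IOException, ServletException {
        try {
            // Put a per-request id into the MDC so every log line written while
            // handling the request carries a non-empty [traceId]
            MDC.put("traceId", UUID.randomUUID().toString().replace("-", ""));
            chain.doFilter(request, response);
        } finally {
            // Always clear the entry, otherwise the id leaks to the next request on this thread
            MDC.remove("traceId");
        }
    }
}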