备注:近期在梳理7.1版本的日志采集和监控技术栈,用于形成标准化手册,故简单记录分享(不额外排版,只体现思想)
架构:
kafka:日志中转 集群模式
elasticsearch:索引 集群模式
kibana: 索引分析 单节点 rpm
filebeat:日志搜集 单节点 rpm
zabbix:logstash处理兜底error 单节点
prometheus+grafana+alertmanager+fluentd 测试中
架构简述:
filebeat>kafka>logstash>es>kibana
>zabbix>timescale>grafana
pod >prometheus>grafana(下一篇)
案例1:(搜集message日志)
#filebeat配置:
[root@node7 filebeat]# cat /etc/filebeat/filebeat.yml
# Filebeat 7.x: tail /var/log/messages and publish each event to Kafka.
# NOTE(review): the paste had lost YAML indentation; structure reconstructed
# from semantics — `topic` below references `fields.logIndex`, so logIndex
# must live under `fields`.
filebeat.inputs:
- type: log
  enabled: true
  paths:
    - /var/log/messages
  # Start reading at end-of-file on first run (do not re-ship history).
  tail_files: true
  # Custom metadata attached to every event; fields.logIndex drives
  # the Kafka topic selection in output.kafka below.
  fields:
    logtype: message
    logIndex: message
name: node7
#output.logstash:
#  hosts: ["node8:5044"]
output.kafka:
  # FIX: the Filebeat output option is `enabled`, not `enable`
  # (an unknown key is rejected / silently ignored depending on version).
  enabled: true
  hosts: ["node7:9092","node8:9092","node9:9092"]
  # One topic per log type, named by the custom field set above.
  topic: "%{[fields.logIndex]}"
#logstash配置
[root@node7 filebeat]#
[root@node8 conf.d]# cat zabbix/zabbix.conf
# Pipeline: tail /var/log/secure, parse syslog lines, index into ES,
# and forward error-looking events to Zabbix.
# NOTE(review): this pipeline reads the local file directly; it does NOT
# consume the Kafka topics that filebeat publishes to — confirm whether a
# separate kafka-input pipeline exists, or whether the input here should
# be kafka {} to match the documented filebeat>kafka>logstash flow.
input {
file {
path => ["/var/log/secure"]
# Read the whole file on first run; subsequent runs resume via sincedb.
start_position => "beginning"
}
}
filter {
# Split a standard syslog line into timestamp / host / program[pid] / text.
grok {
match => { "message" => "%{SYSLOGTIMESTAMP:message_timestamp} %{SYSLOGHOST:hostname} %{DATA:message_program}(?:\[%{POSINT:message_pid}\])?: %{GREEDYDATA:message_content}" }
}
mutate {
# These two fields are read *by name* in the zabbix output below:
# the plugin's zabbix_host/zabbix_key settings take field names, and
# these fields hold the actual Zabbix host and item key values.
add_field => ["[zabbix_key]","oslogs"]
add_field => ["[zabbix_host]","node8"]
# Drop the raw line once grok has extracted the parts we keep.
remove_field => ["@version","message"]
}
# Set @timestamp from the syslog timestamp (single- and double-digit days).
date {
match => [ "message_timestamp","MMM d HH:mm:ss","MMM dd HH:mm:ss","ISO8601"]
}
}
output {
# All events (error or not) are indexed into the daily oslogs-* index.
# NOTE(review): credentials are hardcoded here — consider moving them to
# the logstash keystore.
elasticsearch{
index => "oslogs-%{+YYYY.MM.dd}"
hosts => ["node7:9200","node8:9200","node9:9200"]
user => "elastic"
password => "renzhiyuan"
sniffing => false
}
# Only error-looking lines are escalated to Zabbix.
# (The ERR alternative already covers ERROR; kept as written.)
if [message_content] =~ /(ERR|error|ERROR|Failed)/ {
zabbix {
# zabbix_host / zabbix_key / zabbix_value are FIELD NAMES: the plugin
# reads the event fields added in the mutate block above.
zabbix_host =>"[zabbix_host]"
zabbix_key =>"[zabbix_key]"
zabbix_server_host =>"node7"
# NOTE(review): 20008 is not the default trapper port (10051) —
# presumably a proxy or remapped port; confirm.
zabbix_server_port =>"20008"
zabbix_value =>"message_content"
}
}
# Uncomment for debugging: print each event to stdout.
#stdout{ codec => rubydebug }
}
案例2:(搜集secure日志+kafka日志+应用日志+匹配hosts正则+zabbix告警)
#filebeat配置:
[root@node8 conf.d]# cat /etc/filebeat/filebeat.yml
# Filebeat 7.x: tail the hermes-api application log and the Kafka broker log,
# publishing each to its own Kafka topic (selected via fields.logIndex).
# NOTE(review): the paste had lost YAML indentation; structure reconstructed
# from semantics. `multiline.*` is an input-level option (not a custom field),
# so it is placed at the input level here.
filebeat.inputs:
- type: log
  enabled: true
  paths:
    - /renzhiyuan/hermes-api.log
  # Start reading at end-of-file on first run (do not re-ship history).
  tail_files: true
  fields:
    logtype: hermes-api-node8
    logIndex: hermesapi
  # Join stack traces: any line NOT starting with '[' is appended to the
  # preceding event.
  multiline:
    pattern: '^\['
    negate: true
    match: after
- type: log
  enabled: true
  paths:
    - /app/kafka/logs/server.log
  tail_files: true
  fields:
    logtype: kafka-serverlog-node8
    logIndex: kafka-serverlog
name: node8
#output.logstash:
#  hosts: ["node8:5044"]
output.kafka:
  # FIX: the Filebeat output option is `enabled`, not `enable`
  # (an unknown key is rejected / silently ignored depending on version).
  enabled: true
  hosts: ["node7:9092","node8:9092","node9:9092"]
  # One topic per log type, named by the custom field set on each input.
  topic: "%{[fields.logIndex]}"
[root@node8 conf.d]#
[root@node8 conf.d]# cat zabbix/zabbix.conf
# Pipeline: tail /var/log/secure, parse syslog lines, index into ES,
# and forward error-looking events to Zabbix.
# NOTE(review): this config is byte-identical to the case-1 zabbix.conf —
# presumably the same file shown twice; confirm it was not meant to consume
# the hermesapi / kafka-serverlog topics instead.
input {
file {
path => ["/var/log/secure"]
# Read the whole file on first run; subsequent runs resume via sincedb.
start_position => "beginning"
}
}
filter {
# Split a standard syslog line into timestamp / host / program[pid] / text.
grok {
match => { "message" => "%{SYSLOGTIMESTAMP:message_timestamp} %{SYSLOGHOST:hostname} %{DATA:message_program}(?:\[%{POSINT:message_pid}\])?: %{GREEDYDATA:message_content}" }
}
mutate {
# These two fields are read *by name* in the zabbix output below:
# the plugin's zabbix_host/zabbix_key settings take field names, and
# these fields hold the actual Zabbix host and item key values.
add_field => ["[zabbix_key]","oslogs"]
add_field => ["[zabbix_host]","node8"]
# Drop the raw line once grok has extracted the parts we keep.
remove_field => ["@version","message"]
}
# Set @timestamp from the syslog timestamp (single- and double-digit days).
date {
match => [ "message_timestamp","MMM d HH:mm:ss","MMM dd HH:mm:ss","ISO8601"]
}
}
output {
# All events (error or not) are indexed into the daily oslogs-* index.
# NOTE(review): credentials are hardcoded here — consider moving them to
# the logstash keystore.
elasticsearch{
index => "oslogs-%{+YYYY.MM.dd}"
hosts => ["node7:9200","node8:9200","node9:9200"]
user => "elastic"
password => "renzhiyuan"
sniffing => false
}
# Only error-looking lines are escalated to Zabbix.
# (The ERR alternative already covers ERROR; kept as written.)
if [message_content] =~ /(ERR|error|ERROR|Failed)/ {
zabbix {
# zabbix_host / zabbix_key / zabbix_value are FIELD NAMES: the plugin
# reads the event fields added in the mutate block above.
zabbix_host =>"[zabbix_host]"
zabbix_key =>"[zabbix_key]"
zabbix_server_host =>"node7"
# NOTE(review): 20008 is not the default trapper port (10051) —
# presumably a proxy or remapped port; confirm.
zabbix_server_port =>"20008"
zabbix_value =>"message_content"
}
}
# Uncomment for debugging: print each event to stdout.
#stdout{ codec => rubydebug }
}
#效果图: