一、输入
1.1 日志路径设置
# Input definition: collect all *.log files under /var/log.
# NOTE(review): the `log` input type is deprecated in newer Filebeat releases
# in favor of `filestream` — confirm the Filebeat version in use.
filebeat.inputs:
  - type: log
    enabled: true
    paths:
      - "/var/log/*.log"
1.2 多行合并设置
# Multiline merging: treat any line that does NOT start with a
# "[YYYY-MM-DD" timestamp as a continuation of the previous event
# (negate: true + match: after).
multiline.type: pattern
multiline.pattern: '^\[[0-9]{4}-[0-9]{2}-[0-9]{2}'
multiline.negate: true
multiline.match: after
# 示例 — 不匹配时间戳模式的行会被合并到上一条匹配行之后:
# [2015-08-24 11:49:14,389] Start new event
# [2015-08-24 11:49:14,395] Content of processing something
# [2015-08-24 11:49:14,399] End event
二、输出
2.1 elasticsearch设置
# Elasticsearch output with a custom daily index name.
# NOTE(review): password is stored in plaintext — prefer the Filebeat
# keystore (`filebeat keystore add ES_PWD`) over hard-coding secrets.
output.elasticsearch:
  hosts: ["https://myEShost:9200"]
  username: "elastic"
  password: "123456"
  # Custom index requires setup.template.name/pattern and setup.ilm.enabled: false.
  index: "test-%{+yyyy.MM.dd}"
# Index lifecycle management: set to false so the custom daily index
# name configured on the output is actually used (ILM overrides it otherwise).
setup.ilm.enabled: false
# Index template settings applied when the index is created.
# NOTE(review): pattern "filebeat-*" does not match the custom index
# "test-%{+yyyy.MM.dd}" configured on the elasticsearch output — the
# template will not apply to those indices; "test-*" is likely intended.
setup.template.name: "filebeat"
setup.template.pattern: "filebeat-*"
setup.template.overwrite: false
setup.template.settings:
  index.number_of_shards: 1
  index.number_of_replicas: 1
2.2 kafka设置
# Kafka output: topic is resolved per-event from the `fields.log_topic` field.
# NOTE(review): Filebeat supports only ONE active output — if
# output.elasticsearch is enabled elsewhere in this config, one of the
# two must be commented out.
output.kafka:
  hosts: ["kafka1:9092", "kafka2:9092", "kafka3:9092"]
  topic: '%{[fields.log_topic]}'
  partition.round_robin:
    # Also distribute events to partitions whose leader is currently unreachable.
    reachable_only: false
  # 1 = wait for the partition leader's ack only (0 = no ack, -1 = all replicas).
  required_acks: 1
  compression: gzip
  # Drop events larger than ~1 MB (must not exceed the broker's message.max.bytes).
  max_message_bytes: 1000000
三、内部队列存储
# Internal event queue.
# NOTE(review): only one queue type may be configured — queue.mem and
# queue.disk are mutually exclusive; remove one before deploying.
queue.mem:
  # Buffer at most 4096 events in memory.
  events: 4096
  # Defaults are flush.min_events: 2048 and flush.timeout: 1s;
  # lowered/raised here to flush smaller batches less eagerly.
  flush.min_events: 512
  flush.timeout: 5s
queue.disk:
  max_size: 1GB