Linux部署Filebeat

1 下载

[root@localhost ~]# cd /home/elk

1.1 ELK7.8.1

[root@localhost elk]# wget https://artifacts.elastic.co/downloads/logstash/logstash-7.8.1.tar.gz
[root@localhost elk]# wget https://artifacts.elastic.co/downloads/kibana/kibana-7.8.1-linux-x86_64.tar.gz
[root@localhost elk]# wget https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch-7.8.1-linux-x86_64.tar.gz
[root@localhost elk]# wget https://artifacts.elastic.co/downloads/beats/filebeat/filebeat-7.8.1-linux-x86_64.tar.gz

2 Filebeat安装

2.1 解压

[root@localhost elk]# tar -zxvf filebeat-7.8.1-linux-x86_64.tar.gz -C /home/elk/
[root@localhost elk]# cd filebeat-7.8.1-linux-x86_64/

2.2 修改配置

[root@localhost filebeat-7.8.1-linux-x86_64]# vi filebeat.yml

# ============================== Filebeat inputs ===============================

filebeat.inputs:

# Each - is an input. Most options can be set at the input level, so
# you can use different inputs for various configurations.
# Below are the input specific configurations.

- type: log

  # Change to true to enable this input configuration.
  enabled: true

  # Paths that should be crawled and fetched. Glob based paths.
  paths:
    #- /var/log/*.log
    - /home/logs/*.log
    #- c:\programdata\elasticsearch\logs\*

......（其余配置保持默认，此处省略）

# -------------------------------- Kafka Output --------------------------------
output.kafka:
  # initial brokers for reading cluster metadata
  # hosts: ["kafka1:9092", "kafka2:9092", "kafka3:9092"]
  hosts: ["192.168.56.13:9092"]

  # message topic selection + partitioning
  topic: 'gsdss-test'
  partition.round_robin:
    reachable_only: false

  required_acks: 1
  compression: gzip
  max_message_bytes: 1000000

# ---------------------------- Elasticsearch Output ----------------------------
#output.elasticsearch:
  # Array of hosts to connect to.
  #hosts: ["localhost:9200"]

  # Protocol - either `http` (default) or `https`.
  #protocol: "https"

  # Authentication credentials - either API key or username/password.
  #api_key: "id:api_key"
  #username: "elastic"
  #password: "changeme"

2.3 创建Kafka的Topic

#使用kafka-topics.sh创建topic
bin/kafka-topics.sh --create --zookeeper 192.168.56.13:2181 --replication-factor 1 --partitions 1 --topic gsdss-test

2.4 启动

#后台启动
[root@localhost filebeat-7.8.1-linux-x86_64]# nohup ./filebeat -c ./filebeat.yml &
#查看控制台
[root@localhost filebeat-7.8.1-linux-x86_64]# tail -f nohup.out

#关闭，通过发送SIGTERM信号给Filebeat进程来停止它
[root@localhost filebeat-7.8.1-linux-x86_64]# kill -15 {pid}
上一篇:服务端应用程序 文件夹结构


下一篇:ELK+Kafka+Filebeat集群安装部署