整体结构图 :
1服务器2台:
1.1 (121.40.165.59):
Filebeat-6.3.2
Logstash-6.3.2
Kibana-6.3.2
1.2 (47.99.139.182):
ES -6.3.2
2搭建:
2.1 Filebeat:
官网下载压缩包,解压在 /usr/local/filebeat 目录下, 编辑目录下 filebeat.yml 文件:
filebeat.inputs:
  # TCP input: Spring Boot services (logstash-logback-encoder TCP appender)
  # push JSON log events directly to this host:port.
  - type: tcp
    enabled: true
    max_message_size: 10MiB
    host: "172.16.238.204:5088"

# Forward all events to Logstash over the beats protocol.
output.logstash:
  hosts: ["172.16.238.204:5044"]
注: type 这里设置为 TCP 模式,host 是通讯地址,springboot 服务通过此 ip 端口将日志发送到 filebeat;output 这里设置输出到 logstash,并指明 logstash 的地址及端口。
启动: 切换到filebeat根目录:执行 ./filebeat -e -c filebeat.yml -d "publish" , 执行台查看启动状态即可
2.2: Logstash:
官网下载安装包,安装目录在 /usr/local/logstash-6.3.2 (logstash 需要 jdk 环境)。在 bin 目录打开 logstash 脚本,编辑 export JAVA_CMD="/usr/local/jdk1.8.0_141/bin" 和 export JAVA_HOME="/usr/local/jdk1.8.0_141/",然后切换到 config 目录下自定义一个 config 文件:
# Logstash pipeline: beats in -> light filtering -> Elasticsearch out.
input {
  # Receives events from Filebeat (see filebeat.yml output.logstash).
  beats {
    # host => "172.16.238.204"
    port => 5044
    # Events arrive as JSON produced by logstash-logback-encoder.
    codec => json {
      charset => "UTF-8"
    }
  }
  # Alternative: accept TCP directly from applications, bypassing Filebeat.
  # tcp {
  #   mode => "server"
  #   host => "172.16.238.204"
  #   port => 5600
  #   codec => json
  # }
}
filter {
  # Drop the beats-added "host" field; it is not needed in the index.
  mutate {
    remove_field => "host"
  }
  # NOTE(review): "syslog_timestamp" is not among the fields emitted by the
  # logback JSON encoder below, so this date filter is likely a no-op — verify.
  date {
    match => [ "syslog_timestamp", "MMM d HH:mm:ss", "MMM dd HH:mm:ss" ]
  }
}
# FIX: removed an extra unmatched "}" that followed the filter block in the
# original notes — it made the pipeline configuration unparseable.
output {
  elasticsearch {
    hosts => ["47.99.139.182:9200"]
    # One index per service per day, e.g. "serviceuser-2024.03.01".
    index => "%{[servicename]}-%{+YYYY.MM.dd}"
  }
  # Also echo events to stdout for debugging.
  stdout { codec => json }
}
注:此处输入为beats模式指定好监听端口 及数据格式, output 输出向es ,index 为索引,格式是服务名-年月日
启动: 切换到logstash根目录 执行 ./bin/logstash -f config/tcplog.config
2.3: 安装Kibana 官网下载安装包 解压 目录在 /usr/local/kibana-6.3.2-linux-x86_64 ,切换到config 目录编辑
kibana.yml,修改 server.port: 5601,server.host: "0.0.0.0",
elasticsearch.url: "http://172.16.238.178:9200" ,切换到 bin 目录执行
nohup ./kibana &
3: springboot集成logback :
Resources 目录下新建logback-spring.xml 文件:
目前内容为:
bootstrap.yml 配置:
logging:
  config: classpath:logback-spring.xml
Maven 依赖调整:
项目中要全局移除自带的logging 日志依赖,然后加入新的依赖包:
注:logback-spring.xml 配置中的ip及端口 是服务器的内网ip地址,本地启动无法访问,
测试后 Kibana控制台查看:
```java
```java
附:
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-web</artifactId>
<!-- Exclude the default logging starter (replaced below by logback-classic
     plus the logstash encoder) and the embedded Tomcat starter. -->
<exclusions>
<exclusion>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-logging</artifactId>
</exclusion>
<exclusion>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-tomcat</artifactId>
</exclusion>
</exclusions>
</dependency>
<!-- Logback implementation used by logback-spring.xml. -->
<dependency>
<groupId>ch.qos.logback</groupId>
<artifactId>logback-classic</artifactId>
<version>1.2.3</version>
</dependency>
<!-- Provides LogstashTcpSocketAppender / JSON encoders referenced in logback-spring.xml. -->
<dependency>
<groupId>net.logstash.logback</groupId>
<artifactId>logstash-logback-encoder</artifactId>
<version>5.3</version>
</dependency>
Xml:
<?xml version="1.0" encoding="UTF-8"?>
<!-- FIX: wrapped everything in the mandatory <configuration> root element;
     the original fragment was not well-formed XML and Logback would reject it. -->
<configuration>
    <!-- Read the service name from Spring's environment. -->
    <springProperty scope="context" name="springAppName"
                    source="spring.application.name"/>
    <!-- Log file location inside the project build folder. -->
    <property name="LOG_FILE" value="${BUILD_FOLDER:-build}/${springAppName}"/>
    <!-- Console log pattern. FIX: removed a stray extra "}" that followed
         ${LOG_EXCEPTION_CONVERSION_WORD:-%wEx} in the original. -->
    <property name="CONSOLE_LOG_PATTERN"
              value="%clr(%d{yyyy-MM-dd HH:mm:ss.SSS}){faint} %clr(${LOG_LEVEL_PATTERN:-%5p}) %clr(${PID:- }){magenta} %clr(---){faint} %clr([%15.15t]){faint} %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}"/>
    <!-- Console output, INFO and above. -->
    <appender name="console" class="ch.qos.logback.core.ConsoleAppender">
        <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
            <level>INFO</level>
        </filter>
        <encoder>
            <pattern>${CONSOLE_LOG_PATTERN}</pattern>
            <charset>utf8</charset>
        </encoder>
    </appender>
    <!-- JSON-over-TCP appender targeting Filebeat's TCP input (filebeat.yml host). -->
    <appender name="logstash"
              class="net.logstash.logback.appender.LogstashTcpSocketAppender">
        <destination>172.16.238.204:5088</destination>
        <encoder
                class="net.logstash.logback.encoder.LoggingEventCompositeJsonEncoder">
            <providers>
                <timestamp>
                    <timeZone>UTC</timeZone>
                </timestamp>
                <pattern>
                    <pattern>
                        {
                        "severity": "%level",
                        "servicename": "serviceuser",
                        "trace": "%X{X-B3-TraceId:-}",
                        "span": "%X{X-B3-SpanId:-}",
                        "exportable": "%X{X-Span-Export:-}",
                        "pid": "${PID:-}",
                        "thread": "%thread",
                        "class": "%logger{40}",
                        "rest": "%message"
                        }
                    </pattern>
                </pattern>
            </providers>
        </encoder>
    </appender>
    <!-- Root logger: everything at INFO+ goes to both console and logstash. -->
    <root level="INFO">
        <appender-ref ref="console"/>
        <appender-ref ref="logstash"/>
    </root>
</configuration>
```