Log Viewing Scenarios
- A request goes through the gateway to Service 1
- A request goes through the gateway to Service 1, and Service 1 calls Service 2
- A scheduled task calls Service 1
Implementation Flow
- When an HTTP request passes through the gateway, a filter generates a traceId and adds it to the request headers.
- When the request reaches a service, an MVC interceptor reads the traceId from the header and writes it into the log output via the logging MDC class.
- Before Service 1 calls another service through Feign, it reads the traceId from the MDC and puts it into the request headers; the called service extracts the traceId in the same way as step 2 and records it in its logs.
- Install filebeat (or another log shipper) on each server to collect the logs and send them to Elasticsearch.
- View the logs in Kibana. Locate the traceId of a problematic request via the gateway (or by other means), then use that traceId to see every log entry belonging to that request.
Related Code
Zuul gateway code
TracePreFilter.java (filter)
Generates a traceId and adds it to the request headers so it is carried into downstream requests.
@Component
public class TracePreFilter extends ZuulFilter {
    @Override
    public String filterType() {
        return "pre";
    }

    @Override
    public int filterOrder() {
        return -1;
    }

    @Override
    public boolean shouldFilter() {
        return true;
    }

    @Override
    public Object run() throws ZuulException {
        // Generate a traceId and put it into the MDC so the gateway's own log lines carry it
        String traceIdVal = LogHelper.getTraceId();
        MDC.put(LogCollectionConstants.traceId, traceIdVal);
        // Forward the traceId to downstream services via the request headers
        RequestContext ctx = RequestContext.getCurrentContext();
        ctx.addZuulRequestHeader(LogCollectionConstants.traceId, traceIdVal);
        // Record the start time in the "pre" phase so the post filter can compute the duration
        ctx.set(LogFilter.START_TIME_KEY, System.currentTimeMillis());
        return null;
    }
}
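LogHelper and LogCollectionConstants are used throughout but not listed in this section. Judging from the traceId values shown later (32 hex characters with no dashes), a minimal sketch might look like this; the implementations are assumptions, not the original code:

import java.util.UUID;

public class LogCollectionConstants {
    // Key used both as the MDC key and as the HTTP header name (assumed value)
    public static final String traceId = "traceId";
}

public class LogHelper {
    // Generate a 32-character hex traceId: a random UUID with the dashes removed,
    // which matches the format of the traceIds shown in the test sections below
    public static String getTraceId() {
        return UUID.randomUUID().toString().replace("-", "");
    }
}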
LogFilter.java (filter)
Records the details of each request: parameters, response body, duration, and so on.
@Component
@Slf4j
public class LogFilter extends ZuulFilter {
    public static final String START_TIME_KEY = "start_time";

    @Override
    public String filterType() {
        return FilterConstants.POST_TYPE;
    }

    @Override
    public int filterOrder() {
        return 0;
    }

    @Override
    public boolean shouldFilter() {
        return true;
    }

    @Override
    public Object run() {
        try {
            RequestContext ctx = RequestContext.getCurrentContext();
            HttpServletRequest request = ctx.getRequest();
            HttpEntity httpEntity = new HttpEntity();
            httpEntity.setMethod(request.getMethod());
            httpEntity.setUrl(request.getRequestURI());
            httpEntity.setIp(request.getRemoteAddr());
            HashMap<String, Object> parameter = showParams(request);
            httpEntity.setParameter(JSON.toJSONString(parameter));
            httpEntity.setUserAgent(request.getHeader("user-agent"));
            // Read the buffered response body; it must be written back afterwards,
            // otherwise the client receives an empty response
            InputStream stream = ctx.getResponseDataStream();
            byte[] bytes = StreamUtils.copyToByteArray(stream);
            String body = new String(bytes, StandardCharsets.UTF_8);
            httpEntity.setResult(body);
            // The start time is recorded by the pre filter (TracePreFilter); setting it
            // here, in the post phase, would always yield a duration close to zero
            Object start = ctx.get(START_TIME_KEY);
            long startTime = start instanceof Long ? (Long) start : System.currentTimeMillis();
            httpEntity.setLaunchTime(new Date(startTime));
            httpEntity.setDuration(System.currentTimeMillis() - startTime);
            // addZuulRequestHeader lowercases header names, so read the traceId
            // from the MDC (set by TracePreFilter on the same thread) instead
            httpEntity.setTraceId(MDC.get(LogCollectionConstants.traceId));
            log.info("request statistics {}", JSON.toJSONString(httpEntity));
            ctx.setResponseBody(body);
        } catch (Exception e) {
            log.error("failed to record request statistics", e);
            return true;
        }
        return true;
    }

    public static HashMap<String, Object> showParams(HttpServletRequest request) {
        HashMap<String, Object> map = new HashMap<>();
        Enumeration<String> paramNames = request.getParameterNames();
        while (paramNames.hasMoreElements()) {
            String paramName = paramNames.nextElement();
            String[] paramValues = request.getParameterValues(paramName);
            if (paramValues.length > 0) {
                String paramValue = paramValues[0];
                if (paramValue.length() != 0) {
                    map.put(paramName, paramValue);
                }
            }
        }
        return map;
    }
}
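HttpEntity here is a custom DTO, not Spring's org.springframework.http.HttpEntity, and it is not shown in the original. A plausible sketch, assuming Lombok, with fields matching the setters used above:

import java.util.Date;
import lombok.Data;

@Data
public class HttpEntity {
    private String method;     // HTTP method
    private String url;        // request URI
    private String ip;         // caller address
    private String parameter;  // query/form parameters as JSON
    private String userAgent;  // user-agent header
    private String result;     // response body
    private Date launchTime;   // request start time
    private Long duration;     // elapsed milliseconds
    private String traceId;    // trace identifier
}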
Service code
WebMvcConfig.java (interceptor registration)
Registers the interceptor that reads the traceId from the upstream request header and records it into the logs via the MDC class.
@Configuration
public class WebMvcConfig extends WebMvcConfigurationSupport {
    @Override
    protected void addInterceptors(InterceptorRegistry registry) {
        // Apply the trace interceptor to every request path
        registry.addInterceptor(new TraceInterceptor()).addPathPatterns("/**");
        super.addInterceptors(registry);
    }
}

public class TraceInterceptor implements HandlerInterceptor {
    @Override
    public boolean preHandle(HttpServletRequest request, HttpServletResponse response, Object handler) {
        // Copy the traceId from the upstream request header into the MDC; if the
        // header is absent, clear any stale value left on this pooled thread
        String traceIdVal = request.getHeader(LogCollectionConstants.traceId);
        if (StringUtils.isNotEmpty(traceIdVal)) {
            MDC.put(LogCollectionConstants.traceId, traceIdVal);
        } else {
            MDC.remove(LogCollectionConstants.traceId);
        }
        return true;
    }
}
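Servlet containers reuse worker threads, so a trace value can linger after a request completes. The interceptor above already overwrites or removes the value in preHandle; an alternative (a sketch, not part of the original) is to clear it explicitly when the request finishes:

@Override
public void afterCompletion(HttpServletRequest request, HttpServletResponse response,
                            Object handler, Exception ex) {
    // Remove the traceId so the pooled worker thread does not leak it into later log lines
    MDC.remove(LogCollectionConstants.traceId);
}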
HystrixConfig.java (Hystrix / circuit-breaker configuration)
@Configuration
public class HystrixConfig extends LogHystrixConfig {
    public HystrixConfig() {
        super();
    }
}

public class LogHystrixConfig {
    public static final Logger log = LoggerFactory.getLogger(LogHystrixConfig.class);

    public LogHystrixConfig() {
        try {
            // Hystrix runs commands on its own thread pool, so the thread-local MDC
            // must be propagated explicitly via a custom concurrency strategy
            HystrixConcurrencyStrategy target = new MdcHystrixConcurrencyStrategy();
            HystrixConcurrencyStrategy strategy = HystrixPlugins.getInstance().getConcurrencyStrategy();
            if (strategy instanceof MdcHystrixConcurrencyStrategy) {
                return;
            }
            // Save the other registered plugins: HystrixPlugins.reset() discards them all,
            // so they must be re-registered alongside the new concurrency strategy
            HystrixCommandExecutionHook commandExecutionHook =
                    HystrixPlugins.getInstance().getCommandExecutionHook();
            HystrixEventNotifier eventNotifier = HystrixPlugins.getInstance().getEventNotifier();
            HystrixMetricsPublisher metricsPublisher = HystrixPlugins.getInstance().getMetricsPublisher();
            HystrixPropertiesStrategy propertiesStrategy = HystrixPlugins.getInstance().getPropertiesStrategy();
            HystrixPlugins.reset();
            HystrixPlugins.getInstance().registerConcurrencyStrategy(target);
            HystrixPlugins.getInstance().registerCommandExecutionHook(commandExecutionHook);
            HystrixPlugins.getInstance().registerEventNotifier(eventNotifier);
            HystrixPlugins.getInstance().registerMetricsPublisher(metricsPublisher);
            HystrixPlugins.getInstance().registerPropertiesStrategy(propertiesStrategy);
        } catch (Exception e) {
            log.error("Failed to register MDC Hystrix concurrency strategy", e);
        }
    }
}
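MdcHystrixConcurrencyStrategy is referenced above but its source is not shown in this section. A minimal sketch, assuming SLF4J's MDC, that copies the caller's MDC map onto the Hystrix worker thread:

import com.netflix.hystrix.strategy.concurrency.HystrixConcurrencyStrategy;
import java.util.Map;
import java.util.concurrent.Callable;
import org.slf4j.MDC;

public class MdcHystrixConcurrencyStrategy extends HystrixConcurrencyStrategy {
    @Override
    public <T> Callable<T> wrapCallable(Callable<T> callable) {
        // Capture the MDC of the calling thread at submission time
        Map<String, String> contextMap = MDC.getCopyOfContextMap();
        return () -> {
            try {
                if (contextMap != null) {
                    MDC.setContextMap(contextMap);
                }
                return callable.call();
            } finally {
                // Clean up so the pooled Hystrix thread does not leak the context
                MDC.clear();
            }
        };
    }
}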
FeignInterceptorConfig.java (Feign configuration)
@Configuration
public class FeignInterceptorConfig extends LogFeignInterceptorConfig implements RequestInterceptor {
    @Override
    public void apply(RequestTemplate requestTemplate) {
        // Propagate the current traceId to the called service via the request header
        requestTemplate.header(LogCollectionConstants.traceId, super.getTraceId());
    }
}

public class LogFeignInterceptorConfig {
    public String getTraceId() {
        // Read the traceId that the MVC interceptor (or the scheduled-task aspect) put into the MDC
        return MDC.get(LogCollectionConstants.traceId);
    }
}
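If the MDC holds no traceId (for example, a call that never passed through the gateway or the scheduled-task aspect), getTraceId() returns null. A defensive variant of apply() (a sketch, not part of the original) could skip the header in that case:

@Override
public void apply(RequestTemplate requestTemplate) {
    String traceId = super.getTraceId();
    // Only add the header when a traceId is actually present
    if (traceId != null && !traceId.isEmpty()) {
        requestTemplate.header(LogCollectionConstants.traceId, traceId);
    }
}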
Scheduled-task AOP
Before a scheduled task starts executing, an aspect assigns a traceId to the MDC class.
@Aspect
@Component
@Slf4j
public class ScheduledAspect {
    @Pointcut("@annotation(org.springframework.scheduling.annotation.Scheduled)")
    public void proxyAspect() {
    }

    @Before("proxyAspect()")
    public void before(JoinPoint joinPoint) throws Throwable {
        // Scheduled tasks have no upstream request, so generate a fresh traceId for each run
        String traceId = LogHelper.getTraceId();
        MDC.put(LogCollectionConstants.traceId, traceId);
    }
}
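Scheduler threads are pooled as well. An @After advice (a sketch, not in the original) can clear the traceId once the task finishes so it does not bleed into other logging on the same thread:

@After("proxyAspect()")
public void after(JoinPoint joinPoint) {
    // Clear the traceId when the task completes
    MDC.remove(LogCollectionConstants.traceId);
}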
logback.xml (logging configuration)
Use %X{traceId} in the pattern to write the traceId value from the MDC into each log line.
<property name="CONSOLE_LOG_PATTERN" value="${CONSOLE_LOG_PATTERN:-%clr(%d{${LOG_DATEFORMAT_PATTERN:-yyyy-MM-dd HH:mm:ss.SSS}}){faint} %clr(${LOG_LEVEL_PATTERN:-%5p}) %clr(${PID:- }) [%X{traceId}] %clr(---){faint} %clr([%15.15t]){faint} %clr(%-40.40logger{39}){cyan} %clr(:){faint} %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}}"/>
<property name="FILE_LOG_PATTERN" value="${FILE_LOG_PATTERN:-%d{${LOG_DATEFORMAT_PATTERN:-yyyy-MM-dd HH:mm:ss.SSS}} ${LOG_LEVEL_PATTERN:-%5p} ${PID:- } [%X{traceId}] --- [%t] : %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}}"/>
Test Results
1. A request through the gateway to Service 1
Add a test endpoint in Service1:
@RestController
@Slf4j
public class UserController {
    @GetMapping("/user/getlist")
    public List<String> getlist() {
        log.info("test endpoint user");
        List<String> list = asList("user1", "user2", "user3");
        log.warn("test warn message");
        return list;
    }
}
The results of calling the endpoint through the gateway are shown below.
Log printed in the gateway (screenshot): from it the traceId is found to be 40417dd85d224eda8d67925dce335b6b
Logs in Service1 (screenshot)
2. A request through the gateway to Service 1, with Service 1 calling Service 2
Test code
In Service1:
@RestController
@Slf4j
public class UserController {
    @Autowired
    private DemoApiClient demoApiClient;

    @GetMapping("/user/getlist2")
    public List<String> getlist2() {
        log.info("test endpoint getlist2");
        List<String> result = demoApiClient.getlist2();
        log.info("call to the Service2 endpoint succeeded, returning its data");
        return result;
    }
}
In Service2:
@RestController
@Slf4j
public class DemoController {
    @GetMapping("/demo/getlist2")
    public List<String> getlist2() {
        log.info("request arrived via another service");
        List<String> list = asList("haha", "hehe", "youyou");
        log.info("request finished");
        return list;
    }
}
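The DemoApiClient Feign client that Service1 injects is not shown in the source. A minimal sketch (the service name "service2" is an assumption) might be:

@FeignClient(name = "service2")
public interface DemoApiClient {
    // Maps to DemoController#getlist2 in Service2; the traceId header is added
    // automatically by FeignInterceptorConfig
    @GetMapping("/demo/getlist2")
    List<String> getlist2();
}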
Through the gateway the traceId is found to be f87e568753004b7e830e76497936fb08
Logs printed in Service1 (screenshot)
Logs printed in Service2 (screenshot)
3. A scheduled task calls Service 1
Add the test code in Service1:
@Component
@Slf4j
public class TestJob {
    @Autowired
    private DemoApiClient demoApiClient;

    @Scheduled(fixedRate = 2000)
    public void execute() {
        List<String> result = demoApiClient.getlist2();
        log.info("scheduled task executed; fetched result {} ", result);
    }
}
From one task execution record the traceId is found to be 56fda9d194e241f9a32e6c1ecf61ee58
The corresponding logs printed in Service2 (screenshot)
Log Collection
filebeat
A log collection tool; the configuration below is the one used for this article's setup.
Partial filebeat configuration:
# ============================== Filebeat inputs ===============================
filebeat.inputs:
- type: log
  enabled: true  # must be set to true for this input to run
  paths:
    - /usr/local/webapp/pro-user-ceshi/logs/info/*.log  # directories to read log files from
    - /usr/local/webapp/pro-user-ceshi/logs/warn/*.log
  # only ship lines that contain a 32-hex-character traceId in square brackets
  include_lines: ['\[[0-9a-f]{8}([0-9a-f]{4}){3}[0-9a-f]{12}\]']
  # drop MyBatis SQL noise lines
  exclude_lines: ['.*: ==> Preparing:.*','.*: ==> Parameters:.*','.*: <== Total:.*']
  fields:
    type: pro-user-ceshi
  # multiline handling: lines starting with whitespace + "at"/"...", with "Caused by:",
  # or containing " SQL参数" are merged into the previous line
  multiline.pattern: '^[[:space:]]+(at|\.{3})\b|^Caused by:| SQL参数'
  multiline.negate: false
  multiline.match: after
- type: log
  enabled: false  # this input is currently disabled
  paths:
    - /usr/local/webapp/pro-user-ceshi2/logs/sql/*.log
  include_lines: ['\[[0-9a-f]{8}([0-9a-f]{4}){3}[0-9a-f]{12}\]']
  exclude_lines: ['.*: ==> Preparing:.*','.*: ==> Parameters:.*','.*: <== Total:.*']
  fields:
    type: pro-user-ceshi2
  multiline.pattern: '^[[:space:]]+(at|\.{3})\b|^Caused by:|\\$'
  multiline.negate: false
  multiline.match: after
- type: filestream
  # Change to true to enable this input configuration.
  enabled: false
  # Paths that should be crawled and fetched. Glob based paths.
  paths:
    - /var/log/*.log
# ============================== Filebeat modules ==============================
filebeat.config.modules:
  path: ${path.config}/modules.d/*.yml
  reload.enabled: false
# ======================= Elasticsearch template setting =======================
setup.template.settings:
  index.number_of_shards: 1
# ---------------------------- Elasticsearch Output ----------------------------
output.elasticsearch:
  # Array of hosts to connect to.
  hosts: ["192.168.1.132:9600"]  # Elasticsearch address
  indices:
    # route each service's logs to its own index via the custom "type" field
    - index: "system-service-pro-user-ceshi_logs_%{+yyyy.MM.dd}"
      when.equals:
        fields.type: "pro-user-ceshi"
    - index: "system-pro-user-ceshi2_logs_%{+yyyy.MM.dd}"
      when.equals:
        fields.type: "pro-user-ceshi2"
The complete code is available at https://gitee.com/momentzhj/log-collection