7.25 配置ELK日志系统
This commit is contained in:
parent
7cb33348f1
commit
645255ecf5
66
docs/dev-ops/docker-compose-elk.yml
Normal file
66
docs/dev-ops/docker-compose-elk.yml
Normal file
@ -0,0 +1,66 @@
# Run with: docker-compose -f docker-compose-elk.yml up -d
# Prepare host dirs first: mkdir -p {config,data,logs,plugins}
# Verify ingestion (Kibana dev console): GET _cat/indices — the
# springboot-logstash-* / group-buy-market-log-* index appears once the
# application starts shipping logs.
version: '3'

services:
  elasticsearch:
    image: elasticsearch:7.17.28
    container_name: elasticsearch
    restart: always
    ports:
      - '9201:9200'   # HTTP API (host 9201 avoids clashing with a locally installed ES)
      - '9300:9300'   # node transport
    environment:
      # NOTE: cluster.name was previously set twice ('elasticsearch' and
      # 'docker-cluster'); only the last value took effect. Keep the single
      # effective value.
      - 'cluster.name=docker-cluster'
      - 'discovery.type=single-node'            # single-node mode, no cluster discovery
      - 'ES_JAVA_OPTS=-Xms512m -Xmx512m'        # JVM heap size
    networks:
      - elk

  logstash:
    image: logstash:7.17.28
    container_name: logstash
    restart: always
    volumes:
      - /etc/localtime:/etc/localtime:ro        # sync container clock with host (read-only)
      - ./logstash/logstash.conf:/usr/share/logstash/pipeline/logstash.conf
    ports:
      - '4560:4560'          # TCP input used by logstash-logback-encoder
      - '50000:50000/tcp'
      - '50000:50000/udp'
      - '9600:9600'          # logstash monitoring API
    environment:
      LS_JAVA_OPTS: -Xms1024m -Xmx1024m
      TZ: Asia/Shanghai
      MONITORING_ENABLED: 'false'   # quoted: env values must be strings, not YAML booleans
    links:
      - elasticsearch:es     # the pipeline reaches elasticsearch via the "es" alias
    networks:
      - elk
    depends_on:
      - elasticsearch        # start elasticsearch before logstash

  kibana:
    image: kibana:7.17.28
    container_name: kibana
    restart: always
    volumes:
      - /etc/localtime:/etc/localtime:ro
      - ./kibana/config/kibana.yml:/usr/share/kibana/config/kibana.yml
    ports:
      - '5601:5601'
    links:
      - elasticsearch:es     # "es" alias also resolves from this container
    environment:
      # The Kibana docker image only maps UPPERCASE_UNDERSCORE env vars to
      # settings; the previous 'elasticsearch.hosts=...' entry was ignored and
      # ELASTICSEARCH_URL is the obsolete 6.x variable. The mounted kibana.yml
      # takes precedence anyway; this is a matching fallback.
      - ELASTICSEARCH_HOSTS=http://elasticsearch:9200
      - I18N_LOCALE=zh-CN
    networks:
      - elk
    depends_on:
      - elasticsearch

networks:
  elk:
    driver: bridge
10
docs/dev-ops/kibana/config/kibana.yml
Normal file
10
docs/dev-ops/kibana/config/kibana.yml
Normal file
@ -0,0 +1,10 @@
#
# ** THIS IS AN AUTO-GENERATED FILE **
#

# Default Kibana configuration for docker target
server.host: "0"
server.shutdownTimeout: "5s"
elasticsearch.hosts: [ "http://elasticsearch:9200" ]  # remember to change the host/ip when ES is remote
monitoring.ui.container.elasticsearch.enabled: true
i18n.locale: "zh-CN"
17
docs/dev-ops/logstash/logstash.conf
Normal file
17
docs/dev-ops/logstash/logstash.conf
Normal file
@ -0,0 +1,17 @@
# Logstash pipeline: receives JSON log events over TCP (from the
# logstash-logback-encoder appender) and indexes them into Elasticsearch.
input {
  tcp {
    mode => "server"        # listen; the application connects to us
    host => "0.0.0.0"
    port => 4560            # matches the 4560 port mapping in docker-compose-elk.yml
    codec => json_lines     # one JSON event per line
    type => "info"
  }
}
filter {}
output {
  elasticsearch {
    action => "index"
    hosts => "es:9200"      # "es" is the compose link alias for the elasticsearch container
    index => "group-buy-market-log-%{+YYYY.MM.dd}"  # one index per day
  }
}
@ -91,7 +91,10 @@
|
|||||||
<groupId>com.squareup.okhttp3</groupId>
|
<groupId>com.squareup.okhttp3</groupId>
|
||||||
<artifactId>logging-interceptor</artifactId>
|
<artifactId>logging-interceptor</artifactId>
|
||||||
</dependency>
|
</dependency>
|
||||||
|
<dependency>
|
||||||
|
<groupId>net.logstash.logback</groupId>
|
||||||
|
<artifactId>logstash-logback-encoder</artifactId>
|
||||||
|
</dependency>
|
||||||
<!-- 工程模块;启动依赖 trigger->domain, infrastructure-->
|
<!-- 工程模块;启动依赖 trigger->domain, infrastructure-->
|
||||||
<dependency>
|
<dependency>
|
||||||
<groupId>edu.whut</groupId>
|
<groupId>edu.whut</groupId>
|
||||||
|
@ -0,0 +1,32 @@
package edu.whut.config;

import org.jetbrains.annotations.NotNull;
import org.slf4j.MDC;
import org.springframework.stereotype.Component;
import org.springframework.web.filter.OncePerRequestFilter;

import javax.servlet.FilterChain;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.util.UUID;

/**
 * Servlet filter that places a trace id into the SLF4J MDC for the duration
 * of each request, so every log line produced while handling the request
 * carries a "trace-id" field (e.g. when shipped to ELK via the logstash
 * encoder).
 */
@Component
public class TraceIdFilter extends OncePerRequestFilter {

    /** MDC key — also used as the HTTP header name for propagation. */
    private static final String TRACE_ID = "trace-id";

    /**
     * Binds a trace id to the current thread's MDC around the filter chain.
     * Reuses an incoming "trace-id" header when present so ids propagate
     * across service hops; otherwise generates a fresh random UUID. The id
     * is echoed back on the response so clients can correlate responses
     * with server logs.
     *
     * @param request     current HTTP request
     * @param response    current HTTP response
     * @param filterChain remaining filter chain to invoke
     * @throws ServletException propagated from downstream filters
     * @throws IOException      propagated from downstream filters
     */
    @Override
    protected void doFilterInternal(@NotNull HttpServletRequest request,
                                    @NotNull HttpServletResponse response,
                                    FilterChain filterChain)
            throws ServletException, IOException {
        // Prefer an upstream-supplied id; mint a new one only when absent.
        String traceId = request.getHeader(TRACE_ID);
        if (traceId == null || traceId.isEmpty()) {
            traceId = UUID.randomUUID().toString();
        }
        try {
            MDC.put(TRACE_ID, traceId);
            response.setHeader(TRACE_ID, traceId);
            filterChain.doFilter(request, response);
        } finally {
            // Remove only our own key: MDC.clear() would also wipe entries
            // placed by enclosing filters on the same thread.
            MDC.remove(TRACE_ID);
        }
    }
}
@ -74,6 +74,10 @@ redis:
|
|||||||
ping-interval: 60000
|
ping-interval: 60000
|
||||||
keep-alive: true
|
keep-alive: true
|
||||||
|
|
||||||
|
# 日志;logstash部署的服务器IP
|
||||||
|
logstash:
|
||||||
|
host: 127.0.0.1
|
||||||
|
|
||||||
# 日志
|
# 日志
|
||||||
logging:
|
logging:
|
||||||
level:
|
level:
|
||||||
|
@ -97,6 +97,16 @@
|
|||||||
<appender-ref ref="ERROR_FILE"/>
|
<appender-ref ref="ERROR_FILE"/>
|
||||||
</appender>
|
</appender>
|
||||||
|
|
||||||
|
<!-- <!– 上报日志;ELK –>-->
|
||||||
|
<!-- <springProperty name="LOG_STASH_HOST" scope="context" source="logstash.host" defaultValue="127.0.0.1"/>-->
|
||||||
|
|
||||||
|
<!-- <!–输出到logstash的appender–>-->
|
||||||
|
<!-- <appender name="LOGSTASH" class="net.logstash.logback.appender.LogstashTcpSocketAppender">-->
|
||||||
|
<!-- <!–可以访问的logstash日志收集端口–>-->
|
||||||
|
<!-- <destination>${LOG_STASH_HOST}:4560</destination>-->
|
||||||
|
<!-- <encoder charset="UTF-8" class="net.logstash.logback.encoder.LogstashEncoder"/>-->
|
||||||
|
<!-- </appender>-->
|
||||||
|
|
||||||
<!-- 开发环境:控制台打印 -->
|
<!-- 开发环境:控制台打印 -->
|
||||||
<springProfile name="dev">
|
<springProfile name="dev">
|
||||||
<logger name="com.nmys.view" level="debug"/>
|
<logger name="com.nmys.view" level="debug"/>
|
||||||
@ -108,6 +118,8 @@
|
|||||||
<appender-ref ref="ASYNC_FILE_INFO"/>
|
<appender-ref ref="ASYNC_FILE_INFO"/>
|
||||||
<!-- 异步日志-ERROR -->
|
<!-- 异步日志-ERROR -->
|
||||||
<appender-ref ref="ASYNC_FILE_ERROR"/>
|
<appender-ref ref="ASYNC_FILE_ERROR"/>
|
||||||
|
<!-- <!– 上报日志-ELK –>-->
|
||||||
|
<!-- <appender-ref ref="LOGSTASH"/>-->
|
||||||
</root>
|
</root>
|
||||||
|
|
||||||
</configuration>
|
</configuration>
|
7
pom.xml
7
pom.xml
@ -138,7 +138,12 @@
|
|||||||
<artifactId>spring-boot-starter-amqp</artifactId>
|
<artifactId>spring-boot-starter-amqp</artifactId>
|
||||||
<version>3.2.0</version>
|
<version>3.2.0</version>
|
||||||
</dependency>
|
</dependency>
|
||||||
|
<!-- 上报日志 ELK -->
|
||||||
|
<dependency>
|
||||||
|
<groupId>net.logstash.logback</groupId>
|
||||||
|
<artifactId>logstash-logback-encoder</artifactId>
|
||||||
|
<version>7.3</version>
|
||||||
|
</dependency>
|
||||||
<!-- 工程模块 -->
|
<!-- 工程模块 -->
|
||||||
<dependency>
|
<dependency>
|
||||||
<groupId>edu.whut</groupId>
|
<groupId>edu.whut</groupId>
|
||||||
|
Loading…
x
Reference in New Issue
Block a user