diff --git a/docs/dev-ops/docker-compose-elk.yml b/docs/dev-ops/docker-compose-elk.yml
new file mode 100644
index 0000000..fbf353d
--- /dev/null
+++ b/docs/dev-ops/docker-compose-elk.yml
@@ -0,0 +1,66 @@
+version: '3'
+# Start the stack: docker-compose -f docker-compose-elk.yml up -d
+# Create the local mount directories first: mkdir -p {config,data,logs,plugins}
+# Verify in Kibana Dev Tools: GET _cat/indices - once the application ships logs, a group-buy-market-log-* index appears, which means the pipeline is wired up correctly
+services:
+ elasticsearch:
+ image: elasticsearch:7.17.28
+ ports:
+ - '9201:9200'
+ - '9300:9300'
+ container_name: elasticsearch
+ restart: always
+ environment:
+      - 'cluster.name=docker-cluster'        # cluster name
+      - 'discovery.type=single-node'         # run as a single node
+      - 'ES_JAVA_OPTS=-Xms512m -Xmx512m'     # JVM heap size
+ networks:
+ - elk
+
+ logstash:
+ image: logstash:7.17.28
+ container_name: logstash
+ restart: always
+ volumes:
+ - /etc/localtime:/etc/localtime
+ - ./logstash/logstash.conf:/usr/share/logstash/pipeline/logstash.conf
+ ports:
+ - '4560:4560'
+ - '50000:50000/tcp'
+ - '50000:50000/udp'
+ - '9600:9600'
+ environment:
+ LS_JAVA_OPTS: -Xms1024m -Xmx1024m
+ TZ: Asia/Shanghai
+      MONITORING_ENABLED: "false"
+ links:
+      - elasticsearch:es  # this container can reach elasticsearch via the hostname "es"
+ networks:
+ - elk
+ depends_on:
+      - elasticsearch  # start logstash only after elasticsearch is up
+
+ kibana:
+ image: kibana:7.17.28
+ container_name: kibana
+ restart: always
+ volumes:
+ - /etc/localtime:/etc/localtime
+ - ./kibana/config/kibana.yml:/usr/share/kibana/config/kibana.yml
+ ports:
+ - '5601:5601'
+ links:
+      - elasticsearch:es  # this container can reach elasticsearch via the hostname "es"
+ environment:
+      - ELASTICSEARCH_HOSTS=http://elasticsearch:9200  # address Kibana uses to reach Elasticsearch (also set in the mounted kibana.yml)
+      - I18N_LOCALE=zh-CN
+ networks:
+ - elk
+ depends_on:
+ - elasticsearch
+
+networks:
+ elk:
+ driver: bridge
\ No newline at end of file
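
Once the stack is up, the check suggested in the compose header (GET _cat/indices) can also be scripted. The sketch below is a hypothetical ElkSmokeTest class (not part of this change); it assumes the stack runs on localhost with the 9201->9200 host port mapping declared above, and simply lists the indices so the group-buy-market-log-* index from logstash.conf can be spotted once logs are shipped.

    import java.net.URI;
    import java.net.http.HttpClient;
    import java.net.http.HttpRequest;
    import java.net.http.HttpResponse;

    /** Hypothetical smoke test for the ELK compose stack above. */
    public class ElkSmokeTest {
        public static void main(String[] args) throws Exception {
            HttpClient client = HttpClient.newHttpClient();
            // Same check as "GET _cat/indices" in Kibana Dev Tools: list all indices and
            // look for group-buy-market-log-* once the application has shipped its first log.
            HttpRequest request = HttpRequest.newBuilder()
                    .uri(URI.create("http://127.0.0.1:9201/_cat/indices?v"))
                    .GET()
                    .build();
            HttpResponse<String> response = client.send(request, HttpResponse.BodyHandlers.ofString());
            System.out.println(response.body());
        }
    }
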
diff --git a/docs/dev-ops/kibana/config/kibana.yml b/docs/dev-ops/kibana/config/kibana.yml
new file mode 100644
index 0000000..18aed52
--- /dev/null
+++ b/docs/dev-ops/kibana/config/kibana.yml
@@ -0,0 +1,10 @@
+#
+# ** THIS IS AN AUTO-GENERATED FILE **
+#
+
+# Default Kibana configuration for docker target
+server.host: "0"
+server.shutdownTimeout: "5s"
+elasticsearch.hosts: [ "http://elasticsearch:9200" ]  # change this address if Elasticsearch runs elsewhere
+monitoring.ui.container.elasticsearch.enabled: true
+i18n.locale: "zh-CN"
\ No newline at end of file
diff --git a/docs/dev-ops/logstash/logstash.conf b/docs/dev-ops/logstash/logstash.conf
new file mode 100644
index 0000000..d8acebb
--- /dev/null
+++ b/docs/dev-ops/logstash/logstash.conf
@@ -0,0 +1,17 @@
+input {
+ tcp {
+ mode => "server"
+ host => "0.0.0.0"
+ port => 4560
+ codec => json_lines
+ type => "info"
+ }
+}
+filter {}
+output {
+ elasticsearch {
+ action => "index"
+ hosts => "es:9200"
+ index => "group-buy-market-log-%{+YYYY.MM.dd}"
+ }
+}
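
The tcp input above expects newline-delimited JSON (codec json_lines) on port 4560. Before wiring the Spring Boot application, the pipeline can be exercised by hand; the snippet below is a minimal sketch (hypothetical LogstashPing class, localhost and port 4560 assumed from this config and the compose file) that pushes a single event, after which a group-buy-market-log-* index should show up in Elasticsearch.

    import java.io.OutputStream;
    import java.net.Socket;
    import java.nio.charset.StandardCharsets;

    /** Hypothetical helper: sends one JSON event to the Logstash tcp/json_lines input above. */
    public class LogstashPing {
        public static void main(String[] args) throws Exception {
            try (Socket socket = new Socket("127.0.0.1", 4560);
                 OutputStream out = socket.getOutputStream()) {
                // json_lines: one JSON document per line, terminated by '\n'
                String event = "{\"message\":\"hello from LogstashPing\",\"level\":\"INFO\"}\n";
                out.write(event.getBytes(StandardCharsets.UTF_8));
                out.flush();
            }
        }
    }
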
diff --git a/group-buying-sys-app/pom.xml b/group-buying-sys-app/pom.xml
index 1eb6c9e..2108023 100644
--- a/group-buying-sys-app/pom.xml
+++ b/group-buying-sys-app/pom.xml
@@ -91,7 +91,10 @@
             <groupId>com.squareup.okhttp3</groupId>
             <artifactId>logging-interceptor</artifactId>
         </dependency>
+        <dependency>
+            <groupId>net.logstash.logback</groupId>
+            <artifactId>logstash-logback-encoder</artifactId>
+        </dependency>
         <dependency>
             <groupId>edu.whut</groupId>
diff --git a/group-buying-sys-app/src/main/java/edu/whut/config/TraceIdFilter.java b/group-buying-sys-app/src/main/java/edu/whut/config/TraceIdFilter.java
new file mode 100644
index 0000000..f90a497
--- /dev/null
+++ b/group-buying-sys-app/src/main/java/edu/whut/config/TraceIdFilter.java
@@ -0,0 +1,32 @@
+package edu.whut.config;
+
+import org.jetbrains.annotations.NotNull;
+import org.slf4j.MDC;
+import org.springframework.stereotype.Component;
+import org.springframework.web.filter.OncePerRequestFilter;
+
+import javax.servlet.FilterChain;
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import java.io.IOException;
+import java.util.UUID;
+
+/**
+ * Puts a random trace-id into the SLF4J MDC for every HTTP request so that all log
+ * lines produced while handling that request share one id and can be correlated in
+ * Kibana (the Logstash encoder ships MDC entries as JSON fields).
+ */
+@Component
+public class TraceIdFilter extends OncePerRequestFilter {
+
+ private static final String TRACE_ID = "trace-id";
+
+ @Override
+ protected void doFilterInternal(@NotNull HttpServletRequest request, @NotNull HttpServletResponse response, FilterChain filterChain)
+ throws ServletException, IOException {
+ try {
+ String traceId = UUID.randomUUID().toString();
+ MDC.put(TRACE_ID, traceId);
+ filterChain.doFilter(request, response);
+ } finally {
+            // always clear the MDC so the trace-id does not leak to the next request handled by this thread
+            MDC.clear();
+ }
+ }
+
+}
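
With this filter registered, every request carries a trace-id in the MDC, and the Logstash encoder configured in logback-spring.xml serializes MDC entries into the JSON document sent to Logstash. The sketch below (a hypothetical PingController, not part of this change) shows the effect: the two log lines need no explicit trace handling, yet both can be found in Kibana by filtering on the same trace-id.

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;
    import org.springframework.web.bind.annotation.GetMapping;
    import org.springframework.web.bind.annotation.RestController;

    /** Hypothetical controller used only to illustrate trace-id correlation. */
    @RestController
    public class PingController {

        private static final Logger log = LoggerFactory.getLogger(PingController.class);

        @GetMapping("/ping")
        public String ping() {
            // TraceIdFilter has already put "trace-id" into the MDC for this request,
            // so both statements end up in Elasticsearch with the same trace-id field.
            log.info("ping received");
            log.info("ping handled");
            return "pong";
        }
    }
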
diff --git a/group-buying-sys-app/src/main/resources/application-dev.yml b/group-buying-sys-app/src/main/resources/application-dev.yml
index 99ac3ef..8e90c9d 100644
--- a/group-buying-sys-app/src/main/resources/application-dev.yml
+++ b/group-buying-sys-app/src/main/resources/application-dev.yml
@@ -74,6 +74,10 @@ redis:
ping-interval: 60000
keep-alive: true
+# Logging: IP of the host where Logstash is deployed (read by logback-spring.xml)
+logstash:
+ host: 127.0.0.1
+
# Logging
logging:
level:
diff --git a/group-buying-sys-app/src/main/resources/logback-spring.xml b/group-buying-sys-app/src/main/resources/logback-spring.xml
index 932ad72..afc86be 100644
--- a/group-buying-sys-app/src/main/resources/logback-spring.xml
+++ b/group-buying-sys-app/src/main/resources/logback-spring.xml
@@ -97,6 +97,16 @@
+    <!-- Read the Logstash host from application-dev.yml (logstash.host) -->
+    <springProperty name="LOG_STASH_HOST" scope="context" source="logstash.host" defaultValue="127.0.0.1"/>
+
+    <!-- Ship log events as JSON (including the MDC trace-id) to the Logstash TCP input on port 4560 -->
+    <appender name="LOGSTASH" class="net.logstash.logback.appender.LogstashTcpSocketAppender">
+        <destination>${LOG_STASH_HOST}:4560</destination>
+        <encoder class="net.logstash.logback.encoder.LogstashEncoder"/>
+    </appender>
+
@@ -108,6 +118,8 @@
+        <!-- also send everything the root logger receives to Logstash -->
+        <appender-ref ref="LOGSTASH"/>
\ No newline at end of file
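
Beyond the MDC trace-id, logstash-logback-encoder also lets individual log statements attach their own JSON fields via StructuredArguments, which Kibana can then filter and aggregate without parsing message text. A small sketch under the same assumptions as above (hypothetical class and field names):

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    import static net.logstash.logback.argument.StructuredArguments.kv;

    /** Hypothetical example of per-statement structured fields. */
    public class GroupBuyLogDemo {

        private static final Logger log = LoggerFactory.getLogger(GroupBuyLogDemo.class);

        public static void main(String[] args) {
            // kv("orderId", ...) appears as orderId=... in the formatted message and as an
            // "orderId" JSON field in the event shipped to Logstash.
            log.info("group buy order created, {}", kv("orderId", "100088801"));
        }
    }
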
diff --git a/pom.xml b/pom.xml
index 579d704..9a54e22 100644
--- a/pom.xml
+++ b/pom.xml
@@ -138,7 +138,12 @@
             <artifactId>spring-boot-starter-amqp</artifactId>
             <version>3.2.0</version>
         </dependency>
+        <dependency>
+            <groupId>net.logstash.logback</groupId>
+            <artifactId>logstash-logback-encoder</artifactId>
+            <version>7.3</version>
+        </dependency>
         <dependency>
             <groupId>edu.whut</groupId>