7.25 配置ELK日志系统

This commit is contained in:
zhangsan 2025-07-25 10:08:39 +08:00
parent 7cb33348f1
commit 645255ecf5
8 changed files with 151 additions and 2 deletions

View File

@ -0,0 +1,66 @@
version: '3'
# Usage: docker-compose -f docker-compose-elk.yml up -d
# Prepare host dirs first: mkdir -p {config,data,logs,plugins}
# Verify ingestion in Kibana Dev Tools with: GET _cat/indices —
# once the app ships logs, the group-buy-market-log-* index appears.
services:
  elasticsearch:
    image: elasticsearch:7.17.28
    ports:
      - '9201:9200'
      - '9300:9300'
    container_name: elasticsearch
    restart: always
    environment:
      # Single cluster.name entry — the original listed the key twice
      # ('elasticsearch' and 'docker-cluster'); duplicates silently conflict.
      - 'cluster.name=elasticsearch'
      - 'discovery.type=single-node'       # run as a single-node cluster
      - 'ES_JAVA_OPTS=-Xms512m -Xmx512m'   # cap the JVM heap
    networks:
      - elk
  logstash:
    image: logstash:7.17.28
    container_name: logstash
    restart: always
    volumes:
      - /etc/localtime:/etc/localtime:ro
      - ./logstash/logstash.conf:/usr/share/logstash/pipeline/logstash.conf
    ports:
      - '4560:4560'
      - '50000:50000/tcp'
      - '50000:50000/udp'
      - '9600:9600'
    environment:
      LS_JAVA_OPTS: '-Xms1024m -Xmx1024m'
      TZ: 'Asia/Shanghai'
      MONITORING_ENABLED: 'false'   # quoted: env values are strings, not YAML booleans
    links:
      - elasticsearch:es   # legacy alias; the pipeline config addresses ES as "es"
    networks:
      - elk
    depends_on:
      - elasticsearch   # controls start order only; does not wait for ES readiness
  kibana:
    image: kibana:7.17.28
    container_name: kibana
    restart: always
    volumes:
      - /etc/localtime:/etc/localtime:ro
      - ./kibana/config/kibana.yml:/usr/share/kibana/config/kibana.yml
    ports:
      - '5601:5601'
    links:
      - elasticsearch:es   # legacy alias so "es" also resolves to elasticsearch
    environment:
      # Kibana 7.x reads ELASTICSEARCH_HOSTS; ELASTICSEARCH_URL is 6.x-only,
      # and dotted keys like "elasticsearch.hosts" are not valid env var names.
      - ELASTICSEARCH_HOSTS=http://elasticsearch:9200
      - I18N_LOCALE=zh-CN
    networks:
      - elk
    depends_on:
      - elasticsearch
networks:
  elk:
    driver: bridge

View File

@ -0,0 +1,10 @@
#
# ** THIS IS AN AUTO-GENERATED FILE **
#
# Default Kibana configuration for docker target
server.host: "0"
server.shutdownTimeout: "5s"
elasticsearch.hosts: [ "http://elasticsearch:9200" ] # change the host/IP if ES runs elsewhere
monitoring.ui.container.elasticsearch.enabled: true
i18n.locale: "zh-CN"

View File

@ -0,0 +1,17 @@
# Logstash pipeline: accept JSON log events over TCP and index them
# into Elasticsearch with one index per day.
input {
# TCP server on port 4560; each line is decoded as a JSON document
# (matches the LogstashTcpSocketAppender destination in logback).
tcp {
mode => "server"
host => "0.0.0.0"
port => 4560
codec => json_lines
type => "info"
}
}
# No transformation applied — events are forwarded as received.
filter {}
output {
# "es" resolves to the elasticsearch container via the compose link alias.
elasticsearch {
action => "index"
hosts => "es:9200"
index => "group-buy-market-log-%{+YYYY.MM.dd}"
}
}

View File

@ -91,7 +91,10 @@
<groupId>com.squareup.okhttp3</groupId>
<artifactId>logging-interceptor</artifactId>
</dependency>
<!-- ELK log shipping: logback encoder that emits JSON events for Logstash -->
<!-- NOTE(review): no <version> element here — presumably managed by a parent
     POM / dependencyManagement section not visible in this diff; verify. -->
<dependency>
<groupId>net.logstash.logback</groupId>
<artifactId>logstash-logback-encoder</artifactId>
</dependency>
<!-- 工程模块;启动依赖 trigger->domain, infrastructure-->
<dependency>
<groupId>edu.whut</groupId>

View File

@ -0,0 +1,32 @@
package edu.whut.config;

import org.jetbrains.annotations.NotNull;
import org.slf4j.MDC;
import org.springframework.stereotype.Component;
import org.springframework.web.filter.OncePerRequestFilter;

import javax.servlet.FilterChain;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.util.UUID;

/**
 * Servlet filter that binds a per-request trace id into SLF4J's MDC so every
 * log line (including the JSON shipped to Logstash) can be correlated back to
 * the request that produced it.
 */
@Component
public class TraceIdFilter extends OncePerRequestFilter {

    /** MDC key (and HTTP header name) under which the trace id is carried. */
    private static final String TRACE_ID = "trace-id";

    /**
     * Reuses an incoming {@code trace-id} header when the caller supplied one
     * (so ids propagate across service hops); otherwise generates a random
     * UUID. The id is removed in a finally block so pooled worker threads
     * never leak a stale value into the next request.
     *
     * @param request     current HTTP request; its {@code trace-id} header is honored if present
     * @param response    current HTTP response (passed through unchanged)
     * @param filterChain remainder of the filter chain to invoke
     * @throws ServletException propagated from downstream filters
     * @throws IOException      propagated from downstream filters
     */
    @Override
    protected void doFilterInternal(@NotNull HttpServletRequest request,
                                    @NotNull HttpServletResponse response,
                                    FilterChain filterChain) throws ServletException, IOException {
        String traceId = request.getHeader(TRACE_ID);
        if (traceId == null || traceId.isEmpty()) {
            traceId = UUID.randomUUID().toString();
        }
        try {
            MDC.put(TRACE_ID, traceId);
            filterChain.doFilter(request, response);
        } finally {
            // Remove only our own key: MDC.clear() would also wipe context
            // values placed there by other filters or interceptors.
            MDC.remove(TRACE_ID);
        }
    }
}

View File

@ -74,6 +74,10 @@ redis:
ping-interval: 60000
keep-alive: true
# IP of the server where Logstash is deployed (read by logback via the
# "logstash.host" spring property for the LOGSTASH appender destination)
logstash:
host: 127.0.0.1
# Logging configuration
logging:
level:

View File

@ -97,6 +97,16 @@
<appender-ref ref="ERROR_FILE"/>
</appender>
<!-- &lt;!&ndash; 上报日志ELK &ndash;&gt;-->
<!-- <springProperty name="LOG_STASH_HOST" scope="context" source="logstash.host" defaultValue="127.0.0.1"/>-->
<!-- &lt;!&ndash;输出到logstash的appender&ndash;&gt;-->
<!-- <appender name="LOGSTASH" class="net.logstash.logback.appender.LogstashTcpSocketAppender">-->
<!-- &lt;!&ndash;可以访问的logstash日志收集端口&ndash;&gt;-->
<!-- <destination>${LOG_STASH_HOST}:4560</destination>-->
<!-- <encoder charset="UTF-8" class="net.logstash.logback.encoder.LogstashEncoder"/>-->
<!-- </appender>-->
<!-- 开发环境:控制台打印 -->
<springProfile name="dev">
<logger name="com.nmys.view" level="debug"/>
@ -108,6 +118,8 @@
<appender-ref ref="ASYNC_FILE_INFO"/>
<!-- 异步日志-ERROR -->
<appender-ref ref="ASYNC_FILE_ERROR"/>
<!-- &lt;!&ndash; 上报日志-ELK &ndash;&gt;-->
<!-- <appender-ref ref="LOGSTASH"/>-->
</root>
</configuration>

View File

@ -138,7 +138,12 @@
<artifactId>spring-boot-starter-amqp</artifactId>
<version>3.2.0</version>
</dependency>
<!-- 上报日志 ELK -->
<dependency>
<groupId>net.logstash.logback</groupId>
<artifactId>logstash-logback-encoder</artifactId>
<version>7.3</version>
</dependency>
<!-- 工程模块 -->
<dependency>
<groupId>edu.whut</groupId>