ELK

ELK即Elasticsearch、Logstash、Kibana组合起来搭建的线上日志系统

ELK各个服务的作用

  • ElasticSearch:用于存储收集到的日志信息
  • Logstash:用于收集日志,应用整合Logstash后会将日志发送给Logstash,Logstash再将日志转发到ElasticSearch
  • Kibana:通过Web端的可视化界面来查看日志

使用Docker Compose搭建ELK环境

拉取镜像

1
2
3
# Pull the three ELK images; keep all three pinned to the same version
docker pull elasticsearch:6.4.0
docker pull logstash:6.4.0
docker pull kibana:6.4.0

使用docker-compose.yml脚本启动ELK服务

创建配置文件存放目录

1
mkdir /mydata/logstash

上传logstash-springboot.conf文件

1
2
3
4
5
6
7
8
9
10
11
12
13
14
# Logstash pipeline: accept JSON log lines over TCP and forward them to Elasticsearch.
input {
  tcp {
    mode => "server"
    host => "0.0.0.0"
    port => 4560
    codec => json_lines
  }
}
output {
  elasticsearch {
    # "es" is the link alias for the elasticsearch container in docker-compose
    hosts => "es:9200"
    # one index per day
    index => "springboot-logstash-%{+YYYY.MM.dd}"
  }
}

docker-compose.yml内容

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
version: '3'
services:
  elasticsearch:
    image: elasticsearch:6.4.0
    container_name: elasticsearch
    environment:
      - "cluster.name=elasticsearch"  # cluster name
      - "discovery.type=single-node"  # run as a single node (no discovery)
      - "ES_JAVA_OPTS=-Xms512m -Xmx512m"  # JVM heap size
    volumes:
      - /mydata/elasticsearch/plugins:/usr/share/elasticsearch/plugins  # plugin directory mount
      - /mydata/elasticsearch/data:/usr/share/elasticsearch/data  # data directory mount
    ports:
      - "9200:9200"
  kibana:
    image: kibana:6.4.0
    container_name: kibana
    links:
      - elasticsearch:es  # elasticsearch is reachable under the hostname "es"
    depends_on:
      - elasticsearch  # start kibana after elasticsearch
    environment:
      # Kibana 6.x Docker images read ELASTICSEARCH_URL;
      # "elasticsearch.hosts" is the 7.x setting and is ignored by 6.4.0
      - "ELASTICSEARCH_URL=http://es:9200"
    ports:
      - "5601:5601"
  logstash:
    image: logstash:6.4.0
    container_name: logstash
    volumes:
      - /mydata/logstash/logstash-springboot.conf:/usr/share/logstash/pipeline/logstash.conf  # pipeline config mount
    depends_on:
      - elasticsearch  # start logstash after elasticsearch
    links:
      - elasticsearch:es  # elasticsearch is reachable under the hostname "es"
    ports:
      - "4560:4560"

上传到linux服务器并运行

1
docker-compose up -d

ElasticSearch安装中文分词器

1
2
3
4
5
6
7
8
# Enter the es container
docker exec -it elasticsearch /bin/bash
# Install the IK Chinese analyzer plugin (version must match the ES version)
elasticsearch-plugin install https://github.com/medcl/elasticsearch-analysis-ik/releases/download/v6.4.0/elasticsearch-analysis-ik-6.4.0.zip
# Leave the container
exit
# Restart es so the plugin is loaded
docker restart elasticsearch

logstash中安装json_lines插件

1
2
3
4
5
6
7
8
9
10
# Enter the logstash container
docker exec -it logstash /bin/bash
# Enter the bin directory
# NOTE(review): this changes to the system /bin, not /usr/share/logstash/bin;
# logstash-plugin still resolves via PATH, but confirm the intended directory
cd /bin/
# Install the json_lines codec used by the pipeline's tcp input
logstash-plugin install logstash-codec-json_lines
# Leave the container
exit
# Restart logstash so the plugin takes effect
docker restart logstash

访问地址:http://localhost:5601/

image-20230504041139207

应用集成Logstash

添加依赖

1
2
3
4
5
6
<!-- Logstash integration: ships logback events to Logstash over TCP as JSON -->
<dependency>
    <groupId>net.logstash.logback</groupId>
    <artifactId>logstash-logback-encoder</artifactId>
    <version>5.3</version>
</dependency>

添加配置文件logback-spring.xml

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
<?xml version="1.0" encoding="UTF-8"?>
<configuration>
    <!-- Pull in Spring Boot's default logback settings -->
    <include resource="org/springframework/boot/logging/logback/defaults.xml"/>
    <!-- Spring Boot's default console appender definitions -->
    <include resource="org/springframework/boot/logging/logback/console-appender.xml"/>
    <!-- Application name, read from spring.application.name -->
    <springProperty scope="context" name="APP_NAME"
                    source="spring.application.name" defaultValue="springBoot"/>
    <!-- Logstash host, read from the "logstash.host" property -->
    <springProperty name="LOG_STASH_HOST" scope="context"
                    source="logstash.host" defaultValue="localhost"/>
    <!-- Directory where log files are written -->
    <property name="LOG_FILE_PATH"
              value="${LOG_FILE:-${LOG_PATH:-${LOG_TEMP:-${java.io.tmpdir:-/tmp}}}/logs}"/>
    <contextName>${APP_NAME}</contextName>

    <!-- DEBUG-and-above logging to the console -->
    <appender name="CONSOLE"
              class="ch.qos.logback.core.ConsoleAppender">
        <!-- Pass DEBUG and above -->
        <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
            <level>DEBUG</level>
        </filter>
        <encoder>
            <!-- Use the default console log format -->
            <pattern>${CONSOLE_LOG_PATTERN}</pattern>
            <charset>UTF-8</charset>
        </encoder>
    </appender>

    <!-- DEBUG-and-above logging to a rolling file -->
    <appender name="FILE_DEBUG"
              class="ch.qos.logback.core.rolling.RollingFileAppender">
        <!-- Pass DEBUG and above -->
        <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
            <level>DEBUG</level>
        </filter>
        <encoder>
            <!-- Use the default file log format -->
            <pattern>${FILE_LOG_PATTERN}</pattern>
            <!-- Log file charset -->
            <charset>UTF-8</charset>
        </encoder>
        <!-- No <File> element: the file name comes solely from <fileNamePattern>.
             With both <File> and <fileNamePattern>, today's log would be written
             to <File> and renamed to the dated pattern on rollover. -->
        <!-- <File>logs/info.demo-logback.log</File> -->
        <!-- Roll over on both file size and time -->
        <rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
            <!-- File naming pattern: one file per day, %i for size-based splits -->
            <fileNamePattern>${LOG_FILE_PATH}/debug/${APP_NAME}-%d{yyyy-MM-dd}-%i.log</fileNamePattern>
            <!-- Start a new file once this size is exceeded (default 10MB) -->
            <maxFileSize>${LOG_FILE_MAX_SIZE:-10MB}</maxFileSize>
            <!-- Days of history to keep (default 30) -->
            <maxHistory>${LOG_FILE_MAX_HISTORY:-30}</maxHistory>
            <!-- Upper bound for the total size of all archived files -->
            <totalSizeCap>30GB</totalSizeCap>
        </rollingPolicy>
    </appender>

    <!-- ERROR-only logging to a rolling file -->
    <appender name="FILE_ERROR"
              class="ch.qos.logback.core.rolling.RollingFileAppender">
        <!-- Only ERROR level passes this filter -->
        <filter class="ch.qos.logback.classic.filter.LevelFilter">
            <level>ERROR</level>
            <!-- Accept on match -->
            <onMatch>ACCEPT</onMatch>
            <!-- Deny everything else -->
            <onMismatch>DENY</onMismatch>
        </filter>
        <encoder>
            <!-- Use the default file log format -->
            <pattern>${FILE_LOG_PATTERN}</pattern>
            <charset>UTF-8</charset>
        </encoder>
        <rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
            <!-- File naming pattern -->
            <fileNamePattern>${LOG_FILE_PATH}/error/${APP_NAME}-%d{yyyy-MM-dd}-%i.log</fileNamePattern>
            <!-- Start a new file once this size is exceeded (default 10MB) -->
            <maxFileSize>${LOG_FILE_MAX_SIZE:-10MB}</maxFileSize>
            <!-- Days of history to keep (default 30) -->
            <maxHistory>${LOG_FILE_MAX_HISTORY:-30}</maxHistory>
        </rollingPolicy>
    </appender>

    <!-- DEBUG-and-above logging shipped to Logstash.
         NOTE(review): only port 4560 is opened by the logstash pipeline and
         docker-compose file shown in this document; ports 4561-4563 used by
         the appenders below need matching inputs/port mappings — confirm. -->
    <appender name="LOG_STASH_DEBUG" class="net.logstash.logback.appender.LogstashTcpSocketAppender">
        <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
            <!-- Pass DEBUG and above -->
            <level>DEBUG</level>
        </filter>
        <destination>${LOG_STASH_HOST}:4560</destination>
        <encoder charset="UTF-8" class="net.logstash.logback.encoder.LoggingEventCompositeJsonEncoder">
            <providers>
                <timestamp>
                    <timeZone>Asia/Shanghai</timeZone>
                </timestamp>
                <!-- Custom JSON log layout -->
                <pattern>
                    <pattern>
                        {
                        "project": "demo",
                        "level": "%level",
                        "service": "${APP_NAME:-}",
                        "pid": "${PID:-}",
                        "thread": "%thread",
                        "class": "%logger",
                        "message": "%message",
                        "stack_trace": "%exception{20}"
                        }
                    </pattern>
                </pattern>
            </providers>
        </encoder>
        <!-- Round-robin across destinations when several Logstash hosts are configured -->
        <connectionStrategy>
            <roundRobin>
                <connectionTTL>5 minutes</connectionTTL>
            </roundRobin>
        </connectionStrategy>
    </appender>

    <!-- ERROR-only logging shipped to Logstash -->
    <appender name="LOG_STASH_ERROR" class="net.logstash.logback.appender.LogstashTcpSocketAppender">
        <filter class="ch.qos.logback.classic.filter.LevelFilter">
            <level>ERROR</level>
            <onMatch>ACCEPT</onMatch>
            <onMismatch>DENY</onMismatch>
        </filter>
        <destination>${LOG_STASH_HOST}:4561</destination>
        <encoder charset="UTF-8" class="net.logstash.logback.encoder.LoggingEventCompositeJsonEncoder">
            <providers>
                <timestamp>
                    <timeZone>Asia/Shanghai</timeZone>
                </timestamp>
                <!-- Custom JSON log layout -->
                <pattern>
                    <pattern>
                        {
                        "project": "demo",
                        "level": "%level",
                        "service": "${APP_NAME:-}",
                        "pid": "${PID:-}",
                        "thread": "%thread",
                        "class": "%logger",
                        "message": "%message",
                        "stack_trace": "%exception{20}"
                        }
                    </pattern>
                </pattern>
            </providers>
        </encoder>
        <!-- Round-robin across destinations when several Logstash hosts are configured -->
        <connectionStrategy>
            <roundRobin>
                <connectionTTL>5 minutes</connectionTTL>
            </roundRobin>
        </connectionStrategy>
    </appender>

    <!-- Business logging shipped to Logstash -->
    <appender name="LOG_STASH_BUSINESS" class="net.logstash.logback.appender.LogstashTcpSocketAppender">
        <destination>${LOG_STASH_HOST}:4562</destination>
        <encoder charset="UTF-8" class="net.logstash.logback.encoder.LoggingEventCompositeJsonEncoder">
            <providers>
                <timestamp>
                    <timeZone>Asia/Shanghai</timeZone>
                </timestamp>
                <!-- Custom JSON log layout -->
                <pattern>
                    <pattern>
                        {
                        "project": "demo",
                        "level": "%level",
                        "service": "${APP_NAME:-}",
                        "pid": "${PID:-}",
                        "thread": "%thread",
                        "class": "%logger",
                        "message": "%message",
                        "stack_trace": "%exception{20}"
                        }
                    </pattern>
                </pattern>
            </providers>
        </encoder>
        <!-- Round-robin across destinations when several Logstash hosts are configured -->
        <connectionStrategy>
            <roundRobin>
                <connectionTTL>5 minutes</connectionTTL>
            </roundRobin>
        </connectionStrategy>
    </appender>

    <!-- API access-record logging shipped to Logstash -->
    <appender name="LOG_STASH_RECORD" class="net.logstash.logback.appender.LogstashTcpSocketAppender">
        <destination>${LOG_STASH_HOST}:4563</destination>
        <encoder charset="UTF-8" class="net.logstash.logback.encoder.LoggingEventCompositeJsonEncoder">
            <providers>
                <timestamp>
                    <timeZone>Asia/Shanghai</timeZone>
                </timestamp>
                <!-- Custom JSON log layout -->
                <pattern>
                    <pattern>
                        {
                        "project": "demo",
                        "level": "%level",
                        "service": "${APP_NAME:-}",
                        "class": "%logger",
                        "message": "%message"
                        }
                    </pattern>
                </pattern>
            </providers>
        </encoder>
        <!-- Round-robin across destinations when several Logstash hosts are configured -->
        <connectionStrategy>
            <roundRobin>
                <connectionTTL>5 minutes</connectionTTL>
            </roundRobin>
        </connectionStrategy>
    </appender>

    <!-- Quiet down framework logging -->
    <logger name="org.slf4j" level="INFO"/>
    <logger name="springfox" level="INFO"/>
    <logger name="io.swagger" level="INFO"/>
    <logger name="org.springframework" level="INFO"/>
    <logger name="org.hibernate.validator" level="INFO"/>

    <!-- Access-log aspect goes to the record appender -->
    <logger name="com.example.demo.component.WebLogAspect" level="DEBUG">
        <appender-ref ref="LOG_STASH_RECORD"/>
    </logger>

    <!-- Application packages go to the business appender -->
    <logger name="com.example.demo" level="DEBUG">
        <appender-ref ref="LOG_STASH_BUSINESS"/>
    </logger>

    <root level="DEBUG">
        <appender-ref ref="CONSOLE"/>
        <appender-ref ref="FILE_DEBUG"/>
        <appender-ref ref="FILE_ERROR"/>
        <appender-ref ref="LOG_STASH_DEBUG"/>
        <appender-ref ref="LOG_STASH_ERROR"/>
    </root>
</configuration>

编辑配置文件

1
2
3
4
5
6
7
8
9
10
11
12
13
server:
  port: 8080

spring:
  application:
    name: demo  # also used as the "service" field in shipped log events

logstash:
  # Host where the Logstash TCP input (port 4560) is reachable
  host: 1.117.34.49

logging:
  level:
    root: debug

启动应用

在kibana中查看日志信息

创建index pattern

img

img

img

查看收集的日志

image-20230504043825348