目录
简介
实验环境
安装
建立软链接
输出采集
file日志采集
采集系统日志syslog
多行过滤插件
grok过滤插件
简介
Logstash:是一个具有实时管道(pipeline)处理能力的数据收集引擎,主要用于日志的收集与解析,并将其存入 ElasticSearch中。与ElasticSearch有很高的适配性。
实验环境
ES集群优化结束
去新rhel7.6server4
安装
rpm -ivh jdk-8u181-linux-x64.rpm
rpm -ivh logstash-7.6.1.rpm
建立软链接
ln -s /usr/share/logstash/bin/logstash /usr/bin/
Stashing Your First Event | Logstash Reference [7.6] | Elastic
logstash -e 'input { stdin { } } output { stdout {} }'
输出采集
编写测试文件
cd /etc/logstash/conf.d/
vim test.conf
# Minimal Logstash pipeline: read events typed on the terminal (stdin)
# and write each event both to the console and to Elasticsearch.
input {
stdin {}
}
output {
stdout{}   # echo each event to the console for debugging
elasticsearch {
hosts => ["172.25.7.1:9200"]
index => "logstash-%{+yyyy.MM.dd}"   # one index per day, e.g. logstash-2021.12.26
}
}
logstash -f /etc/logstash/conf.d/test.conf
网页查看
file日志采集
# Collect /var/log/messages into Elasticsearch.
input {
#stdin {}
file {
path => "/var/log/messages"
# Read the file from its first line on the initial run; afterwards the
# offset is remembered in a .sincedb file (see below), so the file is
# NOT re-read unless that sincedb entry is deleted.
start_position => "beginning"
}
}
output {
stdout{}   # also print events to the console for debugging
elasticsearch {
hosts => ["172.25.7.1:9200"]
index => "messages-%{+yyyy.MM.dd}"   # daily index
}
}
logstash -f /etc/logstash/conf.d/test.conf
inode 编号
[root@server4 conf.d]# cd /usr/share/logstash/data/plugins/
[root@server4 plugins]# ls
inputs
[root@server4 plugins]# cd inputs/
[root@server4 inputs]# ls
file
[root@server4 inputs]# cd file/
[root@server4 file]# ls
[root@server4 file]# pwd
/usr/share/logstash/data/plugins/inputs/file
[root@server4 file]# l.
. .. .sincedb_452905a167cf4509fd08acb964fdb20c
[root@server4 file]# cat .sincedb_452905a167cf4509fd08acb964fdb20c
51322019 0 64768 381 1640540717.129531 /var/log/messages
[root@server4 file]# ls -i /var/log/messages
51322019 /var/log/messages
[root@server4 file]#
inode 编号 ---->
文件系统的主要设备号---->
文件系统的次要设备号---->
文件中的当前字节偏移量---->
最后一个活动时间戳(浮点数)---->
与此记录匹配的最后一个已知路径---->/var/log/messages
删除掉,日志才会重新加载,生成索引
采集系统日志syslog
# Receive remote syslog messages and index them into Elasticsearch.
input {
# file {
# path => "/var/log/messages"
# start_position => "beginning"
#}
syslog {
# Listen on 514/tcp and 514/udp. NOTE: 514 is a privileged port, so
# Logstash must run as root (as in this walkthrough) to bind it.
port => 514
}
}
output {
stdout {}   # also print events to the console for debugging
elasticsearch {
hosts => ["172.25.7.1:9200"]
index => "syslog-%{+yyyy.MM.dd}"   # daily index
}
}
server1中
打开/etc/rsyslog.conf
添加
*.* 172.25.7.4:514
这样 server4 上的 Logstash 就可以采集到 server1 的系统日志
多行过滤插件
[root@server1 elasticsearch-head-master]# scp /var/log/elasticsearch/my-es.log server4:/var/log/
[root@server4 ~]# cat /etc/logstash/conf.d/rsys.conf
# Collect the Elasticsearch server log (copied to server4) into ES.
input {
file {
path => "/var/log/my-es.log"
# Read from the top on first run; progress is tracked in a sincedb file.
start_position => "beginning"
}
# syslog {
#port => 514
# }
}
output {
stdout {}   # also print events to the console for debugging
elasticsearch {
hosts => ["172.25.7.1:9200"]
index => "myes-%{+yyyy.MM.dd}"   # daily index
}
}
logstash -f /etc/logstash/conf.d/rsys.conf
日志的单行不可取
多行输出
[root@server4 conf.d]# cat rsys.conf
# Same pipeline as above, but with the multiline codec so that a log
# entry spanning several physical lines (e.g. a Java stack trace) is
# merged into a single event instead of one event per line.
input {
file {
path => "/var/log/my-es.log"
start_position => "beginning"
codec => multiline {
# Lines are accumulated until one matches this pattern.
# For ES logs "^\[" (entries start with a bracketed timestamp) is the
# better choice — see the note below.
pattern => "^EOF"
negate => true        # fold the lines that do NOT match the pattern
what => "previous"    # append them to the previous event (quoted string, per plugin docs)
}
}
# syslog {
#port => 514
# }
}
output {
stdout {}   # also print events to the console for debugging
elasticsearch {
hosts => ["172.25.7.1:9200"]
index => "myes-%{+yyyy.MM.dd}"   # daily index
}
}
针对日志选择关键字 "^\["
cd /usr/share/logstash/data/plugins/inputs/file/
[root@server4 file]# l.
.
..
.sincedb_13f094911fdac7ab3fa6f4c93fee6639
.sincedb_15940cad53dd1d99808eeaecd6f6ad3f
.sincedb_452905a167cf4509fd08acb964fdb20c
[root@server4 file]# cat .sincedb_13f094911fdac7ab3fa6f4c93fee6639
51322070 0 64768 53362 1640547178.565062 /var/log/my-es.log
[root@server4 file]# rm -rf .sincedb_13f094911fdac7ab3fa6f4c93fee6639
logstash -f /etc/logstash/conf.d/rsys.conf
grok过滤插件
切片apache访问日志
# Parse ("slice") the Apache access log into structured fields with grok,
# then index the result into Elasticsearch.
input {
file {
path => "/var/log/httpd/access_log"
start_position => "beginning"
}
}
filter {
grok {
# HTTPD_COMBINEDLOG is a built-in pattern (logstash-patterns-core) that
# splits the Apache "combined" log format into clientip, timestamp,
# verb, request, response, bytes, referrer, agent, etc.
match => { "message" => "%{HTTPD_COMBINEDLOG}" }
}
}
output {
stdout {}   # also print events to the console for debugging
elasticsearch {
hosts => ["172.25.7.1:9200"]
index => "apachelog-%{+yyyy.MM.dd}"   # daily index
}
}
grok 内置的服务日志模式(pattern)模板已准备好,模板所在目录:
cd /usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/logstash-patterns-core-4.1.2/patterns/
安装apache
echo server4 > /var/www/html/index.html
systemctl start httpd
chmod 755 /var/log/httpd/
主机访问服务
启动脚本