1. Basic configuration
# Configuration file path
[root@localhost]# vim /etc/logstash/logstash.yml
...
# Data storage path
path.data: /var/lib/logstash
# Log storage path
path.logs: /var/log/logstash
2. Basic pipeline configuration for log parsing
- Parse JSON data
- Use mutate to rename fields or change field types
- Send output to both Elasticsearch and stdout
# Bind port 5044 to receive data from Filebeat
[root@localhost]# cat /etc/logstash/conf.d/gcp.conf
input {
  beats {
    port => 5044
    host => "0.0.0.0"
  }
}
filter {
  json {
    source => "message"                   # parse the "message" field as JSON
  }
  mutate {
    rename => { "host" => "server" }      # rename the "host" field to "server"
    convert => { "server" => "string" }   # convert the renamed field to a string
  }
}
output {
  elasticsearch {
    hosts => "http://localhost:9200"
    index => "demo-json"
  }
  stdout {}
}
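Before (re)starting the service, the pipeline file can be syntax-checked with the --config.test_and_exit flag (a quick check against the config path used above; --path.settings points Logstash at its settings directory):
# Validate the pipeline configuration without running it
[root@localhost]# /usr/share/logstash/bin/logstash --path.settings /etc/logstash -f /etc/logstash/conf.d/gcp.conf --config.test_and_exit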
3. Start and verify the service
# Start the service
[root@localhost]# systemctl start logstash
# Check the process
[root@localhost]# ps aux | grep logstash
logstash 10685 1.6 19.9 7474360 1596452 ? SNsl Oct18 12:20 /usr/share/logstash/jdk/bin/java -Xms1g -Xmx1g -XX:+UseConcMarkSweepGC -XX:CMSInitiatingOccupancyFraction=75 -XX:+UseCMSInitiatingOccupancyOnly -Djava.awt.headless=true -Dfile.encoding=UTF-8 -Djruby.compile.invokedynamic=true -Djruby.jit.threshold=0 -Djruby.regexp.interruptible=true -XX:+HeapDumpOnOutOfMemoryError -Djava.security.egd=file:/dev/urandom -Dlog4j2.isThreadContextMapInheritable=true -cp /usr/share/logstash/logstash-core/lib/jars/animal-sniffer-annotations-1.14.jar:/usr/share/logstash/logstash-core/lib/jars/checker-compat-qual-2.0.0.jar:/usr/share/logstash/logstash-core/lib/jars/commons-codec-1.14.jar:/usr/share/logstash/logstash-core/lib/jars/commons-compiler-3.1.0.jar:/usr/share/logstash/logstash-core/lib/jars/commons-logging-1.2.jar:/usr/share/logstash/logstash-core/lib/jars/error_prone_annotations-2.1.3.jar:/usr/share/logstas/logstash-core/lib/jars/google-java-format-1.1.jar:/usr/share/logstash/logstash-core/lib/jars/gradle-license-report-0.7.1.jar:/usr/share/logstash/logstash-core/lib/jars/guava-24.1.1-jre.jar:/usr/share/logstash/logstash-core/lib/jars/j2objc-annotations-1.1.jar:/usr/share/logstash/logstash-core/lib/jars/jackson-annotations-2.9.10.jar:/usr/share/logstash/logstash-core/lib/jars/jackson-core-2.9.10.jar:/usr/share/logstash/logstash-core/lib/jars/jackson-databind-2.9.10.8.jar:/usr/share/logstash/logstash-core/lib/jars/jackson-dataformat-cbor-2.9.10.jar:/usr/share/logstash/logstash-core/lib/jars/jackson-dataformat-yaml-2.9.10.jar:/usr/share/logstash/logstash-core/lib/jars/janino-3.1.0.jar:/usr/share/logstash/logstash-core/lib/jars/javassist-3.26.0-GA.jar:/usr/share/logstash/logstash-core/lib/jars/jruby-complete-9.2.19.0.jar:/usr/share/logstash/logstash-core/lib/jars/jsr305-1.3.9.jar:/usr/share/logstash/logstash-core/lib/jars/log4j-1.2-api-2.14.0.jar:/usr/share/logstash/logstash-core/lib/jars/log4j-api-2.14.0.jar:/usr/share/logstash/logstash-core/lib/jars/log4j-core-2.14.0.jar:/usr/share/logstash/logstash-core/lib/jars/log4j-jcl-2.14.0.jar:/usr/share/logstash/logstash-core/lib/jars/log4j-slf4j-impl-2.14.0.jar:/usr/share/logstash/logstash-core/lib/jars/logstash-core.jar:/usr/share/logstash/logstash-core/lib/jars/org.eclipse.core.commands-3.6.0.jar:/usr/share/logstash/logstash-core/lib/jars/org.eclipse.core.contenttype-3.4.100.jar:/usr/share/logstash/logstash-core/lib/jars/org.eclipse.core.expressions-3.4.300.jar:/usr/share/logstash/logstash-core/lib/jars/org.eclipse.core.filesystem-1.3.100.jar:/usr/share/logstash/logstash-core/lib/jars/org.eclipse.core.jobs-3.5.100.jar:/usr/share/logstash/logstash-core/lib/jars/org.eclipse.core.resources-3.7.100.jar:/usr/share/logstash/logstash-core/lib/jars/org.eclipse.core.runtime-3.7.0.jar:/usr/share/logstash/logstash-core/lib/jars/org.eclipse.equinox.app-1.3.100.jar:/usr/share/logstash/logstash-core/lib/jars/org.eclipse.equinox.common-3.6.0.jar:/usr/share/logstash/logstash-core/lib/jars/org.eclipse.equinox.preferences-3.4.1.jar:/usr/share/logstash/logstash-core/lib/jars/org.eclipse.equinox.registry-3.5.101.jar:/usr/share/logstash/logstash-core/lib/jars/org.eclipse.jdt.core-3.10.0.jar:/usr/share/logstash/logstash-core/lib/jars/org.eclipse.osgi-3.7.1.jar:/usr/share/logstash/logstash-core/lib/jars/org.eclipse.text-3.5.101.jar:/usr/share/logstash/logstash-core/lib/jars/reflections-0.9.11.jar:/usr/share/logstash/logstash-core/lib/jars/slf4j-api-1.7.30.jar:/usr/share/logstash/logstash-core/lib/jars/snakeyaml-1.23.jar org.logstash.Logstash --path.settings /etc/logstash
# Check with pstree
[root@localhost gcp-test]# pstree
systemd─┬─NetworkManager───2*[{NetworkManager}]
...
├─java───82*[{java}]
...
# Check with netstat
[root@localhost gcp-test]# netstat -anp | grep 10685
tcp6 0 0 :::5044 :::* LISTEN 10685/java
tcp6 0 0 127.0.0.1:9600 :::* LISTEN 10685/java
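Port 9600 above is the Logstash HTTP monitoring API; it can be queried for pipeline statistics (a quick check, assuming the default binding of 127.0.0.1:9600):
# Query the node stats API
[root@localhost gcp-test]# curl -XGET "http://localhost:9600/_node/stats/pipelines?pretty"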
4. Logstash bin path
# It is convenient to add this directory to PATH in .bashrc.
[root@localhost]# ll /usr/share/logstash/bin/
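For example, the bin directory can be appended to PATH (a minimal sketch, assuming the root user's ~/.bashrc is used):
# Add the Logstash bin directory to PATH
[root@localhost]# echo 'export PATH=$PATH:/usr/share/logstash/bin' >> ~/.bashrc
[root@localhost]# source ~/.bashrc
[root@localhost]# logstash --version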
5. JSON pipeline test
- Test parsing Cloudflare Spectrum logs
# Create a sample JSON file
# Cloudflare Spectrum log example
[root@localhost gcp-test]# cat log.log
{"Application":"cb9c4c042e414b998d7afeec60ac1893","ClientAsn":9605,"ClientBytes":0,"ClientCountry":"jp","ClientIP":"{client ip}","ClientMatchedIpFirewall":"ALLOW","ClientPort":49531,"ClientProto":"tcp","ClientTcpRtt":0,"ClientTlsCipher":"unknown","ClientTlsClientHelloServerName":"","ClientTlsProtocol":"unknown","ClientTlsStatus":"UNKNOWN","ColoCode":"KIX","ConnectTimestamp":"2021-10-18T06:43:17Z","DisconnectTimestamp":"1970-01-01T00:00:00Z","Event":"connect","IpFirewall":true,"OriginBytes":0,"OriginIP":"{origin ip}","OriginPort":{origin port},"OriginProto":"tcp","OriginTcpRtt":0,"OriginTlsCipher":"unknown","OriginTlsFingerprint":"0000000000000000000000000000000000000000000000000000000000000000","OriginTlsMode":"off","OriginTlsProtocol":"unknown","OriginTlsStatus":"UNKNOWN","ProxyProtocol":"off","Status":201,"Timestamp":"2021-10-18T06:43:17Z"}
{"Application":"5fb836cc91ab4ce4b130eb94bf8cbb7a","ClientAsn":9231,"ClientBytes":8359,"ClientCountry":"hk","ClientIP":"{client ip}","ClientMatchedIpFirewall":"ALLOW","ClientPort":9167,"ClentProto":"tcp","ClientTcpRtt":28238000,"ClientTlsCipher":"unknown","ClientTlsClientHelloServerName":"","ClientTlsProtocol":"unknown","ClientTlsStatus":"UNKNOWN","ColoCode":"HKG","ConnectTimestamp":"2021-10-18T06:36:31Z","DisconnectTimestamp":"2021-10-18T06:43:22Z","Event":"disconnect","IpFirewall":true,"OriginBytes":140710,"OriginIP":"{origin ip}","OriginPort":{origin port},"OriginProto":"tcp","OriginTcpRtt":35709000,"OriginTlsCipher":"unknown","OriginTlsFingerprint":"0000000000000000000000000000000000000000000000000000000000000000","OriginTlsMode":"off","OriginTlsProtocol":"unknown","OriginTlsStatus":"UNKNOWN","ProxyProtocol":"off","Status":200,"Timestamp":"2021-10-18T06:43:22Z"}
# Logstash pipeline configuration
[root@localhost gcp-test]# cat gcp.conf
input {
  file {
    start_position => "beginning"
    path => "/opt/gcp-test/log.log"     # path to the sample JSON file
    sincedb_path => "/dev/null"
  }
}
filter {
  json {
    source => "message"                 # parse the "message" field as JSON
  }
}
output {
  elasticsearch {
    hosts => "http://localhost:9200"    # Elasticsearch endpoint
    index => "demo-json"                # index name to use in Elasticsearch
  }
  stdout {}                             # print to stdout in addition to Elasticsearch
}
# Run
[root@localhost gcp-test]# /usr/share/logstash/bin/logstash -f /opt/gcp-test/gcp.conf
Using bundled JDK: /usr/share/logstash/jdk
...
{
"ClientTlsProtocol" => "unknown",
"DisconnectTimestamp" => "1970-01-01T00:00:00Z",
"ClientPort" => 49531,
"ClientTcpRtt" => 0,
"Event" => "connect",
"ClientCountry" => "jp",
"path" => "/opt/gcp-test/log.log",
"ClientMatchedIpFirewall" => "ALLOW",
"ConnectTimestamp" => "2021-10-18T06:43:17Z",
"OriginTlsCipher" => "unknown",
"OriginIP" => "{origin ip}",
"message" => "{\"Application\":\"cb9c4c042e414b998d7afeec60ac1893\",\"ClientAsn\":9605,\"ClientBytes\":0,\"ClientCountry\":\"jp\",\"ClientIP\":\"49.96.14.95\",\"ClientMatchedIpFirewall\":\"ALLOW\",\"ClientPort\":49531,\"ClientProto\":\"tcp\",\"ClientTcpRtt\":0,\"ClientTlsCipher\":\"unknown\",\"ClientTlsClientHelloServerName\":\"\",\"ClientTlsProtocol\":\"unknown\",\"ClientTlsStatus\":\"UNKNOWN\",\"ColoCode\":\"KIX\",\"ConnectTimestamp\":\"2021-10-18T06:43:17Z\",\"DisconnectTimestamp\":\"1970-01-01T00:00:00Z\",\"Event\":\"connect\",\"IpFirewall\":true,\"OriginBytes\":0,\"OriginIP\":\"103.89.68.120\",\"OriginPort\":20104,\"OriginProto\":\"tcp\",\"OriginTcpRtt\":0,\"OriginTlsCipher\":\"unknown\",\"OriginTlsFingerprint\":\"0000000000000000000000000000000000000000000000000000000000000000\",\"OriginTlsMode\":\"off\",\"OriginTlsProtocol\":\"unknown\",\"OriginTlsStatus\":\"UNKNOWN\",\"ProxyProtocol\":\"off\",\"Status\":201,\"Timestamp\":\"2021-10-18T06:43:17Z\"}",
"OriginBytes" => 0,
"@timestamp" => 2021-10-19T01:49:43.349Z,
"ClientTlsStatus" => "UNKNOWN",
"Timestamp" => "2021-10-18T06:43:17Z",
"ClientTlsClientHelloServerName" => "",
"ClientAsn" => 9605,
"OriginTlsProtocol" => "unknown",
"OriginTlsMode" => "off",
"OriginTlsStatus" => "UNKNOWN",
"Status" => 201,
"ColoCode" => "KIX",
"OriginPort" => {origin port},
"OriginTcpRtt" => 0,
"ProxyProtocol" => "off",
"IpFirewall" => true,
"Application" => "cb9c4c042e414b998d7afeec60ac1893",
"ClientBytes" => 0,
"OriginProto" => "tcp",
"ClientTlsCipher" => "unknown",
"OriginTlsFingerprint" => "0000000000000000000000000000000000000000000000000000000000000000",
"@version" => "1",
"ClientIP" => "{client ip}",
"ClientProto" => "tcp",
"host" => "localhost.localdomain"
}
...
# Verify
[root@localhost gcp-test]# curl -XGET "http://localhost:9200/demo-json/_search?pretty=true"
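The number of indexed documents can also be checked with the _count API (assuming the same demo-json index and the default port 9200):
[root@localhost gcp-test]# curl -XGET "http://localhost:9200/demo-json/_count?pretty"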
# mutate test
# The mutate filter performs field-level preprocessing inside the filter stage (rename, convert, remove fields, etc.)
[root@localhost gcp-test]# cat gcp.conf
input {
  file {
    start_position => "beginning"
    path => "/opt/gcp-test/log.log"
    sincedb_path => "/dev/null"
  }
}
filter {
  json {
    source => "message"
  }
  mutate {
    remove_field => ["message", "@timestamp", "path", "host", "@version"]   # drop the message, @timestamp, path, host, and @version fields
  }
}
output {
  elasticsearch {
    hosts => "http://localhost:9200"
    index => "demo-json"
  }
  stdout {}
}
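Because the file input uses start_position => "beginning" with sincedb_path => "/dev/null", the whole file is re-read on every run; deleting the previous test index first keeps documents from the earlier run (which still contain the removed fields) out of the results (optional, assuming the demo-json index can be discarded):
# Delete the previous test index before re-running
[root@localhost gcp-test]# curl -XDELETE "http://localhost:9200/demo-json"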
# Run
[root@localhost gcp-test]# /usr/share/logstash/bin/logstash -f /opt/gcp-test/gcp.conf
Using bundled JDK: /usr/share/logstash/jdk
...
{
"ClentProto" => "tcp",
"OriginTlsStatus" => "UNKNOWN",
"ClientTlsProtocol" => "unknown",
"Application" => "5fb836cc91ab4ce4b130eb94bf8cbb7a",
"Timestamp" => "2021-10-18T06:43:22Z",
"IpFirewall" => true,
"ClientCountry" => "hk",
"DisconnectTimestamp" => "2021-10-18T06:43:22Z",
"ClientTlsCipher" => "unknown",
"ClientTcpRtt" => 28238000,
"Event" => "disconnect",
"OriginTcpRtt" => 35709000,
"ProxyProtocol" => "off",
"OriginProto" => "tcp",
"OriginPort" => {origin port},
"ClientBytes" => 8359,
"OriginTlsProtocol" => "unknown",
"ClientTlsStatus" => "UNKNOWN",
"ClientAsn" => 9231,
"Status" => 200,
"ClientMatchedIpFirewall" => "ALLOW",
"OriginTlsCipher" => "unknown",
"OriginTlsFingerprint" => "0000000000000000000000000000000000000000000000000000000000000000",
"OriginBytes" => 140710,
"OriginTlsMode" => "off",
"ColoCode" => "HKG",
"OriginIP" => "{origin ip}",
"ConnectTimestamp" => "2021-10-18T06:36:31Z",
"ClientPort" => 9167,
"ClientTlsClientHelloServerName" => "",
"ClientIP" => "{client ip}"
}
...
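To confirm that the removed fields no longer appear, the index mapping can be inspected (assuming the index was re-created in the step above):
[root@localhost gcp-test]# curl -XGET "http://localhost:9200/demo-json/_mapping?pretty"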