TSF CKafka -> Logstash -> Elasticsearch: setup notes
{"cluster-id":"cls-*","application-id":"application-****","message":"{\"log_time\":\"2020-04-25 11:33:07.178\",\"logType\":\"app_java\",\"level\":\"INFO\",\"thread\":\"tsf-schedule-0-T3\",\"trace\":\"[,,,]\",\"skywalkingTraceId\":\"TID: N/A\",\"class_name\":\"*.*.rule.TsfCircuitBreakerConsulKVLoader\",\"method_name\":\"syncCircuitBreakerRule\",\"code_line\":63,\"message\":\"[TSF CIRCUIT BREAKER LOADER] TSF circuit breaker loader start NEW round, circuitBreakerRuleIndex: 8295189\"}","namespace-id":"namespace-***","instance-id":"**-**-*-online-***-5lddz","offset":86002,"type":"log","app-id":"*****","local-ip":"*.*.*.*","fields":{"topicname":"app-log"},"appgroup-id":"group-*","@version":"1","@timestamp":"2020-04-25T03:33:07.715Z"}
yum install java-1.8.0-openjdk
cd /usr/local
nohup wget https://artifacts.elastic.co/downloads/logstash/logstash-7.6.2.zip &
unzip logstash-7.6.2.zip
[root@VM_10_41_centos config]# pwd
/usr/local/logstash-7.6.2/config
[root@VM_10_41_centos config]# cat logstash.conf
# Logstash pipeline: TSF CKafka -> Logstash -> Elasticsearch.
# Consumes JSON log envelopes from Kafka, unwraps the nested "message"
# JSON string, then ships events to Elasticsearch (plus a local debug file).
input
{
  kafka
  {
    # NOTE: the Logstash attribute operator is "=>"; writing "= >" (with a
    # space) is a config syntax error and the pipeline will not start.
    bootstrap_servers => "10.1.1.1:9092"
    topics => ["app-log"]
    group_id => "app-log-kafka"
    # Each Kafka record is itself a JSON document; decode it as UTF-8.
    codec => json
    {
      charset => "UTF-8"
    }
  }
}
filter
{
  # The envelope's "message" field carries the application log line as a
  # nested JSON string; parse it so its fields become top-level fields.
  json
  {
    source => "message"
  }
}
output
{
  # Local copy for debugging; flush_interval 0 flushes on every event.
  file
  {
    path => "/usr/local/logstash-7.6.2/tmp/app.log"
    flush_interval => 0
  }
  elasticsearch
  {
    hosts => ["http://10.1.1.2:9200"]
    index => "wz-log-%{+YYYY.MM.dd}"
  }
}