Initial commit: add .gitignore and README
This commit is contained in:
18
logstash/Dockerfile
Normal file
18
logstash/Dockerfile
Normal file
@@ -0,0 +1,18 @@
|
||||
FROM docker.elastic.co/logstash/logstash:7.14.1

# Default connection endpoints; override at container run time
# (e.g. docker run -e ES_HOST=... -e REDIS_HOST=...).
ENV ES_HOST=http://localhost:9200
ENV REDIS_HOST=localhost
ENV REDIS_PORT=6379

# Create the log directory referenced by config/logstash.yml and remove
# the stock pipeline so only our pipeline files are loaded.
RUN mkdir -p /usr/share/logstash/logs && \
    rm -f /usr/share/logstash/pipeline/logstash.conf

# Add pipeline files (COPY, not ADD, for plain local files)
COPY pipeline/ /usr/share/logstash/pipeline/

# Add configuration files
COPY config/ /usr/share/logstash/config/

# Test the configuration — fails the build early if any pipeline file is invalid
RUN /usr/share/logstash/bin/logstash -t
|
||||
40
logstash/config/log4j2.properties
Normal file
40
logstash/config/log4j2.properties
Normal file
@@ -0,0 +1,40 @@
|
||||
# Log4j2 configuration for Logstash's own logging.
# Internal log4j2 status logging; "error" keeps log4j's own diagnostics
# quiet (was "trace", which floods stdout with framework internals).
status = error
name = LogstashPropertiesConfig

# Plain-text console appender.
appender.console.type = Console
appender.console.name = plain_console
appender.console.layout.type = PatternLayout
appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c]%notEmpty{[%X{pipeline.id}]}%notEmpty{[%X{plugin.id}]} %m%n

# JSON console appender (one compact JSON event per line).
appender.json_console.type = Console
appender.json_console.name = json_console
appender.json_console.layout.type = JSONLayout
appender.json_console.layout.compact = true
appender.json_console.layout.eventEol = true

# Plain-text rolling file appender, rotated daily.
appender.rolling.type = RollingFile
appender.rolling.name = plain_rolling
appender.rolling.fileName = ${sys:ls.logs}/logstash-${env:HOSTNAME}.log
appender.rolling.filePattern = ${sys:ls.logs}/logstash-${env:HOSTNAME}-%d{yyyy-MM-dd}.bak
appender.rolling.policies.type = Policies
appender.rolling.policies.time.type = TimeBasedTriggeringPolicy
appender.rolling.policies.time.interval = 1
appender.rolling.policies.time.modulate = true
appender.rolling.layout.type = PatternLayout
appender.rolling.layout.pattern = [%d{ISO8601}][%-5p][%-25c] %-.10000m%n

# JSON rolling file appender; same file and rotation as plain_rolling.
# Only one of the two is active at a time, selected below via
# ${sys:ls.log.format}, so the shared fileName does not conflict.
appender.json_rolling.type = RollingFile
appender.json_rolling.name = json_rolling
appender.json_rolling.fileName = ${sys:ls.logs}/logstash-${env:HOSTNAME}.log
appender.json_rolling.filePattern = ${sys:ls.logs}/logstash-${env:HOSTNAME}-%d{yyyy-MM-dd}.bak
appender.json_rolling.policies.type = Policies
appender.json_rolling.policies.time.type = TimeBasedTriggeringPolicy
appender.json_rolling.policies.time.interval = 1
appender.json_rolling.policies.time.modulate = true
appender.json_rolling.layout.type = JSONLayout
appender.json_rolling.layout.compact = true
appender.json_rolling.layout.eventEol = true

# Root logger: level and output format (plain|json) come from the system
# properties Logstash sets at startup (ls.log.level, ls.log.format).
rootLogger.level = ${sys:ls.log.level}
rootLogger.appenderRef.console.ref = ${sys:ls.log.format}_console
rootLogger.appenderRef.rolling.ref = ${sys:ls.log.format}_rolling
|
||||
13
logstash/config/logstash.yml
Normal file
13
logstash/config/logstash.yml
Normal file
@@ -0,0 +1,13 @@
|
||||
# ensure name is set so that monitoring in Kibana is easy to identify
node.name: "logstash-quorum"

# Emit Logstash's own logs as JSON (consumed by the log4j2 json appenders;
# the 20_logstash.conf filter parses them back out of the message field).
log.format: json

path:
  config: /usr/share/logstash/pipeline
  logs: /usr/share/logstash/logs

# Ship Logstash self-monitoring data to Elasticsearch.
# ES_HOST is injected through the container environment (see Dockerfile).
xpack:
  monitoring:
    enabled: true
    elasticsearch:
      hosts: ${ES_HOST}
|
||||
9
logstash/pipeline/10_filebeat_redis.conf
Normal file
9
logstash/pipeline/10_filebeat_redis.conf
Normal file
@@ -0,0 +1,9 @@
|
||||
# Consume Filebeat events that were buffered in a Redis list.
# Connection details come from the container environment (see Dockerfile).
input {
  redis {
    host => "${REDIS_HOST}"
    port => "${REDIS_PORT}"
    codec => "json"
    data_type => "list"
    key => "filebeat"
  }
}
|
||||
11
logstash/pipeline/10_metricbeat_redis.conf
Normal file
11
logstash/pipeline/10_metricbeat_redis.conf
Normal file
@@ -0,0 +1,11 @@
|
||||
# Consume Metricbeat events buffered in a Redis list. Events are tagged
# with type "metricbeat" so the output stage can route them to the
# metricbeat-* index (see 30_elasticsearch.conf).
input {
  redis {
    host => "${REDIS_HOST}"
    port => "${REDIS_PORT}"
    codec => "json"
    data_type => "list"
    type => "metricbeat"
    key => "metricbeat"
  }
}
|
||||
|
||||
11
logstash/pipeline/20_besu.conf
Normal file
11
logstash/pipeline/20_besu.conf
Normal file
@@ -0,0 +1,11 @@
|
||||
# Parse Besu log events: the message field carries JSON, and its embedded
# ISO8601 "timestamp" becomes the event's @timestamp.
filter {
  if [service_name] == "besu" {
    json {
      source => "message"
    }
    date {
      match => [ "timestamp" , "ISO8601" ]
      # Scratch field is no longer needed once @timestamp is set.
      remove_field => [ "timestamp" ]
    }
  }
}
|
||||
11
logstash/pipeline/20_logstash.conf
Normal file
11
logstash/pipeline/20_logstash.conf
Normal file
@@ -0,0 +1,11 @@
|
||||
# Parse Logstash's own (JSON-formatted) log events; the log4j2 JSONLayout
# field "timeMillis" (epoch milliseconds) becomes the event's @timestamp.
filter {
  if [service_name] == "logstash" {
    json {
      source => "message"
    }

    date {
      match => [ "timeMillis", "UNIX_MS" ]
    }
  }
}
|
||||
21
logstash/pipeline/20_quorum.conf
Normal file
21
logstash/pipeline/20_quorum.conf
Normal file
@@ -0,0 +1,21 @@
|
||||
# Normalise Quorum (geth-style) log lines of the form:
#   INFO [08-20|12:34:56.789] message text
filter {
  if [service_name] == "quorum" {
    # Split the line into level, month number, day, time and the rest,
    # overwriting "message" with just the text after the bracket.
    grok {
      match => { "message" => "%{GREEDYDATA:level}\[%{MONTHNUM:monthnum}\-%{DATA:monthday}\|%{TIME:time}\] %{GREEDYDATA:message}" }
      overwrite => [ "message" ]
    }
    # Reassemble the captures into one parseable scratch field.
    mutate {
      add_field => {
        "timestamp" => "%{monthnum} %{monthday} %{time}"
      }
    }
    # Set @timestamp from the scratch field (ISO8601 as a fallback for
    # lines that already carry a full timestamp).
    date {
      match => [ "timestamp" , "MM dd HH:mm:ss.SSS", "ISO8601" ]
      remove_field => [ "timestamp" ]
    }
    # Drop the intermediate grok captures. "timestamp" is listed here too
    # so it is removed even when the date match above fails.
    mutate {
      remove_field => [ "time", "monthnum", "monthday", "timestamp" ]
    }
  }
}
|
||||
|
||||
11
logstash/pipeline/20_tessera.conf
Normal file
11
logstash/pipeline/20_tessera.conf
Normal file
@@ -0,0 +1,11 @@
|
||||
# Parse Tessera log events: the message field carries JSON, and its
# embedded ISO8601 "timestamp" becomes the event's @timestamp.
filter {
  if [service_name] == "tessera" {
    json {
      source => "message"
    }
    date {
      match => [ "timestamp" , "ISO8601" ]
      # Scratch field is no longer needed once @timestamp is set.
      remove_field => [ "timestamp" ]
    }
  }
}
|
||||
44
logstash/pipeline/30_elasticsearch.conf
Normal file
44
logstash/pipeline/30_elasticsearch.conf
Normal file
@@ -0,0 +1,44 @@
|
||||
# Route events to daily per-service Elasticsearch indices based on the
# "service_name" field (or the "metricbeat" type tag set by its input).
# Anything unrecognised lands in unknown-* so no event is silently lost.
output {
  if [service_name] == "logstash" {
    elasticsearch {
      hosts => ["${ES_HOST}"]
      index => "logstash-%{+YYYY.MM.dd}"
    }
  }

  else if [service_name] == "besu" {
    elasticsearch {
      hosts => ["${ES_HOST}"]
      index => "besu-%{+YYYY.MM.dd}"
    }
  }

  else if [service_name] == "tessera" {
    elasticsearch {
      hosts => ["${ES_HOST}"]
      index => "tessera-%{+YYYY.MM.dd}"
    }
  }

  else if [service_name] == "quorum" {
    elasticsearch {
      hosts => ["${ES_HOST}"]
      index => "quorum-%{+YYYY.MM.dd}"
    }
  }

  else if [type] == "metricbeat" {
    elasticsearch {
      hosts => ["${ES_HOST}"]
      index => "metricbeat-%{+YYYY.MM.dd}"
    }
  }

  else {
    elasticsearch {
      hosts => ["${ES_HOST}"]
      index => "unknown-%{+YYYY.MM.dd}"
    }
  }

}
|
||||
Reference in New Issue
Block a user