Skip to content

Commit 74658f9

Browse files
authored
Merge pull request #97 from cosmo0920/provide-ecs-v1-grok-patterns
Provide ecs-v1 grok patterns
2 parents cc42eb8 + 18f99ff commit 74658f9

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

52 files changed

+831
-90
lines changed

README.md

Lines changed: 5 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -46,7 +46,7 @@ extracts the first IP address that matches in the log.
4646
<parse>
4747
@type grok
4848
<grok>
49-
pattern %{COMBINEDAPACHELOG}
49+
pattern %{HTTPD_COMBINEDLOG}
5050
time_format "%d/%b/%Y:%H:%M:%S %z"
5151
</grok>
5252
<grok>
@@ -106,6 +106,8 @@ You can use this parser without `multiline_start_regexp` when you know your data
106106
* **grok_failure_key** (string) (optional): The key name to store the grok failure reason.
107107
* **grok_name_key** (string) (optional): The key name to store grok section's name
108108
* **multi_line_start_regexp** (string) (optional): The regexp to match beginning of multiline. This is only for "multiline_grok".
109+
* **grok_pattern_series** (enum) (optional): Specify grok pattern series set.
110+
* Default value: `legacy`.
109111

110112
### \<grok\> section (optional) (multiple)
111113

@@ -173,7 +175,7 @@ This generates following events:
173175
grok_failure_key grokfailure
174176
<grok>
175177
name apache_log
176-
pattern %{COMBINEDAPACHELOG}
178+
pattern %{HTTPD_COMBINEDLOG}
177179
time_format "%d/%b/%Y:%H:%M:%S %z"
178180
</grok>
179181
<grok>
@@ -190,7 +192,7 @@ This generates following events:
190192

191193
This will add keys like following:
192194

193-
* Add `grok_name: "apache_log"` if the record matches `COMBINEDAPACHELOG`
195+
* Add `grok_name: "apache_log"` if the record matches `HTTPD_COMBINEDLOG`
194196
* Add `grok_name: "ip_address"` if the record matches `IP`
195197
* Add `grok_name: "rest_message"` if the record matches `GREEDYDATA`
196198

Rakefile

Lines changed: 25 additions & 22 deletions
Original file line numberDiff line numberDiff line change
@@ -15,9 +15,11 @@ end
1515

1616
desc "Import patterns from submodules"
1717
task "patterns:import" do
18-
`git submodule --quiet foreach pwd`.split($\).each do |submodule_path|
19-
Dir.glob(File.join(submodule_path, "patterns/*")) do |pattern|
20-
cp(pattern, "patterns/", verbose: true)
18+
["legacy", "ecs-v1"].each do |series|
19+
`git submodule --quiet foreach pwd`.split($\).each do |submodule_path|
20+
Dir.glob(File.join(submodule_path, "patterns/#{series}/*")) do |pattern|
21+
cp(pattern, "patterns/#{series}", verbose: true)
22+
end
2123
end
2224
end
2325

@@ -32,29 +34,30 @@ task "patterns:import" do
3234
array(?::.)?)))?)?
3335
)
3436
\}/x
35-
36-
Dir.glob("patterns/*") do |pattern_file|
37-
new_lines = ""
38-
File.readlines(pattern_file).each do |line|
39-
case
40-
when line.strip.empty?
41-
new_lines << line
42-
when line.start_with?("#")
43-
new_lines << line
44-
else
45-
name, pattern = line.split(/\s+/, 2)
46-
new_pattern = pattern.gsub(pattern_re) do |m|
47-
matched = $~
48-
if matched[:type] == "int"
49-
"%{#{matched[:pattern]}:#{matched[:subname]}:integer}"
50-
else
51-
m
37+
["legacy", "ecs-v1"].each do |series|
38+
Dir.glob("patterns/#{series}/*") do |pattern_file|
39+
new_lines = ""
40+
File.readlines(pattern_file).each do |line|
41+
case
42+
when line.strip.empty?
43+
new_lines << line
44+
when line.start_with?("#")
45+
new_lines << line
46+
else
47+
name, pattern = line.split(/\s+/, 2)
48+
new_pattern = pattern.gsub(pattern_re) do |m|
49+
matched = $~
50+
if matched[:type] == "int"
51+
"%{#{matched[:pattern]}:#{matched[:subname]}:integer}"
52+
else
53+
m
54+
end
5255
end
56+
new_lines << "#{name} #{new_pattern}"
5357
end
54-
new_lines << "#{name} #{new_pattern}"
5558
end
59+
File.write(pattern_file, new_lines)
5660
end
57-
File.write(pattern_file, new_lines)
5861
end
5962
end
6063

lib/fluent/plugin/grok.rb

Lines changed: 9 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -134,8 +134,15 @@ def expand_pattern(pattern)
134134
curr_pattern = @pattern_map[m["pattern"]]
135135
raise GrokPatternNotFoundError, "grok pattern not found: #{pattern}" unless curr_pattern
136136
if m["subname"]
137-
replacement_pattern = "(?<#{m["subname"]}>#{curr_pattern})"
138-
type_map[m["subname"]] = m["type"] || "string"
137+
ecs = /(?<ecs-key>(^\[.*\]$))/.match(m["subname"])
138+
subname = if ecs
139+
# remove starting "[" and trailing "]" on matched data
140+
ecs["ecs-key"][1..-2].split("][").join('.')
141+
else
142+
m["subname"]
143+
end
144+
replacement_pattern = "(?<#{subname}>#{curr_pattern})"
145+
type_map[subname] = m["type"] || "string"
139146
else
140147
replacement_pattern = "(?:#{curr_pattern})"
141148
end

lib/fluent/plugin/parser_grok.rb

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -16,6 +16,8 @@ class GrokParser < Parser
1616
config_param :grok_failure_key, :string, default: nil
1717
desc "The key name to store grok section's name"
1818
config_param :grok_name_key, :string, default: nil
19+
desc "Specify grok pattern series set"
20+
config_param :grok_pattern_series, :enum, list: [:legacy, :"ecs-v1"], default: :legacy
1921

2022
config_section :grok, param_name: "grok_confs", multi: true do
2123
desc "The name of this grok section"
@@ -42,7 +44,7 @@ def configure(conf={})
4244

4345
@grok = Grok.new(self, conf)
4446

45-
default_pattern_dir = File.expand_path("../../../../patterns/*", __FILE__)
47+
default_pattern_dir = File.expand_path("../../../../patterns/#{@grok_pattern_series}/*", __FILE__)
4648
Dir.glob(default_pattern_dir) do |pattern_file_path|
4749
@grok.add_patterns_from_file(pattern_file_path)
4850
end

patterns/bind

Lines changed: 0 additions & 3 deletions
This file was deleted.

patterns/ecs-v1/aws

Lines changed: 28 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,28 @@
1+
S3_REQUEST_LINE (?:%{WORD:[http][request][method]} %{NOTSPACE:[url][original]}(?: HTTP/%{NUMBER:[http][version]})?)
2+
3+
S3_ACCESS_LOG %{WORD:[aws][s3access][bucket_owner]} %{NOTSPACE:[aws][s3access][bucket]} \[%{HTTPDATE:timestamp}\] (?:-|%{IP:[client][ip]}) (?:-|%{NOTSPACE:[client][user][id]}) %{NOTSPACE:[aws][s3access][request_id]} %{NOTSPACE:[aws][s3access][operation]} (?:-|%{NOTSPACE:[aws][s3access][key]}) (?:-|"%{S3_REQUEST_LINE:[aws][s3access][request_uri]}") (?:-|%{INT:[http][response][status_code]:integer}) (?:-|%{NOTSPACE:[aws][s3access][error_code]}) (?:-|%{INT:[aws][s3access][bytes_sent]:integer}) (?:-|%{INT:[aws][s3access][object_size]:integer}) (?:-|%{INT:[aws][s3access][total_time]:integer}) (?:-|%{INT:[aws][s3access][turn_around_time]:integer}) "(?:-|%{DATA:[http][request][referrer]})" "(?:-|%{DATA:[user_agent][original]})" (?:-|%{NOTSPACE:[aws][s3access][version_id]})(?: (?:-|%{NOTSPACE:[aws][s3access][host_id]}) (?:-|%{NOTSPACE:[aws][s3access][signature_version]}) (?:-|%{NOTSPACE:[tls][cipher]}) (?:-|%{NOTSPACE:[aws][s3access][authentication_type]}) (?:-|%{NOTSPACE:[aws][s3access][host_header]}) (?:-|%{NOTSPACE:[aws][s3access][tls_version]}))?
4+
# :long - %{INT:[aws][s3access][bytes_sent]:int}
5+
# :long - %{INT:[aws][s3access][object_size]:int}
6+
7+
ELB_URIHOST %{IPORHOST:[url][domain]}(?::%{POSINT:[url][port]:integer})?
8+
ELB_URIPATHQUERY %{URIPATH:[url][path]}(?:\?%{URIQUERY:[url][query]})?
9+
# deprecated - old name:
10+
ELB_URIPATHPARAM %{ELB_URIPATHQUERY}
11+
ELB_URI %{URIPROTO:[url][scheme]}://(?:%{USER:[url][username]}(?::[^@]*)?@)?(?:%{ELB_URIHOST})?(?:%{ELB_URIPATHQUERY})?
12+
13+
ELB_REQUEST_LINE (?:%{WORD:[http][request][method]} %{ELB_URI:[url][original]}(?: HTTP/%{NUMBER:[http][version]})?)
14+
15+
# pattern supports 'regular' HTTP ELB format
16+
ELB_V1_HTTP_LOG %{TIMESTAMP_ISO8601:timestamp} %{NOTSPACE:[aws][elb][name]} %{IP:[source][ip]}:%{INT:[source][port]:integer} (?:-|(?:%{IP:[aws][elb][backend][ip]}:%{INT:[aws][elb][backend][port]:integer})) (?:-1|%{NUMBER:[aws][elb][request_processing_time][sec]:float}) (?:-1|%{NUMBER:[aws][elb][backend_processing_time][sec]:float}) (?:-1|%{NUMBER:[aws][elb][response_processing_time][sec]:float}) %{INT:[http][response][status_code]:integer} (?:-|%{INT:[aws][elb][backend][http][response][status_code]:integer}) %{INT:[http][request][body][bytes]:integer} %{INT:[http][response][body][bytes]:integer} "%{ELB_REQUEST_LINE}"(?: "(?:-|%{DATA:[user_agent][original]})" (?:-|%{NOTSPACE:[tls][cipher]}) (?:-|%{NOTSPACE:[aws][elb][ssl_protocol]}))?
17+
# :long - %{INT:[http][request][body][bytes]:int}
18+
# :long - %{INT:[http][response][body][bytes]:int}
19+
20+
ELB_ACCESS_LOG %{ELB_V1_HTTP_LOG}
21+
22+
# pattern used to match a shortened format, that's why we have the optional part (starting with *http.version*) at the end
23+
CLOUDFRONT_ACCESS_LOG (?<timestamp>%{YEAR}-%{MONTHNUM}-%{MONTHDAY}\t%{TIME})\t%{WORD:[aws][cloudfront][x_edge_location]}\t(?:-|%{INT:[destination][bytes]:integer})\t%{IPORHOST:[source][ip]}\t%{WORD:[http][request][method]}\t%{HOSTNAME:[url][domain]}\t%{NOTSPACE:[url][path]}\t(?:(?:000)|%{INT:[http][response][status_code]:integer})\t(?:-|%{DATA:[http][request][referrer]})\t%{DATA:[user_agent][original]}\t(?:-|%{DATA:[url][query]})\t(?:-|%{DATA:[aws][cloudfront][http][request][cookie]})\t%{WORD:[aws][cloudfront][x_edge_result_type]}\t%{NOTSPACE:[aws][cloudfront][x_edge_request_id]}\t%{HOSTNAME:[aws][cloudfront][http][request][host]}\t%{URIPROTO:[network][protocol]}\t(?:-|%{INT:[source][bytes]:integer})\t%{NUMBER:[aws][cloudfront][time_taken]:float}\t(?:-|%{IP:[network][forwarded_ip]})\t(?:-|%{DATA:[aws][cloudfront][ssl_protocol]})\t(?:-|%{NOTSPACE:[tls][cipher]})\t%{WORD:[aws][cloudfront][x_edge_response_result_type]}(?:\t(?:-|HTTP/%{NUMBER:[http][version]})\t(?:-|%{DATA:[aws][cloudfront][fle_status]})\t(?:-|%{DATA:[aws][cloudfront][fle_encrypted_fields]})\t%{INT:[source][port]:integer}\t%{NUMBER:[aws][cloudfront][time_to_first_byte]:float}\t(?:-|%{DATA:[aws][cloudfront][x_edge_detailed_result_type]})\t(?:-|%{NOTSPACE:[http][request][mime_type]})\t(?:-|%{INT:[aws][cloudfront][http][request][size]:integer})\t(?:-|%{INT:[aws][cloudfront][http][request][range][start]:integer})\t(?:-|%{INT:[aws][cloudfront][http][request][range][end]:integer}))?
24+
# :long - %{INT:[destination][bytes]:int}
25+
# :long - %{INT:[source][bytes]:int}
26+
# :long - %{INT:[aws][cloudfront][http][request][size]:int}
27+
# :long - %{INT:[aws][cloudfront][http][request][range][start]:int}
28+
# :long - %{INT:[aws][cloudfront][http][request][range][end]:int}

patterns/ecs-v1/bacula

Lines changed: 53 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,53 @@
1+
BACULA_TIMESTAMP %{MONTHDAY}-%{MONTH}(?:-%{YEAR})? %{HOUR}:%{MINUTE}
2+
BACULA_HOST %{HOSTNAME}
3+
BACULA_VOLUME %{USER}
4+
BACULA_DEVICE %{USER}
5+
BACULA_DEVICEPATH %{UNIXPATH}
6+
BACULA_CAPACITY %{INT}{1,3}(,%{INT}{3})*
7+
BACULA_VERSION %{USER}
8+
BACULA_JOB %{USER}
9+
10+
BACULA_LOG_MAX_CAPACITY User defined maximum volume capacity %{BACULA_CAPACITY:[bacula][volume][max_capacity]} exceeded on device \"%{BACULA_DEVICE:[bacula][volume][device]}\" \(%{BACULA_DEVICEPATH:[bacula][volume][path]}\).?
11+
BACULA_LOG_END_VOLUME End of medium on Volume \"%{BACULA_VOLUME:[bacula][volume][name]}\" Bytes=%{BACULA_CAPACITY:[bacula][volume][bytes]} Blocks=%{BACULA_CAPACITY:[bacula][volume][blocks]} at %{BACULA_TIMESTAMP:[bacula][timestamp]}.
12+
BACULA_LOG_NEW_VOLUME Created new Volume \"%{BACULA_VOLUME:[bacula][volume][name]}\" in catalog.
13+
BACULA_LOG_NEW_LABEL Labeled new Volume \"%{BACULA_VOLUME:[bacula][volume][name]}\" on (?:file )?device \"%{BACULA_DEVICE:[bacula][volume][device]}\" \(%{BACULA_DEVICEPATH:[bacula][volume][path]}\).
14+
BACULA_LOG_WROTE_LABEL Wrote label to prelabeled Volume \"%{BACULA_VOLUME:[bacula][volume][name]}\" on device \"%{BACULA_DEVICE:[bacula][volume][device]}\" \(%{BACULA_DEVICEPATH:[bacula][volume][path]}\)
15+
BACULA_LOG_NEW_MOUNT New volume \"%{BACULA_VOLUME:[bacula][volume][name]}\" mounted on device \"%{BACULA_DEVICE:[bacula][volume][device]}\" \(%{BACULA_DEVICEPATH:[bacula][volume][path]}\) at %{BACULA_TIMESTAMP:[bacula][timestamp]}.
16+
BACULA_LOG_NOOPEN \s*Cannot open %{DATA}: ERR=%{GREEDYDATA:[error][message]}
17+
BACULA_LOG_NOOPENDIR \s*Could not open directory \"?%{DATA:[file][path]}\"?: ERR=%{GREEDYDATA:[error][message]}
18+
BACULA_LOG_NOSTAT \s*Could not stat %{DATA:[file][path]}: ERR=%{GREEDYDATA:[error][message]}
19+
BACULA_LOG_NOJOBS There are no more Jobs associated with Volume \"%{BACULA_VOLUME:[bacula][volume][name]}\". Marking it purged.
20+
BACULA_LOG_ALL_RECORDS_PRUNED .*?All records pruned from Volume \"%{BACULA_VOLUME:[bacula][volume][name]}\"; marking it \"Purged\"
21+
BACULA_LOG_BEGIN_PRUNE_JOBS Begin pruning Jobs older than %{INT} month %{INT} days .
22+
BACULA_LOG_BEGIN_PRUNE_FILES Begin pruning Files.
23+
BACULA_LOG_PRUNED_JOBS Pruned %{INT} Jobs* for client %{BACULA_HOST:[bacula][client][name]} from catalog.
24+
BACULA_LOG_PRUNED_FILES Pruned Files from %{INT} Jobs* for client %{BACULA_HOST:[bacula][client][name]} from catalog.
25+
BACULA_LOG_ENDPRUNE End auto prune.
26+
BACULA_LOG_STARTJOB Start Backup JobId %{INT}, Job=%{BACULA_JOB:[bacula][job][name]}
27+
BACULA_LOG_STARTRESTORE Start Restore Job %{BACULA_JOB:[bacula][job][name]}
28+
BACULA_LOG_USEDEVICE Using Device \"%{BACULA_DEVICE:[bacula][volume][device]}\"
29+
BACULA_LOG_DIFF_FS \s*%{UNIXPATH} is a different filesystem. Will not descend from %{UNIXPATH} into it.
30+
BACULA_LOG_JOBEND Job write elapsed time = %{DATA:[bacula][job][elapsed_time]}, Transfer rate = %{NUMBER} (K|M|G)? Bytes/second
31+
BACULA_LOG_NOPRUNE_JOBS No Jobs found to prune.
32+
BACULA_LOG_NOPRUNE_FILES No Files found to prune.
33+
BACULA_LOG_VOLUME_PREVWRITTEN Volume \"?%{BACULA_VOLUME:[bacula][volume][name]}\"? previously written, moving to end of data.
34+
BACULA_LOG_READYAPPEND Ready to append to end of Volume \"%{BACULA_VOLUME:[bacula][volume][name]}\" size=%{INT:[bacula][volume][size]:integer}
35+
# :long - %{INT:[bacula][volume][size]:int}
36+
BACULA_LOG_CANCELLING Cancelling duplicate JobId=%{INT:[bacula][job][other_id]}.
37+
BACULA_LOG_MARKCANCEL JobId %{INT:[bacula][job][id]}, Job %{BACULA_JOB:[bacula][job][name]} marked to be canceled.
38+
BACULA_LOG_CLIENT_RBJ shell command: run ClientRunBeforeJob \"%{GREEDYDATA:[bacula][job][client_run_before_command]}\"
39+
BACULA_LOG_VSS (Generate )?VSS (Writer)?
40+
BACULA_LOG_MAXSTART Fatal [eE]rror: Job canceled because max start delay time exceeded.
41+
BACULA_LOG_DUPLICATE Fatal [eE]rror: JobId %{INT:[bacula][job][other_id]} already running. Duplicate job not allowed.
42+
BACULA_LOG_NOJOBSTAT Fatal [eE]rror: No Job status returned from FD.
43+
BACULA_LOG_FATAL_CONN Fatal [eE]rror: bsock.c:133 Unable to connect to (Client: %{BACULA_HOST:[bacula][client][name]}|Storage daemon) on %{IPORHOST:[client][address]}:%{POSINT:[client][port]:integer}. ERR=%{GREEDYDATA:[error][message]}
44+
BACULA_LOG_NO_CONNECT Warning: bsock.c:127 Could not connect to (Client: %{BACULA_HOST:[bacula][client][name]}|Storage daemon) on %{IPORHOST:[client][address]}:%{POSINT:[client][port]:integer}. ERR=%{GREEDYDATA:[error][message]}
45+
BACULA_LOG_NO_AUTH Fatal error: Unable to authenticate with File daemon at \"?%{IPORHOST:[client][address]}(?::%{POSINT:[client][port]:integer})?\"?. Possible causes:
46+
BACULA_LOG_NOSUIT No prior or suitable Full backup found in catalog. Doing FULL backup.
47+
BACULA_LOG_NOPRIOR No prior Full backup Job record found.
48+
49+
BACULA_LOG_JOB (Error: )?Bacula %{BACULA_HOST} %{BACULA_VERSION} \(%{BACULA_VERSION}\):
50+
51+
BACULA_LOG %{BACULA_TIMESTAMP:timestamp} %{BACULA_HOST:[host][hostname]}(?: JobId %{INT:[bacula][job][id]})?:? (%{BACULA_LOG_MAX_CAPACITY}|%{BACULA_LOG_END_VOLUME}|%{BACULA_LOG_NEW_VOLUME}|%{BACULA_LOG_NEW_LABEL}|%{BACULA_LOG_WROTE_LABEL}|%{BACULA_LOG_NEW_MOUNT}|%{BACULA_LOG_NOOPEN}|%{BACULA_LOG_NOOPENDIR}|%{BACULA_LOG_NOSTAT}|%{BACULA_LOG_NOJOBS}|%{BACULA_LOG_ALL_RECORDS_PRUNED}|%{BACULA_LOG_BEGIN_PRUNE_JOBS}|%{BACULA_LOG_BEGIN_PRUNE_FILES}|%{BACULA_LOG_PRUNED_JOBS}|%{BACULA_LOG_PRUNED_FILES}|%{BACULA_LOG_ENDPRUNE}|%{BACULA_LOG_STARTJOB}|%{BACULA_LOG_STARTRESTORE}|%{BACULA_LOG_USEDEVICE}|%{BACULA_LOG_DIFF_FS}|%{BACULA_LOG_JOBEND}|%{BACULA_LOG_NOPRUNE_JOBS}|%{BACULA_LOG_NOPRUNE_FILES}|%{BACULA_LOG_VOLUME_PREVWRITTEN}|%{BACULA_LOG_READYAPPEND}|%{BACULA_LOG_CANCELLING}|%{BACULA_LOG_MARKCANCEL}|%{BACULA_LOG_CLIENT_RBJ}|%{BACULA_LOG_VSS}|%{BACULA_LOG_MAXSTART}|%{BACULA_LOG_DUPLICATE}|%{BACULA_LOG_NOJOBSTAT}|%{BACULA_LOG_FATAL_CONN}|%{BACULA_LOG_NO_CONNECT}|%{BACULA_LOG_NO_AUTH}|%{BACULA_LOG_NOSUIT}|%{BACULA_LOG_JOB}|%{BACULA_LOG_NOPRIOR})
52+
# old (deprecated) name :
53+
BACULA_LOGLINE %{BACULA_LOG}

patterns/ecs-v1/bind

Lines changed: 13 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,13 @@
1+
BIND9_TIMESTAMP %{MONTHDAY}[-]%{MONTH}[-]%{YEAR} %{TIME}
2+
3+
BIND9_DNSTYPE (?:A|AAAA|CAA|CDNSKEY|CDS|CERT|CNAME|CSYNC|DLV|DNAME|DNSKEY|DS|HINFO|LOC|MX|NAPTR|NS|NSEC|NSEC3|OPENPGPKEY|PTR|RRSIG|RP|SIG|SMIMEA|SOA|SRV|TSIG|TXT|URI)
4+
BIND9_CATEGORY (?:queries)
5+
6+
# dns.question.class is static - only 'IN' is supported by Bind9
7+
# bind.log.question.name is expected to be a 'duplicate' (same as the dns.question.name capture)
8+
BIND9_QUERYLOGBASE client(:? @0x(?:[0-9A-Fa-f]+))? %{IP:[client][ip]}#%{POSINT:[client][port]:integer} \(%{GREEDYDATA:[bind][log][question][name]}\): query: %{GREEDYDATA:[dns][question][name]} (?<[dns][question][class]>IN) %{BIND9_DNSTYPE:[dns][question][type]}(:? %{DATA:[bind][log][question][flags]})? \(%{IP:[server][ip]}\)
9+
10+
# for query-logging category and severity are always fixed as "queries: info: "
11+
BIND9_QUERYLOG %{BIND9_TIMESTAMP:timestamp} %{BIND9_CATEGORY:[bind][log][category]}: %{LOGLEVEL:[log][level]}: %{BIND9_QUERYLOGBASE}
12+
13+
BIND9 %{BIND9_QUERYLOG}

0 commit comments

Comments
 (0)