#!/usr/bin/env python3
"""
Convert tinydns and dnscache logs to human-readable form.
"""
import re
import typing
from struct import pack
from subprocess import PIPE, Popen
from time import gmtime, strftime
## Regular expressions for matching tinydns/dnscache log lines. We
## compile these once here rather than within the corresponding
## matching functions, because the latter get executed repeatedly.

# This first pattern is used to match the timestamp format that the
# tai64nlocal program produces. It appears in both dnscache and
# tinydns lines, after they've been piped through tai64nlocal, of
# course.
timestamp_pat = r'[\d-]+ [\d:\.]+'

# The regex to match dnscache log lines.
dnscache_log_re = re.compile(fr'({timestamp_pat}) (\w+)(.*)')

# The "hex4" pattern matches a string of four hexadecimal digits. This
# is used, for example, by tinydns to encode the query type
# identifier.
hex4_pat = r'[0-9a-f]{4}'

# The IP pattern matches a string of either 8 or 32 hexadecimal
# characters, which correspond to IPv4 and IPv6 addresses,
# respectively, in tinydns logs.
ip_pat = r'[0-9a-f]{8,32}'

# The regex to match tinydns log lines.
tinydns_log_re = re.compile(
    rf'({timestamp_pat}) ({ip_pat}):({hex4_pat}):({hex4_pat}) ([\+\-IC/]) ({hex4_pat}) (.*)')

# A dictionary mapping query type identifiers, in decimal, to their
# friendly names for tinydns. Reference:
#
#   https://en.wikipedia.org/wiki/List_of_DNS_record_types
#
# Note that the mapping here is non-exhaustive, and that tinydns will
# log responses for record types that it does not know about.
query_type = {
    1: "a",
    2: "ns",
    5: "cname",
    6: "soa",
    12: "ptr",
    13: "hinfo",
    15: "mx",
    16: "txt",
    17: "rp",
    24: "sig",
    25: "key",
    28: "aaaa",
    33: "srv",
    35: "naptr",
    38: "a6",
    48: "dnskey",
    52: "tlsa",
    65: "https",
    252: "axfr",
    255: "any"
}

# tinydns can drop a query for one of three reasons; this dictionary
# maps the symbol that gets logged in each case to a human-readable
# reason. NOTE(review): only the "-" entry ("no authority") is pinned
# down by the doctest in handle_tinydns_log; the "I" and "C" wordings
# are reconstructions -- confirm against the upstream file.
query_drop_reason = {
    "-": "no authority",
    "I": "invalid query",
    "C": "invalid class"
}
def convert_ip(ip: str) -> str:
    """
    Convert a hex string representing an IP address to conventional
    human-readable form, ie. dotted-quad decimal for IPv4, and
    8 colon-separated hex shorts for IPv6.

    Parameters
    ----------
    ip : str
        A string of 8 (IPv4) or 32 (IPv6) hexadecimal characters.

    Returns
    -------
    str
        The human-readable form of the address.

    Examples
    --------
    >>> convert_ip("7f000001")
    '127.0.0.1'
    >>> convert_ip("00000000000000000000ffff7f000001")
    '0000:0000:0000:0000:0000:ffff:7f00:0001'
    """
    if len(ip) == 8:
        # IPv4, eg. "7f000001" -> "7f 00 00 01" -> "127.0.0.1".
        # pack(">L", n) yields four big-endian bytes; iterating the
        # bytes object in python-3 produces the four decimal octets.
        return "%d.%d.%d.%d" % tuple(pack(">L", int(ip, 16)))
    else:
        # IPv6 is actually simpler -- it's just a string-slicing operation.
        return ":".join([ip[(4*i) : (4*i + 4)] for i in range(8)])
def decode_client(words, i):
    """
    Decode the client field at index ``i`` of ``words`` in place.

    The field has the form "ip:port" (dnscache tcpopen/tcpclose) or
    "ip:port:id" (queries), where the ip is hex-encoded and the port
    and id are hexadecimal integers.

    Parameters
    ----------
    words : list of str
        The whitespace-split payload of a log line; modified in place.
    i : int
        The index of the client field within ``words``.
    """
    chunks = words[i].split(":")
    if len(chunks) == 2:  # ip:port
        words[i] = "%s:%d" % (convert_ip(chunks[0]), int(chunks[1], 16))
    elif len(chunks) == 3:  # ip:port:id
        words[i] = "%s:%d (id %d)" % (convert_ip(chunks[0]),
                                      int(chunks[1], 16),
                                      int(chunks[2], 16))
def decode_ip(words, i):
    """Replace the hex-encoded IP at index ``i`` of ``words`` with its
    human-readable form, in place."""
    words[i] = convert_ip(words[i])
def decode_ttl(words, i):
    """Label the TTL field at index ``i`` of ``words`` with a ``TTL=``
    prefix, in place."""
    ttl = words[i]
    words[i] = "TTL=%s" % ttl
def decode_serial(words, i):
    """Rewrite the decimal serial number at index ``i`` of ``words``
    as ``#<serial>``, in place."""
    words[i] = "#%d" % int(words[i])
def decode_type(words, i):
    """
    Replace the numeric query type at index ``i`` of ``words`` with
    its friendly name, in place.

    The lookup goes through the module-level ``query_type``
    dictionary; unknown type ids are left as-is (the original
    string), since that mapping is non-exhaustive.
    """
    qt = words[i]
    words[i] = query_type.get(int(qt), qt)
def handle_dnscache_log(line, match):
    """
    Handle a line that matched the ``dnscache_log_re`` regex.

    Decodes the event-specific fields (IPs, TTLs, serials, query
    types, clients) in place and prints the readable line.

    Parameters
    ----------
    line : str
        The dnscache log line that matched ``dnscache_log_re``.
    match : re.Match
        The match object that was returned when ``line`` was
        tested against ``dnscache_log_re``.
    """
    (timestamp, event, data) = match.groups()

    words = data.split()
    if event == "cached":
        if words[0] not in ("cname", "ns", "nxdomain"):
            decode_type(words, 0)
        decode_ip(words, 1)

    elif event == "drop":
        decode_serial(words, 0)

    elif event == "lame":
        decode_ip(words, 0)

    elif event == "nodata":
        decode_ip(words, 0)
        decode_ttl(words, 1)
        decode_type(words, 2)

    elif event == "nxdomain":
        decode_ip(words, 0)
        decode_ttl(words, 1)

    elif event == "query":
        decode_serial(words, 0)
        decode_client(words, 1)
        decode_type(words, 2)

    elif event == "rr":
        decode_ip(words, 0)
        decode_ttl(words, 1)
        if words[2] not in ("cname", "mx", "ns", "ptr", "soa"):
            decode_type(words, 2)
            if words[2] == "a":  # decode answer to an A query
                decode_ip(words, 4)
            if words[2] == "txt":  # text record
                response = words[4]
                if response.endswith("..."):
                    ellipsis = "..."
                    response = response[0:-3]
                else:
                    ellipsis = ""
                length = int(response[0:2], 16)
                chars = []
                # BUG FIX: the original used len(response)/2, which is
                # a float in python-3 and makes range() raise
                # TypeError; floor division is intended here.
                for i in range(1, len(response) // 2):
                    chars.append(chr(int(response[2*i : (2*i)+2], 16)))
                words[4] = "%d:\"%s%s\"" % (length, "".join(chars), ellipsis)

    elif event == "sent":
        decode_serial(words, 0)
        decode_client(words, 1)

    elif event == "stats":
        words[0] = "count=%s" % words[0]
        words[1] = "motion=%s" % words[1]
        words[2] = "udp-active=%s" % words[2]
        words[3] = "tcp-active=%s" % words[3]

    elif event == "tx":
        words[0] = "g=%s" % words[0]
        decode_type(words, 1)
        # words[2] = name
        # words[3] = control (domain for which these servers are believed
        # to be authoritative)
        for i in range(4, len(words)):
            decode_ip(words, i)

    elif event in ("tcpopen", "tcpclose"):
        decode_client(words, 0)

    print(timestamp, event, " ".join(words))
def handle_tinydns_log(line: str, match: re.Match):
    """
    Handle a line that matched the ``tinydns_log_re`` regex.

    Parameters
    ----------
    line : str
        The tinydns log line that matched ``tinydns_log_re``.
    match : re.Match
        The match object that was returned when ``line`` was
        tested against ``tinydns_log_re``.

    Examples
    --------
    >>> line = "2022-09-14 21:04:40.206516500 7f000001:9d61:be69 - 0001 www.example.com"
    >>> match = tinydns_log_re.match(line)
    >>> handle_tinydns_log(line, match)
    2022-09-14 21:04:40.206516500 dropped query (no authority) from 127.0.0.1:40289 (id 48745): a www.example.com
    """
    (timestamp, ip, port, id, code, type, name) = match.groups()

    # Decode the hex-encoded client fields, eg. "9d61" -> 40289.
    ip = convert_ip(ip)
    port = int(port, 16)
    id = int(id, 16)

    # Convert the "type" field to a human-readable record type name
    # using the query_type dictionary. If the right name isn't present
    # in the dictionary, we use the (decimal) type id instead.
    type = int(type, 16)               # "001c" -> 28
    type = query_type.get(type, type)  # 28 -> "aaaa"

    # Print the timestamp first, then the code-specific message on the
    # same output line.
    print(timestamp, end=' ')

    if code == "+":
        print("sent response to %s:%s (id %s): %s %s"
              % (ip, port, id, type, name))
    elif code in ("-", "I", "C"):
        reason = query_drop_reason[code]
        print("dropped query (%s) from %s:%s (id %s): %s %s"
              % (reason, ip, port, id, type, name))
    elif code == "/":
        print("dropped query (couldn't parse) from %s:%s"
              % (ip, port))
    else:
        print("%s from %s:%s (id %s): %s %s"
              % (code, ip, port, id, type, name))
def parse_logfile(file: typing.TextIO):
    r"""
    Process a single log ``file``.

    Each raw line is piped through tai64nlocal to get a readable
    timestamp, then dispatched to the tinydns or dnscache handler
    depending on which regex it matches.

    Parameters
    ----------
    file : typing.TextIO
        An open log file, or stdin.

    Examples
    --------
    >>> line = "@4000000063227a320c4f3114 7f000001:9d61:be69 - 0001 www.example.com\n"
    >>> from tempfile import NamedTemporaryFile
    >>> with NamedTemporaryFile(mode="w", delete=False) as f:
    ...     _ = f.write(line)
    >>> f = open(f.name, 'r')
    >>> parse_logfile(f)
    2022-09-14 21:04:40.206516500 dropped query (no authority) from 127.0.0.1:40289 (id 48745): a www.example.com
    >>> f.close()
    >>> from os import remove
    >>> remove(f.name)
    """
    # Open pipe to tai64nlocal: we will write lines of our input (the
    # raw log file) to it, and read log lines with readable timestamps
    # back. bufsize=0 keeps the pipe unbuffered so each line comes
    # back immediately.
    tai = Popen(["tai64nlocal"], stdin=PIPE, stdout=PIPE, text=True, bufsize=0)

    for line in file:
        tai.stdin.write(line)
        line = tai.stdout.readline()

        match = tinydns_log_re.match(line)
        if match:
            handle_tinydns_log(line, match)
            continue

        match = dnscache_log_re.match(line)
        if match:
            handle_dnscache_log(line, match)
            continue

        # Unrecognized lines are passed through untouched; readline()
        # kept the trailing newline, so suppress print's own.
        print(line, end="")
def main():
    """
    Entry point: parse the command line and process each logfile.
    """
    # Create an argument parser using the file's docstring as its
    # description.
    from argparse import ArgumentParser, FileType
    parser = ArgumentParser(description=__doc__)

    # Parse zero or more positional arguments into a list of
    # "logfiles". If none are given, read from stdin instead.
    from sys import stdin
    parser.add_argument("logfiles",
                        metavar="LOGFILE",
                        type=FileType("r"),
                        nargs="*",
                        default=[stdin],
                        help="djbdns logfile to process (default: stdin)")

    # Warning: argparse automatically opens its file arguments here,
    # and they only get closed when the program terminates. There's no
    # real benefit to closing them one-at-a-time after calling
    # parse_logfile(), because the "scarce" resource of open file
    # descriptors gets consumed immediately, before any processing has
    # happened. In other words, if you're going to run out of file
    # descriptors, it's going to happen right now.
    #
    # So anyway, don't run this on several million logfiles.
    args = parser.parse_args()
    for f in args.logfiles:
        parse_logfile(f)


if __name__ == "__main__":
    main()