]> gitweb.michael.orlitzky.com - djbdns-logparse.git/blob - bin/djbdns-logparse.py
5262fe0e965d012e744955478a6ecb81b549a984
[djbdns-logparse.git] / bin / djbdns-logparse.py
#!/usr/bin/python3
r"""
Convert tinydns and dnscache logs to human-readable form
"""

import re, typing
from struct import pack

## Regular expressions for matching tinydns/dnscache log lines. We
## compile these once here rather than within the corresponding
## matching functions, because the latter get executed repeatedly.

# This first pattern is used to match the timestamp format that the
# tai64nlocal program produces. It appears in both dnscache and
# tinydns lines, after they've been piped through tai64nlocal, of
# course.
timestamp_pat = r'[\d-]+ [\d:\.]+'

# The regex to match dnscache log lines: a timestamp, a one-word
# event name, and everything that follows (the event's data fields).
dnscache_log_re = re.compile(fr'({timestamp_pat}) (\w+)(.*)')

# The "hex4" pattern matches a string of four hexadecimal digits. This
# is used, for example, by tinydns to encode the query type
# identifier.
hex4_pat = r'[0-9a-f]{4}'

# The IP pattern matches a string of either 8 or 32 hexadecimal
# characters, which correspond to IPv4 and IPv6 addresses,
# respectively, in tinydns logs.
# NOTE(review): "{8,32}" also matches every length in between, so
# convert_ip() must cope with lengths other than exactly 8 or 32.
ip_pat = r'[0-9a-f]{8,32}'

# The regex to match tinydns log lines: timestamp, client IP, client
# port, query id, response/drop code, query type, and query name.
tinydns_log_re = re.compile(
    rf'({timestamp_pat}) ({ip_pat}):({hex4_pat}):({hex4_pat}) ([\+\-IC/]) ({hex4_pat}) (.*)'
)

# A dictionary mapping query type identifiers, in decimal, to their
# friendly names for tinydns. Reference:
#
#   https://en.wikipedia.org/wiki/List_of_DNS_record_types
#
# Note that mapping here is non-exhaustive, and that tinydns will
# log responses for record types that it does not know about.
query_type = {
    1: "a",
    2: "ns",
    5: "cname",
    6: "soa",
    12: "ptr",
    13: "hinfo",
    15: "mx",
    16: "txt",
    17: "rp",
    24: "sig",
    25: "key",
    28: "aaaa",
    33: "srv",
    35: "naptr",
    38: "a6",
    48: "dnskey",
    52: "tlsa",
    65: "https",
    252: "axfr",
    255: "any",
    257: "caa"
}

# tinydns can drop a query for one of three reasons; this dictionary
# maps the symbol that gets logged in each case to a human-readable
# reason. We include the "+" case here, indicating that the query was
# NOT dropped, to avoid a special case later on when we're formatting
# the human-readable output.
query_drop_reason = {
    "+": None,
    "-": "no authority",
    "I": "invalid query",
    "C": "invalid class",
    "/": "couldn't parse"
}
80
81
def convert_ip(ip : str) -> str:
    r"""
    Convert a hex string representing an IP address to
    human-readable form.

    Parameters
    ----------

    ip : str
        The hexadecimal representation of either an IPv4 (8 hex
        characters) or an IPv6 (32 hex characters) address.

    Returns
    -------

    The usual decimal dotted-quad representation is returned for an
    IPv4 address. IPv6 addresses are returned almost as-is, but with
    colons inserted in the appropriate places, between every four
    characters. A string of any other length is returned unchanged:
    the tinydns regex ("{8,32}") also matches intermediate lengths,
    and passing those through beats printing "None".

    Examples
    --------

    >>> convert_ip("7f000001")
    '127.0.0.1'
    >>> convert_ip("00000000000000000000ffff7f000001")
    '0000:0000:0000:0000:0000:ffff:7f00:0001'
    >>> convert_ip("7f0000010a")
    '7f0000010a'
    """
    if len(ip) == 8:
        # IPv4, eg. "7f000001" -> "7f 00 00 01" -> "127.0.0.1"
        return ".".join(map(str, pack(">L", int(ip, 16))))
    if len(ip) == 32:
        # IPv6 is actually simpler -- it's just a string-slicing
        # operation: insert a colon between each group of four.
        return ":".join(ip[(4*i) : (4*i+4)] for i in range(8))
    # Unexpected length: previously this fell off the end and
    # returned None, which showed up literally in the output.
    return ip
116
117
def decode_client(words : list, i : int):
    r"""
    Helper function to decode the client field in a dnscache log
    entry.

    There are two possible formats for the client field,

    1. clientip:clientport, used by tcpopen/tcpclose entries,
    2. clientip:clientport:id, used by "query" entries.

    Parameters
    ----------

    words : list
        The ``words`` list (a list of fields) from
        :func:`handle_dnscache_log`.

    i : int
        The index of the client field within ``words``

    Returns
    -------

    Nothing; the ``i``th entry in the ``words`` list is modified
    in-place.

    Examples
    --------

    >>> words = ["foo", "bar", "7f000001:9253", "quux"]
    >>> decode_client(words, 2)
    >>> words
    ['foo', 'bar', '127.0.0.1:37459', 'quux']

    >>> words = ["foo", "7f000001:a3db:4fb9", "bar", "quux"]
    >>> decode_client(words, 1)
    >>> words
    ['foo', '127.0.0.1:41947 (id 20409)', 'bar', 'quux']

    """
    fields = words[i].split(":")

    # The address and port are always the first two components; both
    # are logged in hexadecimal.
    ip = convert_ip(fields[0])
    port = int(fields[1], 16)

    if len(fields) == 3:
        # A "query" entry's clientip:clientport:id field carries the
        # query id as a third component.
        query_id = int(fields[2], 16)
        words[i] = f"{ip}:{port} (id {query_id})"
    else:
        words[i] = f"{ip}:{port}"
168
def decode_ip(words : list, i : int):
    r"""
    Helper function to decode the ip field in a dnscache log
    entry.

    A single "serverip" field is present in the lame, nodata,
    nxdomain, and rr entry types.

    Parameters
    ----------

    words : list
        The ``words`` list (a list of fields) from
        :func:`handle_dnscache_log`.

    i : int
        The index of the ip field within ``words``

    Returns
    -------

    Nothing; the ``i``th entry in the ``words`` list is modified
    in-place.

    Examples
    --------

    >>> words = ["foo", "bar", "7f000001", "quux"]
    >>> decode_ip(words, 2)
    >>> words
    ['foo', 'bar', '127.0.0.1', 'quux']

    >>> words = ["foo", "00000000000000000000ffff7f000001", "bar", "quux"]
    >>> decode_ip(words, 1)
    >>> words
    ['foo', '0000:0000:0000:0000:0000:ffff:7f00:0001', 'bar', 'quux']

    """
    # All of the work happens in convert_ip(); we just overwrite the
    # field with its result.
    hex_ip = words[i]
    words[i] = convert_ip(hex_ip)
208
def decode_ttl(words : list, i : int):
    r"""
    Helper function to decode the ttl field in a dnscache log
    entry.

    A single "ttl" field is present in the nodata, nxdomain, and
    rr entry types.

    Parameters
    ----------

    words : list
        The ``words`` list (a list of fields) from
        :func:`handle_dnscache_log`.

    i : int
        The index of the ttl field within ``words``

    Returns
    -------

    Nothing; the ``i``th entry in the ``words`` list is modified
    in-place.

    Examples
    --------

    >>> words = ["c0a80101", "20865", "1", "www.example.com.", "5db8d822"]
    >>> decode_ttl(words, 1)
    >>> words
    ['c0a80101', 'TTL=20865', '1', 'www.example.com.', '5db8d822']

    """
    # The value itself is left as-is; we only label it.
    words[i] = "TTL=" + words[i]
243
def decode_serial(words : list, i : int):
    r"""
    Helper function to decode the serial-number field in a dnscache
    log entry (present in the drop, query, and sent entry types).

    The decimal field is prefixed with a "#" in-place; parsing it as
    an int first rejects (with a ValueError) anything non-numeric.

    Examples
    --------

    >>> words = ["1", "49"]
    >>> decode_serial(words, 0)
    >>> words
    ['#1', '49']

    """
    words[i] = f"#{int(words[i])}"
247
def decode_type(words : list, i : int):
    r"""
    Helper function to decode the query-type field in a dnscache log
    entry.

    The field holds a decimal record-type identifier. If the
    ``query_type`` dictionary knows a friendly name for it, the field
    is replaced by that name; otherwise it is left as the numeric
    string.

    Parameters
    ----------

    words : list
        The ``words`` list (a list of fields) from
        :func:`handle_dnscache_log`.

    i : int
        The index of the type field within ``words``

    Returns
    -------

    Nothing; the ``i``th entry in the ``words`` list is modified
    in-place.
    """
    qt = words[i]
    try:
        words[i] = query_type.get(int(qt), qt)
    except ValueError:
        # The field wasn't numeric at all (malformed or unexpected
        # log line); leave it unchanged rather than crashing.
        pass
251
def _decode_txt_response(response: str) -> str:
    r"""
    Decode the hex-encoded response field of a dnscache "rr" entry
    for a txt record.

    The first byte (two hex characters) is the length of the text,
    and each subsequent byte is one character of the text itself.
    dnscache truncates long responses with a trailing "...", which is
    preserved inside the quotes of the result.

    Parameters
    ----------

    response : str
        The hex-encoded response field, possibly ending in "...".

    Returns
    -------

    A string of the form ``length:"text"``.

    Examples
    --------

    >>> _decode_txt_response("0b68656c6c6f20776f726c64")
    '11:"hello world"'

    """
    if response.endswith("..."):
        suffix = "..."
        response = response[0:-3]
    else:
        suffix = ""
    # The first byte holds the (full, untruncated) text length.
    length = int(response[0:2], 16)
    # Decode the remaining bytes, one character per hex pair.
    chars = [
        chr(int(response[2*i : (2*i)+2], 16))
        for i in range(1, len(response)//2)
    ]
    txt = "".join(chars)
    return f"{length}:\"{txt}{suffix}\""


def handle_dnscache_log(line : str) -> typing.Optional[str]:
    r"""
    Handle a single log line if it matches the ``dnscache_log_re`` regex.

    Parameters
    ----------

    line : string
        The log line that might match ``dnscache_log_re``.

    Returns
    -------

    Either the human-readable string if the log line was handled (that
    is, if it was really a dnscache log line), or ``None`` if it was
    not.

    Examples
    --------

    >>> line = "2022-09-15 18:37:33.863805500 query 1 7f000001:a3db:4fb9 1 www.example.com."
    >>> handle_dnscache_log(line)
    '2022-09-15 18:37:33.863805500 query #1 127.0.0.1:41947 (id 20409) a www.example.com.'

    >>> line = "2022-09-15 18:37:33.863874500 tx 0 1 www.example.com. . c0a80101"
    >>> handle_dnscache_log(line)
    '2022-09-15 18:37:33.863874500 tx g=0 a www.example.com. . 192.168.1.1'

    >>> line = "2022-09-15 18:37:33.878529500 rr c0a80101 20865 1 www.example.com. 5db8d822"
    >>> handle_dnscache_log(line)
    '2022-09-15 18:37:33.878529500 rr 192.168.1.1 TTL=20865 a www.example.com. 93.184.216.34'

    >>> line = "2022-09-15 18:37:33.878532500 stats 1 43 1 0"
    >>> handle_dnscache_log(line)
    '2022-09-15 18:37:33.878532500 stats count=1 motion=43 udp-active=1 tcp-active=0'

    >>> line = "2022-09-15 18:37:33.878602500 sent 1 49"
    >>> handle_dnscache_log(line)
    '2022-09-15 18:37:33.878602500 sent #1 49'

    >>> line = "this line is nonsense"
    >>> handle_dnscache_log(line)

    """
    match = dnscache_log_re.match(line)
    if not match:
        return None

    (timestamp, event, data) = match.groups()

    # Dispatch on the event name, rewriting the individual fields of
    # "data" in-place to their human-readable forms.
    words = data.split()
    if event == "cached":
        # "cname"/"ns"/"nxdomain" entries carry no numeric type field.
        if words[0] not in ("cname", "ns", "nxdomain"):
            decode_type(words, 0)

    elif event == "drop":
        decode_serial(words, 0)

    elif event == "lame":
        decode_ip(words, 0)

    elif event == "nodata":
        decode_ip(words, 0)
        decode_ttl(words, 1)
        decode_type(words, 2)

    elif event == "nxdomain":
        decode_ip(words, 0)
        decode_ttl(words, 1)

    elif event == "query":
        decode_serial(words, 0)
        decode_client(words, 1)
        decode_type(words, 2)

    elif event == "rr":
        decode_ip(words, 0)
        decode_ttl(words, 1)
        # "cname"/"mx"/"ns"/"ptr"/"soa" records are logged with their
        # names already; everything else has a numeric type field.
        if words[2] not in ("cname", "mx", "ns", "ptr", "soa"):
            decode_type(words, 2)
        if words[2] == "a": # decode answer to an A query
            decode_ip(words, 4)
        if words[2] == "txt": # text record
            words[4] = _decode_txt_response(words[4])

    elif event == "sent":
        decode_serial(words, 0)

    elif event == "stats":
        words[0] = f"count={words[0]}"
        words[1] = f"motion={words[1]}"
        words[2] = f"udp-active={words[2]}"
        words[3] = f"tcp-active={words[3]}"

    elif event == "tx":
        words[0] = f"g={words[0]}"
        decode_type(words, 1)
        # words[2] = name
        # words[3] = control (domain for which these servers are believed
        # to be authoritative)
        for i in range(4, len(words)):
            decode_ip(words, i)

    elif event in ("tcpopen", "tcpclose"):
        decode_client(words, 0)

    # Reconstitute "data" (i.e. everything after the timestamp and the
    # event) from "words", which was originally obtained by splitting
    # "data".
    data = " ".join(words)
    return f"{timestamp} {event} {data}"
374
375
376
def handle_tinydns_log(line : str) -> typing.Optional[str]:
    r"""
    Handle a single log line if it matches the ``tinydns_log_re`` regex.

    Parameters
    ----------

    line : string
        The log line that might match ``tinydns_log_re``.

    Returns
    -------

    Either the human-readable string if the log line was handled (that
    is, if it was really a tinydns log line), or ``None`` if it was
    not.

    Examples
    --------

    >>> line = "2022-09-14 21:04:40.206516500 7f000001:9d61:be69 - 0001 www.example.com"
    >>> handle_tinydns_log(line)
    '2022-09-14 21:04:40.206516500 dropped query (no authority) from 127.0.0.1:40289 (id 48745): a www.example.com'

    >>> line = "this line is nonsense"
    >>> handle_tinydns_log(line)

    """
    match = tinydns_log_re.match(line)
    if match is None:
        return None

    timestamp, ip, port, query_id, code, qtype, name = match.groups()
    ip = convert_ip(ip)
    port = int(port, 16)
    query_id = int(query_id, 16)

    # Convert the "type" field to a human-readable record type name
    # using the query_type dictionary. If the right name isn't present
    # in the dictionary, we use the (decimal) type id instead, e.g.
    # "001c" -> 28 -> "aaaa".
    type_id = int(qtype, 16)
    qtype = query_type.get(type_id, type_id)

    reason = query_drop_reason[code]
    if code == "+":
        # The query was answered, not dropped.
        detail = f"sent response to {ip}:{port} (id {query_id}): {qtype} {name}"
    elif code == "/":
        # The query couldn't even be parsed, so the id/type/name
        # fields are meaningless and we omit them.
        detail = f"dropped query ({reason}) from {ip}:{port}"
    else:
        # Dropped, but parseable: the log line is a bit more
        # informative than it would have been otherwise.
        detail = (f"dropped query ({reason}) from {ip}:{port}"
                  f" (id {query_id}): {qtype} {name}")

    return f"{timestamp} {detail}"
439
440
def parse_logfile(file : typing.TextIO):
    r"""
    Process a single log ``file``.

    Each line is piped through tai64nlocal to make its timestamp
    readable, then pretty-printed by the tinydns handler, the
    dnscache handler, or (if neither recognizes it) verbatim.

    Parameters
    ----------

    file : typing.TextIO
        An open log file, or stdin.

    Examples
    --------

    >>> line = "@4000000063227a320c4f3114 7f000001:9d61:be69 - 0001 www.example.com\n"
    >>> from tempfile import NamedTemporaryFile
    >>> with NamedTemporaryFile(mode="w", delete=False) as f:
    ...     _ = f.write(line)
    >>> f = open(f.name, 'r')
    >>> parse_logfile(f)
    2022-09-14 21:04:40.206516500 dropped query (no authority) from 127.0.0.1:40289 (id 48745): a www.example.com
    >>> f.close()
    >>> from os import remove
    >>> remove(f.name)

    """
    # Open a pipe to tai64nlocal. We'll write lines of our input file
    # (the log file) to it, and read back the same lines but with
    # friendly timestamps in them. bufsize=0 keeps the pipe unbuffered
    # so each line comes back as soon as we write it.
    from subprocess import Popen, PIPE
    tai = Popen(["tai64nlocal"], stdin=PIPE, stdout=PIPE, text=True, bufsize=0)

    for line in file:
        tai.stdin.write(line)
        line = tai.stdout.readline()

        friendly_line = handle_tinydns_log(line)
        if not friendly_line:
            friendly_line = handle_dnscache_log(line)
        if not friendly_line:
            friendly_line = line

        print(friendly_line)

    # Close the pipe and reap the child. Without this, each call
    # leaked two pipe file descriptors and left a zombie tai64nlocal
    # process behind for the remainder of the program's lifetime.
    tai.stdin.close()
    tai.stdout.close()
    tai.wait()
483
def main():
    r"""
    The entry point to the program.

    This function is responsible only for parsing any command-line
    arguments, and then calling :func:`parse_logfile` on them.
    """
    from argparse import ArgumentParser, FileType
    from sys import stdin

    # Create an argument parser using the file's docstring as its
    # description.
    parser = ArgumentParser(description=__doc__)

    # Accept zero or more positional arguments, collected into a list
    # of "logfiles". When none are given, read from stdin instead.
    parser.add_argument("logfiles",
                        metavar="LOGFILE",
                        type=FileType("r"),
                        nargs="*",
                        default=[stdin],
                        help="djbdns logfile to process (default: stdin)")

    # Warning: argparse automatically opens its file arguments here,
    # and they only get closed when the program terminates. There's no
    # real benefit to closing them one-at-a-time after calling
    # parse_logfile(), because the "scarce" resource of open file
    # descriptors gets consumed immediately, before any processing has
    # happened. In other words, if you're going to run out of file
    # descriptors, it's going to happen right now.
    #
    # So anyway, don't run this on several million logfiles.
    args = parser.parse_args()
    for logfile in args.logfiles:
        parse_logfile(logfile)


if __name__ == "__main__":
    main()