#!/usr/bin/python3
"""
Convert tinydns and dnscache logs to human-readable form
"""

import re, typing
from struct import pack
from subprocess import Popen, PIPE
from time import strftime, gmtime


## Regular expressions for matching tinydns/dnscache log lines. We
## compile these once here rather than within the corresponding
## matching functions, because the latter get executed repeatedly.

# This first pattern is used to match the timestamp format that the
# tai64nlocal program produces. It appears in both dnscache and
# tinydns lines, after they've been piped through tai64nlocal, of
# course.
timestamp_pat = r'[\d-]+ [\d:\.]+'
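# For example, a line that has been piped through tai64nlocal begins with a
# local timestamp like "2022-09-15 18:37:33.863805500" (see the doctests
# below); that prefix is what this pattern is intended to match.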

# The regex to match dnscache log lines.
dnscache_log_re = re.compile(fr'({timestamp_pat}) (\w+)(.*)')

# The "hex4" pattern matches a string of four hexadecimal digits. This
# is used, for example, by tinydns to encode the query type
# identifier.
hex4_pat = r'[0-9a-f]{4}'

# The IP pattern matches a string of either 8 or 32 hexadecimal
# characters, which correspond to IPv4 and IPv6 addresses,
# respectively, in tinydns logs.
ip_pat = r'[0-9a-f]{8,32}'

# The regex to match tinydns log lines.
tinydns_log_re = re.compile(
    rf'({timestamp_pat}) ({ip_pat}):({hex4_pat}):({hex4_pat}) ([\+\-IC/]) ({hex4_pat}) (.*)'
)
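
# As an illustration (taken from the handle_tinydns_log() doctest below), a
# tinydns line such as
#
#   2022-09-14 21:04:40.206516500 7f000001:9d61:be69 - 0001 www.example.com
#
# is captured by this regex as the timestamp, the client IP ("7f000001"),
# the client port ("9d61"), the query id ("be69"), the response/drop code
# ("-"), the query type ("0001"), and the query name ("www.example.com").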

# A dictionary mapping query type identifiers, in decimal, to their
# friendly names for tinydns. Reference:
#
#   https://en.wikipedia.org/wiki/List_of_DNS_record_types
#
# Note that the mapping here is non-exhaustive, and that tinydns will
# log responses for record types that it does not know about.
query_type = {
    1: "a",
    2: "ns",
    5: "cname",
    6: "soa",
    12: "ptr",
    13: "hinfo",
    15: "mx",
    16: "txt",
    17: "rp",
    24: "sig",
    25: "key",
    28: "aaaa",
    33: "srv",
    35: "naptr",
    38: "a6",
    48: "dnskey",
    52: "tlsa",
    65: "https",
    252: "axfr",
    255: "any",
    257: "caa"
}

# tinydns can drop a query for one of three reasons; this dictionary
# maps the symbol that gets logged in each case to a human-readable
# reason. We include the "+" case here, indicating that the query was
# NOT dropped, to avoid a special case later on when we're formatting
# the human-readable output.
query_drop_reason = {
    "+": None,
    "-": "no authority",
    "I": "invalid query",
    "C": "invalid class",
    "/": "couldn't parse"
}


def convert_ip(ip : str) -> typing.Optional[str]:
    """
    Convert a hex string representing an IP address to conventional
    human-readable form, i.e. dotted-quad decimal for IPv4, and
    8 colon-separated hex shorts for IPv6.

    Examples
    --------

    >>> convert_ip("7f000001")
    '127.0.0.1'
    >>> convert_ip("00000000000000000000ffff7f000001")
    '0000:0000:0000:0000:0000:ffff:7f00:0001'

    """
    if len(ip) == 8:
        # IPv4, e.g. "7f000001" -> "7f 00 00 01" -> "127.0.0.1"
        return "%d.%d.%d.%d" % tuple(pack(">L", int(ip, 16)))
    elif len(ip) == 32:
        # IPv6 is actually simpler -- it's just a string-slicing operation.
        return ":".join([ip[(4*i) : (4*i+4)] for i in range(8)])


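# Each of the decode_* helpers below rewrites one whitespace-separated field
# of a log line in place, converting it to a human-readable form. An
# illustrative call (not taken from a real log):
#
#   words = ["7f000001:a3db:4fb9"]
#   decode_client(words, 0)
#   # words is now ["127.0.0.1:41947 (id 20409)"]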
def decode_client(words, i):
    chunks = words[i].split(":")
    if len(chunks) == 2: # ip:port
        words[i] = "%s:%d" % (convert_ip(chunks[0]), int(chunks[1], 16))
    elif len(chunks) == 3:
        words[i] = "%s:%d (id %d)" % (convert_ip(chunks[0]),
                                      int(chunks[1], 16),
                                      int(chunks[2], 16))

def decode_ip(words, i):
    words[i] = convert_ip(words[i])

def decode_ttl(words, i):
    words[i] = "TTL=%s" % words[i]

def decode_serial(words, i):
    serial = int(words[i])
    words[i] = "#%d" % serial

def decode_type(words, i):
    qt = words[i]
    words[i] = query_type.get(int(qt), qt)

def handle_dnscache_log(line : str) -> typing.Optional[str]:
    """
    Handle a single log line if it matches the ``dnscache_log_re`` regex.

    Parameters
    ----------

    line : string
        The log line that might match ``dnscache_log_re``.

    Returns
    -------

    Either the human-readable string if the log line was handled (that
    is, if it was really a dnscache log line), or ``None`` if it was
    not.

    Examples
    --------

    >>> line = "2022-09-15 18:37:33.863805500 query 1 7f000001:a3db:4fb9 1 www.example.com."
    >>> handle_dnscache_log(line)
    '2022-09-15 18:37:33.863805500 query #1 127.0.0.1:41947 (id 20409) a www.example.com.'

    >>> line = "2022-09-15 18:37:33.863874500 tx 0 1 www.example.com. . c0a80101"
    >>> handle_dnscache_log(line)
    '2022-09-15 18:37:33.863874500 tx g=0 a www.example.com. . 192.168.1.1'

    >>> line = "2022-09-15 18:37:33.878529500 rr c0a80101 20865 1 www.example.com. 5db8d822"
    >>> handle_dnscache_log(line)
    '2022-09-15 18:37:33.878529500 rr 192.168.1.1 TTL=20865 a www.example.com. 93.184.216.34'

    >>> line = "2022-09-15 18:37:33.878532500 stats 1 43 1 0"
    >>> handle_dnscache_log(line)
    '2022-09-15 18:37:33.878532500 stats count=1 motion=43 udp-active=1 tcp-active=0'

    >>> line = "2022-09-15 18:37:33.878602500 sent 1 49"
    >>> handle_dnscache_log(line)
    '2022-09-15 18:37:33.878602500 sent #1 49'

    >>> line = "this line is nonsense"
    >>> handle_dnscache_log(line)

    """
    match = dnscache_log_re.match(line)
    if not match:
        return None

    (timestamp, event, data) = match.groups()

    words = data.split()
    if event == "cached":
        if words[0] not in ("cname", "ns", "nxdomain"):
            decode_type(words, 0)

    elif event == "drop":
        decode_serial(words, 0)

    elif event == "lame":
        decode_ip(words, 0)

    elif event == "nodata":
        decode_ip(words, 0)
        decode_ttl(words, 1)
        decode_type(words, 2)

    elif event == "nxdomain":
        decode_ip(words, 0)
        decode_ttl(words, 1)

    elif event == "query":
        decode_serial(words, 0)
        decode_client(words, 1)
        decode_type(words, 2)

    elif event == "rr":
        decode_ip(words, 0)
        decode_ttl(words, 1)
        if words[2] not in ("cname", "mx", "ns", "ptr", "soa"):
            decode_type(words, 2)
        if words[2] == "a": # decode answer to an A query
            decode_ip(words, 4)
        if words[2] == "txt": # text record
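            # The response field is a hex string: the first byte gives the
            # length, and the remaining bytes are the character data,
            # possibly cut short with a trailing "...". An illustrative
            # (not logged) value: "0568656c6c6f" decodes to 5:"hello".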
            response = words[4]
            if response.endswith("..."):
                ellipsis = "..."
                response = response[0:-3]
            else:
                ellipsis = ""
            length = int(response[0:2], 16)
            chars = []
            for i in range(1, len(response)//2):
                chars.append(chr(int(response[2*i : (2*i)+2], 16)))
            words[4] = "%d:\"%s%s\"" % (length, "".join(chars), ellipsis)

    elif event == "sent":
        decode_serial(words, 0)

    elif event == "stats":
        words[0] = "count=%s" % words[0]
        words[1] = "motion=%s" % words[1]
        words[2] = "udp-active=%s" % words[2]
        words[3] = "tcp-active=%s" % words[3]

    elif event == "tx":
        words[0] = "g=%s" % words[0]
        decode_type(words, 1)
        # words[2] = name
        # words[3] = control (domain for which these servers are believed
        #            to be authoritative)
        for i in range(4, len(words)):
            decode_ip(words, i)

    elif event in ("tcpopen", "tcpclose"):
        decode_client(words, 0)

    return f"{timestamp} {event} " + " ".join(words)



def handle_tinydns_log(line : str) -> typing.Optional[str]:
    """
    Handle a single log line if it matches the ``tinydns_log_re`` regex.

    Parameters
    ----------

    line : string
        The log line that might match ``tinydns_log_re``.

    Returns
    -------

    Either the human-readable string if the log line was handled (that
    is, if it was really a tinydns log line), or ``None`` if it was
    not.

    Examples
    --------

    >>> line = "2022-09-14 21:04:40.206516500 7f000001:9d61:be69 - 0001 www.example.com"
    >>> handle_tinydns_log(line)
    '2022-09-14 21:04:40.206516500 dropped query (no authority) from 127.0.0.1:40289 (id 48745): a www.example.com'

    >>> line = "this line is nonsense"
    >>> handle_tinydns_log(line)

    """
    match = tinydns_log_re.match(line)
    if not match:
        return None

    (timestamp, ip, port, id, code, type, name) = match.groups()
    ip = convert_ip(ip)
    port = int(port, 16)
    id = int(id, 16)

    # Convert the "type" field to a human-readable record type name
    # using the query_type dictionary. If the right name isn't present
    # in the dictionary, we use the (decimal) type id instead.
    type = int(type, 16)               # "001c" -> 28
    type = query_type.get(type, type)  # 28 -> "aaaa"

    line_tpl = "{timestamp} "

    reason = query_drop_reason[code]
    if code == "+":
        line_tpl += "sent response to {ip}:{port} (id {id}): {type} {name}"
    else:
        line_tpl += "dropped query ({reason}) from {ip}:{port}"
        if code != "/":
            # If the query can actually be parsed, the log line is a
            # bit more informative than it would have been otherwise.
            line_tpl += " (id {id}): {type} {name}"

    return line_tpl.format(timestamp=timestamp,
                           reason=reason,
                           ip=ip,
                           port=port,
                           id=id,
                           type=type,
                           name=name)


def parse_logfile(file : typing.TextIO):
    r"""
    Process a single log ``file``.

    Parameters
    ----------

    file : typing.TextIO
        An open log file, or stdin.

    Examples
    --------

    >>> line = "@4000000063227a320c4f3114 7f000001:9d61:be69 - 0001 www.example.com\n"
    >>> from tempfile import NamedTemporaryFile
    >>> with NamedTemporaryFile(mode="w", delete=False) as f:
    ...     _ = f.write(line)
    >>> f = open(f.name, 'r')
    >>> parse_logfile(f)
    2022-09-14 21:04:40.206516500 dropped query (no authority) from 127.0.0.1:40289 (id 48745): a www.example.com
    >>> f.close()
    >>> from os import remove
    >>> remove(f.name)

    """
    # Open pipe to tai64nlocal: we will write lines of our input (the
    # raw log file) to it, and read log lines with readable timestamps
    # from it.
    tai = Popen(["tai64nlocal"], stdin=PIPE, stdout=PIPE, text=True, bufsize=0)
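    # For example, tai64nlocal rewrites the raw "@4000000063227a320c4f3114"
    # prefix from the doctest above into the local timestamp
    # "2022-09-14 21:04:40.206516500" that the regexes expect.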

    for line in file:
        tai.stdin.write(line)
        line = tai.stdout.readline()

        friendly_line = handle_tinydns_log(line)
        if not friendly_line:
            friendly_line = handle_dnscache_log(line)
            if not friendly_line:
                friendly_line = line

        print(friendly_line)

def main():
    r"""
    The entry point to the program.

    This function is responsible only for parsing any command-line
    arguments, and then calling :func:`parse_logfile` on them.
    """
    # Create an argument parser using the file's docstring as its
    # description.
    from argparse import ArgumentParser, FileType
    parser = ArgumentParser(description = __doc__)

    # Parse zero or more positional arguments into a list of
    # "logfiles". If none are given, read from stdin instead.
    from sys import stdin
    parser.add_argument("logfiles",
                        metavar="LOGFILE",
                        type=FileType("r"),
                        nargs="*",
                        default=[stdin],
                        help="djbdns logfile to process (default: stdin)")
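
    # Example invocations (the log paths are purely illustrative, not
    # prescribed by this script):
    #
    #   djbdns-logparse.py /service/dnscache/log/main/current
    #   cat /service/tinydns/log/main/current | djbdns-logparse.py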

    # Warning: argparse automatically opens its file arguments here,
    # and they only get closed when the program terminates. There's no
    # real benefit to closing them one-at-a-time after calling
    # parse_logfile(), because the "scarce" resource of open file
    # descriptors gets consumed immediately, before any processing has
    # happened. In other words, if you're going to run out of file
    # descriptors, it's going to happen right now.
    #
    # So anyway, don't run this on several million logfiles.
    args = parser.parse_args()
    for f in args.logfiles:
        parse_logfile(f)


if __name__ == "__main__":
    main()