aboutsummaryrefslogtreecommitdiffstatshomepage
path: root/apt-panopticon.lua
blob: e454740b3e0b683c3b4820e30d2ecfc4843546cb (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
#!/usr/bin/env luajit

local APT = require 'apt-panopticommon'
local D = APT.D
local I = APT.I
local W = APT.W
local E = APT.E
local C = APT.C
local arg, sendArgs = APT.parseArgs({...})
APT.html = true

--[[ TODO - What to do about HTTPS://deb.devuan.org/ redirects.
	    Some mirrors give a 404.
		Sledjhamr gives a 404, because it's not listening on 443 for deb.devuan.org.
	    Some mirrors give a 200.
		They shouldn't have the proper certificate, but they give a result anyway.
]]

-- Defaults handed to url.parse() for any URL component that is missing.
local defaultURL = {scheme = "http"}
-- Devuan release code names that get tested.
local releases = {"jessie", "ascii", "beowulf", "ceres"}
-- Per-release files to fetch from each mirror; most are commented out to
-- keep the bandwidth used by a test run sane.
local releaseFiles =
{
    -- Release file.
    "/Release",				-- 3.7 MB
    "/Release.gpg",			--
--    "/InRelease",			-- 3.7 MB
--    "/main/binary-all/Packages.xz",	-- 2.6 GB for all that changed recently.
    -- Contents files.			-- 3.3 GB
--    "/main/Contents-all.xz",
--    "/main/Contents-amd64.xz",
--    "/main/Contents-arm64.xz",
--    "-security/main/Contents-all.xz",
--    "-security/main/Contents-amd64.xz",
--    "-security/main/Contents-arm64.xz",
}
-- Repo names known not to exist on any mirror; repoExists() skips these.
local notExist =
{
    "ceres-security"	-- This will never exist, it's our code name for the testing suite.
}
-- Known Debian packages used as reference downloads when probing a mirror.
local referenceDebs =
{
    -- Debian package.
    "merged/pool/DEBIAN/main/d/dash/dash_0.5.8-2.4_amd64.deb",
    -- Debian security package.  NOTE this one should always be redirected?
    "merged/pool/DEBIAN-SECURITY/updates/main/a/apt/apt-transport-https_1.4.9_amd64.deb",
}
-- Known Devuan packages used as reference downloads when probing a mirror.
local referenceDevs =
{
    -- Devuan package.  NOTE this one should not get redirected, but that's more a warning than an error.
    "merged/pool/DEVUAN/main/d/desktop-base/desktop-base_3.0_all.deb",
    "merged/pool/DEVUAN/main/u/util-linux/util-linux_2.32.1-0.1+devuan2.1_amd64.deb",
}

-- Human readable descriptions of curl's command line exit codes, indexed by
-- the exit code itself.  Taken from the curl man page; gaps are codes that
-- curl no longer uses.  Used by checkHEAD() to report curl failures.
local curlStatus = 
{
    [1 ] = "Unsupported protocol. This build of curl has no support for this protocol.",
    [2 ] = "Failed to initialize.",
    [3 ] = "URL malformed. The syntax was not correct.",
    [4 ] = "A feature or option that was needed to perform the desired request was not enabled or was explicitly disabled at build-time. To make curl able to do this, you probably need another build of libcurl!",
    [5 ] = "Couldn't resolve proxy. The given proxy host could not be resolved.",
    [6 ] = "Couldn't resolve host. The given remote host was not resolved.",
    [7 ] = "Failed to connect to host.",
    [8 ] = "Weird server reply. The server sent data curl couldn't parse.",
    [9 ] = "FTP access denied. The server denied login or denied access to the particular resource or directory you wanted to reach. Most often you tried to change to a directory that doesn't exist on the server.",
    [10] = "While waiting for the server to connect back when an active FTP session is used, an error code was sent over the control connection or similar.",
    [11] = "FTP weird PASS reply. Curl couldn't parse the reply sent to the PASS request.",
    [12] = "During an active FTP session while waiting for the server to connect, the CURLOPT_ACCEPTTIMEOUT_MS (or the internal default) timeout expired.",
    [13] = "FTP weird PASV reply, Curl couldn't parse the reply sent to the PASV request.",
    [14] = "FTP weird 227 format. Curl couldn't parse the 227-line the server sent.",
    [15] = "FTP can't get host. Couldn't resolve the host IP we got in the 227-line.",
    [16] = "A problem was detected in the HTTP2 framing layer. This is somewhat generic and can be one out of several problems, see the error buffer for details.",
    [17] = "FTP couldn't set binary. Couldn't change transfer method to binary.",
    [18] = "Partial file. Only a part of the file was transferred.",
    [19] = "FTP couldn't download/access the given file, the RETR (or similar) command failed.",

    [21] = "FTP quote error. A quote command returned error from the server.",
    [22] = "HTTP page not retrieved. The requested url was not found or returned another error with the HTTP error code being 400 or above. This return code only appears if -f, --fail is used.",
    [23] = "Write error. Curl couldn't write data to a local filesystem or similar.",

    [25] = "FTP couldn't STOR file. The server denied the STOR operation, used for FTP uploading.",
    [26] = "Read error. Various reading problems.",
    [27] = "Out of memory. A memory allocation request failed.",
    [28] = "Operation timeout. The specified time-out period was reached according to the conditions.",

    [30] = "FTP PORT failed. The PORT command failed. Not all FTP servers support the PORT command, try doing a transfer using PASV instead!",
    [31] = "FTP couldn't use REST. The REST command failed. This command is used for resumed FTP transfers.",

    [33] = "HTTP range error. The range \"command\" didn't work.",
    [34] = "HTTP post error. Internal post-request generation error.",
    [35] = "SSL connect error. The SSL handshaking failed.",
    [36] = "FTP bad download resume. Couldn't continue an earlier aborted download.",
    [37] = "FILE couldn't read file. Failed to open the file. Permissions?",
    [38] = "LDAP cannot bind. LDAP bind operation failed.",
    [39] = "LDAP search failed.",

    [41] = "Function not found. A required LDAP function was not found.",
    [42] = "Aborted by callback. An application told curl to abort the operation.",
    [43] = "Internal error. A function was called with a bad parameter.",

    [45] = "Interface error. A specified outgoing interface could not be used.",

    [47] = "Too many redirects. When following redirects, curl hit the maximum amount.",
    [48] = "Unknown option specified to libcurl. This indicates that you passed a weird option to curl that was passed on to libcurl and rejected. Read up in the manual!",
    [49] = "Malformed telnet option.",

    [51] = "The peer's SSL certificate or SSH MD5 fingerprint was not OK.",
    [52] = "The server didn't reply anything, which here is considered an error.",
    [53] = "SSL crypto engine not found.",
    [54] = "Cannot set SSL crypto engine as default.",
    [55] = "Failed sending network data.",
    [56] = "Failure in receiving network data.",

    [58] = "Problem with the local certificate.",
    [59] = "Couldn't use specified SSL cipher.",
    [60] = "Peer certificate cannot be authenticated with known CA certificates.",
    [61] = "Unrecognized transfer encoding.",
    [62] = "Invalid LDAP URL.",
    [63] = "Maximum file size exceeded.",
    [64] = "Requested FTP SSL level failed.",
    [65] = "Sending the data requires a rewind that failed.",
    [66] = "Failed to initialise SSL Engine.",
    [67] = "The user name, password, or similar was not accepted and curl failed to log in.",
    [68] = "File not found on TFTP server.",
    [69] = "Permission problem on TFTP server.",
    [70] = "Out of disk space on TFTP server.",
    [71] = "Illegal TFTP operation.",
    [72] = "Unknown TFTP transfer ID.",
    [73] = "File already exists (TFTP).",
    [74] = "No such user (TFTP).",
    [75] = "Character conversion failed.",
    [76] = "Character conversion functions required.",
    [77] = "Problem with reading the SSL CA cert (path? access rights?).",
    [78] = "The resource referenced in the URL does not exist.",
    [79] = "An unspecified error occurred during the SSH session.",
    [80] = "Failed to shut down the SSL connection.",
    [81] = "Socket is not ready for send/recv wait till it's ready and try again. This return code is only returned from curl_easy_recv and curl_easy_send.",
    [82] = "Could not load CRL file, missing or wrong format (added in 7.19.0).",
    [83] = "Issuer check failed (added in 7.19.0).",
    [84] = "The FTP PRET command failed",
    [85] = "RTSP: mismatch of CSeq numbers",
    [86] = "RTSP: mismatch of Session Identifiers",
    [87] = "unable to parse FTP file list",
    [88] = "FTP chunk callback reported error",
    [89] = "No connection available, the session will be queued",
    [90] = "SSL public key does not matched pinned public key",
    [91] = "Status returned failure when asked with CURLOPT_SSL_VERIFYSTATUS.",
    [92] = "Stream error in the HTTP/2 framing layer.",
    [93] = "An API function was called from inside a callback.",
    [94] = "An authentication function returned an error.",
    [95] = "A problem was detected in the HTTP/3 layer. This is somewhat generic and can be one out of several problems, see the error buffer for details.",
}


local socket = require 'socket'
local ftp = require 'socket.ftp'
local http = require 'socket.http'
local url = require 'socket.url'


local ip = ""


-- Does the repo named at the start of r exist?  The leading run of letters
-- and hyphens (e.g. "ceres-security" out of "ceres-security/Release") is
-- compared against the notExist list; anything listed there returns false.
local repoExists = function (r)
    local name = r:match("([%a-]*)")
    if name == nil then return false end
    for _, absent in ipairs(notExist) do
	if absent == name then return false end
    end
    return true
end

-- Cache of DNS records, keyed by host name.  IP[host][value] = record type.
local IP = {}
-- Gather the DNS records for host via the dig command, filling in the IP
-- cache above.  A CNAME triggers a recursive lookup of its target, whose
-- whole record table is then attached under the CNAME's value.  Intentionally
-- global - checkHost()/getMirrors() call it, and it recurses.
-- NOTE(review): SRV handling is still a stub; it only prints a message.
gatherIPs = function (host)
    if nil == IP[host] then
	local IPs
	local dig = io.popen('dig +keepopen +noall +nottlid +answer ' .. host .. ' A ' .. host .. ' AAAA ' .. host .. ' CNAME ' .. host .. ' SRV | sort -r | uniq')
	repeat
	    IPs = dig:read("*l")
	    if nil ~= IPs then
		-- Each answer line looks like "name. IN TYPE value".
		for k, t, v in IPs:gmatch("([%w_%-%.]*)%.%s*IN%s*(%a*)%s*(.*)") do
		    -- Strip the trailing dot dig puts on fully qualified names.
		    if "." == v:sub(-1, -1) then v = v:sub(1, -2) end
		    if nil == IP[k] then IP[k] = {} end
		    IP[k][v] = t
		    D("  DNS record " .. host .. " == " .. k .. " type " .. t .. " -> " .. v)
		    if t == "CNAME" then
			gatherIPs(v)
			IP[k][v] = IP[v]
		    elseif t == "SRV" then
			-- Bug fix: this compared the record VALUE (v) to "SRV"
			-- instead of the record TYPE (t), so it could never match.
			print("SVR record found, now what do we do?")
		    end
		end
	    end
	until nil == IPs
	-- Bug fix: the pipe from io.popen was never closed.
	dig:close()
    end
end

-- Returns FTP directory listing for URL u as a single string, or
-- nil plus an error message on failure.
local nlst = function (u)
    -- Bug fix: ltn12 was never require()d anywhere in this file, so the
    -- p.sink line below indexed a nil global.  It ships with luasocket,
    -- which this file already depends on.
    local ltn12 = require 'ltn12'
    local t = {}
    local p = url.parse(u)
    p.command = "nlst"
    p.sink = ltn12.sink.table(t)
    local r, e = ftp.get(p)
    return r and table.concat(t), e
end

-- Timeouts seen while checking the current URL, and across the whole run.
local timeouts = 0;
local totalTimeouts = 0
-- HEAD test a single URL against a given host (or IP) using the curl command
-- line tool, following redirects by recursing.  Intentionally global - it
-- recurses and calls checkHost(), which calls back into it indirectly.
-- host   - host name or IP to actually connect to (may differ from URL's host).
-- URL    - the URL being tested.
-- r      - redirect depth so far; defaults to 0.
-- retry  - retry count so far; defaults to 0.
-- sanity - true when this is a URLSanity run; defaults to false.
-- Results are reported through the D/I/W/E logging helpers; nothing returned.
checkHEAD = function (host, URL, r, retry, sanity)
    if nil == r then r = 0 end
    if nil == retry then retry = 0 end
    if nil == sanity then sanity = false end
    local check = "Checking file"
    local PU = url.parse(URL, defaultURL)
    local pu = url.parse(PU.scheme .. "://" .. host, defaultURL)
    if 0 < r then
	check = "Redirecting to"
    end
    if 0 < retry then
	-- Random pause before a retry, to go easy on the server.
	os.execute("sleep " .. math.random(1, 3))
	check = "Retry " .. retry .. " " .. check
    end
    -- Give up early when any of the timeout / redirect / retry budgets is blown.
    if 2 <= timeouts then
	E("too many timeouts!  " .. check .. " " .. host .. " -> " .. URL, PU.scheme, "", host)
	return
    end
    if 4 <= (totalTimeouts) then
	E("Way too many timeouts!", PU.scheme, "", host)
	return
    end
    if 20 <= r then
	E("too many redirects!  " .. check .. " " .. host .. " -> " .. URL, PU.scheme, "", host)
	return
    end
    if 4 <= retry then
	E("too many retries!  " .. check .. " " .. host .. " -> " .. URL, PU.scheme, "", host)
	return
    end
    D(PU.scheme .. " ://  " .. check .. " " .. host .. " -> " .. URL)
    if not APT.testing(PU.scheme, host) then D("Not testing " .. PU.scheme .. " " .. host .. " -> " .. URL);  return end
    -- TODO - Perhaps we should try it anyway, and mark it as a warning if it DOES work?
    if "https" == PU.scheme and APT.options.roundRobin.value == host then D("Not testing " .. PU.scheme .. " " .. host .. " -> " .. URL .. " mirrors shouldn't have the correct cert.");  return end

    --[[ Using curl command line -
	-I - HEAD
	--connect-to domain:port:IP:port - connect to IP, but use SNI from URL.
	-header "" - add extra headers.
	-L - DO follow redirects.
	--max-redirs n - set maximum redirects, default is 50, -1 = unlimited.
	--retry n - maximum retries, default is 0, no retries.
	-o file - write to file instead of stdout.
	--path-as-is - https://curl.haxx.se/libcurl/c/CURLOPT_PATH_AS_IS.html might be useful for URLSanity.
	-s silent - don't output progress or error messages.
	--connect-timeout n - timeout in seconds.
	    Should return with error code 28 on a timeout?
	-D file - write the received headers to a file.  This includes the status code and string.
    ]]
    -- Per-check scratch file name, derived from host + URL path.
    local fname = host .. "_" .. PU.host .. "_" .. PU.path:gsub("/", "_") .. ".txt"
    local hdr = ""
    local IP = ""
    -- When connecting to an IP (or other host) on behalf of URL's host, keep
    -- the original Host header / SNI via --connect-to.
    if pu.host ~= PU.host then
	if "http" == PU.scheme then
	    hdr = '-H "Host: ' .. host .. '"'
	end
	IP = '--connect-to "' .. pu.host .. '::' .. PU.host .. ':"'
    end
    local cmd = 'ionice -c3 nice -n 19 curl -I --retry 0 -s --path-as-is --connect-timeout ' .. APT.options.timeout.value .. ' --max-redirs 0 ' ..
					     IP .. ' ' .. '-o /dev/null -D results/"HEADERS_' .. fname .. '" ' ..
		hdr .. ' -w "#%{http_code} %{ssl_verify_result} %{url_effective}\\n" ' .. PU.scheme .. '://' .. host .. PU.path .. ' >>results/"STATUS_' .. fname .. '"'
    local status, result = APT.execute(cmd)
    -- Append the headers to the status file, so both get parsed below.
    os.execute('cat results/"HEADERS_' .. fname .. '" >>results/"STATUS_' .. fname .. '" 2>/dev/null; rm -f results/"HEADERS_' .. fname .. '" 2>/dev/null')
    if "0" ~= status then
	-- curl itself failed; translate the exit code and maybe retry.
	local msg = curlStatus[0 + status]
	if nil == msg then msg = "UNKNOWN CURL STATUS CODE!" end
	if sanity then
	    E("  The curl command return an error code of " .. status .. " - " .. msg, PU.scheme, "URLSanity", host)
	else
	    E("  The curl command return an error code of " .. status .. " - " .. msg, PU.scheme, "", host)
	end
	-- 28 is curl's timeout exit code, 7 is failure to connect.
	if ("28" == status) or ("7" == status) then
	    if sanity then
		E("  TIMEOUT " .. timeouts + 1 .. ", retry " .. retry + 1, PU.scheme, "URLSanity", host)
	    else
		E("  TIMEOUT " .. timeouts + 1 .. ", retry " .. retry + 1, PU.scheme, "", host)
	    end
	    timeouts = timeouts + 1
	end
	checkHEAD(host, URL, r, retry + 1, sanity)
	return
    end
    -- Parse the "#code sslresult url" line and the response headers.
    local rfile, e = io.open("results/STATUS_" .. fname, "r")
    local code = "000"
    local cstr = ""
    local location = nil
    if nil == rfile then W("opening results/STATUS_" .. fname .. " file - " .. e) else
	for line in rfile:lines("*l") do
	    if "#" == line:sub(1, 1) then
		code = line:sub(2, 4)
		if ("https" == PU.scheme) and ("0" ~= line:sub(6, 6)) then E("  The certificate is invalid.", PU.scheme, "https", host) end
	    elseif "http" == line:sub(1, 4):lower() then
		-- -2 coz the headers file gets a \r at the end.
		cstr = line:sub(14, -2)
	    elseif "location" == line:sub(1, 8):lower() then
		location = line:sub(11, -2)
	    end
	end
    end
    os.execute('rm -f results/"STATUS_' .. fname .. '" 2>/dev/null')
    -- 4xx and 5xx are errors; anything else is logged and redirects followed.
    if ("4" == tostring(code):sub(1, 1)) or ("5" == tostring(code):sub(1, 1)) then
	if sanity then
	    E("  " .. code .. " " .. cstr .. ".   " .. check .. " " .. host .. " -> " .. URL, PU.scheme, "URLSanity", host)
	else
	    E("  " .. code .. " " .. cstr .. ".   " .. check .. " " .. host .. " -> " .. URL, PU.scheme, "", host)
	end
    else
	I("  " .. code .. " " .. cstr .. ".   " .. check .. " " .. host .. " -> " .. URL)
--	timeouts = timeouts - 1	-- Backoff the timeouts count if we managed to get through.
	if nil ~= location then
	    pu = url.parse(location, defaultURL)
	    if ('http' == location:sub(1, 4)) and (pu.scheme ~= PU.scheme) then	-- Sometimes a location sans scheme is returned, this is not a protocol change.
		if APT.testing("Protocol") then W("    protocol changed during redirect!  " .. check .. " " .. host .. " -> " .. URL .. " -> " .. location, PU.scheme, "Protocol", host) end
		if (pu.host == host) and pu.path == PU.path then D("Not testing protocol change " .. URL .. " -> " .. location);  return end
	    end

	    if location == URL then
		E("    redirect loop!  " .. check .. " " .. host .. " -> " .. URL, PU.scheme, "", host)
	    elseif nil == pu.host then
		I("    relative redirect.  " .. check .. " " .. host .. " -> " .. URL .. " -> " .. location)
		checkHEAD(host, PU.scheme .. "://" .. PU.host .. location, r + 1, retry, sanity)
	    elseif (PU.host == pu.host) or (host == pu.host) then
		checkHEAD(pu.host, location, r + 1, retry, sanity)
	    else
		--[[ The hard part here is that we end up throwing ALL of the test files at the redirected location.
		    Not good for deb.debian.org, which we should only be throwing .debs at.
		    What we do is loop through the DNS entries, and only test the specific protocol & file being tested here.

		    This is what I came up with for checking if we are already testing a specific URL.
		    Still duplicates a tiny bit, but much less than the previous find based method.
		]]
		local file = pu.host .. "://" .. pu.path
		-- Atomically claim this URL via a marker file, so parallel runs don't retest it.
		local f = io.popen(string.format('if [ ! -f results/%s.check ] ; then touch results/%s.check; echo -n "check"; fi', file:gsub("/", "_"), file:gsub("/", "_") )):read("*a")
		if (nil == f) or ("check" == f) then
		    I("  Now checking redirected host " .. file)
		    checkHost(pu.host, pu.host, nil, "redir", pu.path)
		else
		    D("  Already checking " .. file)
		end
	    end
	end
    end
end

-- Run checkHEAD for the given scheme, plus a URL sanity variant with extra
-- slashes stuffed into the path, keeping a running total of timeouts.
-- Returns true when the timeout budget is blown and the caller should stop
-- testing this host; false otherwise.
local checkTimeouts = function(host, scheme, URL)
    -- Fold the per-check timeout count into the grand total, then reset it.
    local rollUp = function()
	totalTimeouts = totalTimeouts + timeouts
	timeouts = 0
    end
    if APT.testing(scheme) then
	rollUp()
	checkHEAD(host, scheme .. "://" .. URL)
	if totalTimeouts >= 4 then
	    E("Way too many timeouts!", scheme, "", host)
	    return true
	end
    end
    if APT.testing("URLSanity") then
	-- Mirrors should cope with silly (but legal) extra slashes.
	local sillyURL = URL:gsub("merged/", "merged///")
	rollUp()
	checkHEAD(host, scheme .. "://" .. sillyURL, 0, 0, true)
	if totalTimeouts >= 4 then
	    E("Way too many timeouts!", scheme, "URLSanity", host)
	    return true
	end
    end
    return false
end

-- Probe one host/IP pair over http and https.  With an explicit file, only
-- that file is tested; otherwise the reference Devuan packages and the
-- release files of every known release are tried in turn.  Stops early the
-- moment checkTimeouts() reports the timeout budget is exhausted.
local checkFiles = function (host, ip, path, file)
    timeouts = 0
    path = path or ""
    if file ~= nil then
	if "redir" == ip then ip = host end
	I("  Checking IP for file " .. host .. " -> " .. ip .. " " .. path .. " " .. file)
	local target = ip .. path .. "/" .. file
	if checkTimeouts(host, "http",  target) then return end
	if checkTimeouts(host, "https", target) then return end
	return
    end
    I("  Checking IP " .. host .. " -> " .. ip .. " " .. path)
    for _, deb in pairs(referenceDevs) do
	if checkTimeouts(host, "http",  ip .. path .. "/" .. deb) then return end
	if checkTimeouts(host, "https", ip .. path .. "/" .. deb) then return end
    end
    for _, rel in pairs(releases) do
	for _, rf in pairs(releaseFiles) do
	    if repoExists(rel .. rf) then
		if checkTimeouts(host, "http",  ip .. path .. "/merged/dists/" .. rel .. rf) then return end
		if checkTimeouts(host, "https", ip .. path .. "/merged/dists/" .. rel .. rf) then return end
	    end
	end
    end
end

-- Kick off checks for a mirror.  When given a concrete IP, test it directly;
-- otherwise fork one copy of this script per A / AAAA record of the host, so
-- IPv4 and IPv6 get tested in parallel.  Intentionally global - checkHEAD()
-- calls it for redirected hosts.
-- orig - the mirror's name as listed in the mirror list.
-- host - the host actually being contacted; defaults to orig.
-- path - base path on the host; defaults to "".
-- ip   - an IP to test, or "redir" for a redirect target, or nil to fan out.
-- file - a single file to test, or nil/"" for the standard set.
checkHost = function (orig, host, path, ip, file)
    if nil == host then host = orig end
    if nil == path then path = "" end
    if nil == file then file = "" end
    local ph = url.parse("http://" .. host)
    if (nil ~= ip) and ("redir" ~= ip) then
	local po = url.parse("http://" .. orig)
	if "" ~= file then
	    D("checking redirected file " .. po.host .. " " .. file)
	    checkFiles(po.host, ip, path, file)
	else
	    checkFiles(po.host, ip, path)
	end
    else
	if orig == host then
	    D("checkHost " .. orig .. "" .. file)
	    -- -o marks the fork as testing the original host name itself.
	    if APT.testing("IPv4") then APT.fork("ionice -c3 ./apt-panopticon.lua " .. sendArgs .. " -o " .. orig .. path .. " " .. file) end
	else D("checkHost " .. orig .. " -> " .. host) end
	local h = APT.mirrors[ph.host]
	if nil == h then return end
	-- Fork a tester per DNS record; a nested table is a resolved CNAME.
	for k, v in pairs(h.IPs) do
	    if "table" == type(v) then
		for k1, v1 in pairs(v) do
		    if v1 == "A" then
			if APT.testing("IPv4") then APT.fork("ionice -c3 ./apt-panopticon.lua " .. sendArgs .. " " .. orig .. path .. " " .. k1 .. " " .. file) end
		    elseif v1 == "AAAA" then
			if APT.testing("IPv6") then APT.fork("ionice -c3 ./apt-panopticon.lua " .. sendArgs .. " " .. orig .. path .. " " .. k1 .. " " .. file) end
		    end
		end
	    else
		if v == "A" then
		    if APT.testing("IPv4") then APT.fork("ionice -c3 ./apt-panopticon.lua " .. sendArgs .. " " .. orig .. path .. " " .. k .. " " .. file) end
		elseif v == "AAAA" then
		    if APT.testing("IPv6") then APT.fork("ionice -c3 ./apt-panopticon.lua " .. sendArgs .. " " .. orig .. path .. " " .. k .. " " .. file) end
		end
	    end
	end
    end
end


-- Append a download of release file k (for release r, from host at base URL)
-- to the curl config file f.  Any existing local copy is first moved aside
-- as a .old file, because curl may delete the destination when a timestamp
-- conditional fetch fails; postDownload() restores it afterwards.
local addDownload = function(host, URL, f, r, k)
    local dest = "results/" .. host .. "/merged/dists/" .. r .. k
    if APT.checkFile(dest) then
	-- Curls "check timestamp and overwrite file" stuff sucks.
	-- -R means the destination file gets the timestamp of the remote file.
	-- Can only do ONE timestamp check per command, and all downloads in
	-- the config would share any extra If-Modified-Since header.
	-- Curl will DELETE the existing file if the timestamp fails to
	-- download a new one, hence the .old backup.
	os.execute(" mv   " .. dest .. " " .. dest .. ".old")
    end

    local source = 'http://' .. host .. URL .. '/merged/dists/' .. r .. k
    D('Downloading ' .. source)
    f:write('url "' .. source .. '"\n')
    f:write('output "' .. dest .. '"\n')
end

-- Post-process a downloaded release file k for release r from host: restore
-- the .old backup if curl deleted the original, decompress, verify the GPG
-- signature, and (on the reference site only) parse Packages into a sorted
-- summary used to diff old vs new package lists.
local postDownload = function(host, r, k)
    local file = k:match(".*/([%w%.%+%-_]*)$")	-- Get the filename.
    -- The directory part of k, i.e. k with the filename chopped off the end.
    local dir  = k:sub(1, 0 - (#file + 1))
    -- Put the .old backup back if the fresh download didn't happen.
    os.execute("if [ -f results/" .. host .. "/merged/dists/" .. r .. k .. ".old ]" ..
	    " && [ ! -f results/" .. host .. "/merged/dists/" .. r .. k .. " ]; then cp -a" ..
		    "   results/" .. host .. "/merged/dists/" .. r .. k .. ".old" ..
		    "   results/" .. host .. "/merged/dists/" .. r .. k .. "; fi")
    os.execute("if [ -f results/" .. host .. "/merged/dists/" .. r .. k .. ".old ]" ..
	    " && [ ! -f results/" .. host .. "/merged/dists/" .. r .. k .. " ]; then cp -a" ..
		    "   results/" .. host .. "/merged/dists/" .. r .. k .. ".old" ..
		    "   results/" .. host .. "/merged/dists/" .. r .. k .. "; fi")
    if ".gz"  == k:sub(-3, -1) then APT.execute("ionice -c3 nice -n 19 gzip -dfk results/" .. host .. "/merged/dists/" .. r .. k) end
    if ".xz"  == k:sub(-3, -1) then APT.execute("ionice -c3 nice -n 19 xz   -dfk results/" .. host .. "/merged/dists/" .. r .. k .. " 2>/dev/null") end
    if APT.testing("Integrity") then
	if ".gpg" == k:sub(-4, -1) then
	    -- Release.gpg is a detached signature for Release (k minus ".gpg").
	    local status, out = APT.execute("gpgv --keyring /usr/share/keyrings/devuan-keyring.gpg results/" .. host .. "/merged/dists/" .. r .. k .. 
					" results/" .. host .. "/merged/dists/" .. r .. k:sub(1, -5) .. " 2>/dev/null")
	    if "0" ~= status then E("GPG check failed - " .. host .. "/merged/dists/" .. r .. k, "http", "Integrity", host) end
	end
-- TODO - should check the PGP sig of InRelease as well.
    end
    if APT.testing("Integrity") or APT.testing("Updated") then
	if "Packages." == file:sub(1, 9) then
-- TODO - compare the SHA256 sums in pkgmaster's Release for both the packed and unpacked versions.
--        Also note that this might get only a partial download due to maxtime.
	    if APT.options.referenceSite.value == host then
		-- Condense Packages into one "name | version | path | size | sha256 |" line per package.
		local Pp, e = io.open('results/' .. host .. '/merged/dists/'.. r .. dir .. 'Packages.parsed', "w+")
		if nil == Pp then W('opening results/' .. host .. '/merged/dists/'.. r .. dir .. 'Packages.parsed' .. ' file - ' .. e) else
		    local pp = {}
		    for l in io.lines('results/' .. host .. '/merged/dists/'.. r .. dir .. 'Packages') do
			if "Package: "	== l:sub(1, 9) then
			    -- A new stanza starts; flush the previous package's fields.
			    if 0 ~= #pp then
				for i = 1, 5 do
				    if nil == pp[i] then print(host .. " " .. r .. " " .. dir .. " " ..  i) else Pp:write(pp[i] .. " | ") end
				end
				Pp:write("\n")
			    end
			    pp = {}
			    pp[1] = 	   l:sub(10, -1)
			elseif "Version: "	== l:sub(1, 9) then
			    pp[2] = 	   l:sub(10, -1)
			elseif "Filename: "	== l:sub(1, 10) then
			    pp[3] = 	   l:sub(11, -1)
			elseif "Size: "	== l:sub(1, 6) then
			    pp[4] = 	   l:sub(7, -1)
			elseif "SHA256: "	== l:sub(1, 8) then
			    pp[5] = 	   l:sub(9, -1)
			end
		    end
		    Pp:close()
		    os.execute('sort results/' .. host .. '/merged/dists/'.. r .. dir .. 'Packages.parsed >results/' .. host .. '/merged/dists/'.. r .. dir .. 'Packages_parsed-sorted')
		    -- Diff against the previous run's sorted list to find removed (OLD) and added (NEW) packages.
		    if APT.checkFile('Packages/' .. r .. dir .. 'Packages_parsed-sorted') then
			os.execute('diff -U 0    Packages/' .. r .. dir .. 'Packages_parsed-sorted ' ..
						'results/pkgmaster.devuan.org/merged/dists/' .. r .. dir .. 'Packages_parsed-sorted ' ..
						' | grep -E "^-" | grep -Ev "^\\+\\+\\+|^---" >>results/OLD_PACKAGES_' .. r .. '.txt')
			os.execute('diff -U 0    Packages/' .. r .. dir .. 'Packages_parsed-sorted ' ..
						'results/pkgmaster.devuan.org/merged/dists/' .. r .. dir .. 'Packages_parsed-sorted ' ..
						' | grep -E "^\\+" | grep -Ev "^\\+\\+\\+|^---" >>results/NEW_Packages_' .. r .. '.txt')
			-- Find the smallest new package for each release.
			os.execute('sort -b -k 9,9 -n results/NEW_Packages_' .. r .. '.txt >results/NEW_Packages_' .. r .. '.sorted.txt')
			os.execute('grep -s " | pool/DEBIAN/"          results/NEW_Packages_' .. r .. '.sorted.txt 2>/dev/null | head -n 1  >results/NEW_Packages_' .. r .. '.test.txt')
			os.execute('grep -s " | pool/DEBIAN-SECURITY/" results/NEW_Packages_' .. r .. '.sorted.txt 2>/dev/null | head -n 1 >>results/NEW_Packages_' .. r .. '.test.txt')
			os.execute('grep -s " | pool/DEVUAN/"          results/NEW_Packages_' .. r .. '.sorted.txt 2>/dev/null | head -n 1 >>results/NEW_Packages_' .. r .. '.test.txt')
		    else
			C("Can't find file Packages/" .. r .. dir .. "Packages_parsed-sorted")
		    end
		    os.execute('mkdir -p Packages/' .. r .. dir)
		    os.execute('mv -f results/pkgmaster.devuan.org/merged/dists/' .. r .. dir .. 'Packages_parsed-sorted Packages/' .. r .. dir .. 'Packages_parsed-sorted')
		end
	    end
	    os.execute('rm -f results/' .. host .. '/merged/dists/' .. r .. dir .. 'Packages   2>/dev/null')
	    os.execute('rm -f results/' .. host .. '/merged/dists/' .. r .. dir .. 'Packages.* 2>/dev/null')
	end
    end
end

-- flock prefix so only one curl runs per meta download lock file.
local downloadLock = "flock -n results/curl-"
-- Common curl options: follow redirects, keep remote timestamps (-R), only
-- fetch files newer than results/stamp.old (-z).
local download = "curl --connect-timeout " .. APT.options.timeout.value .. " --create-dirs -f -L --max-time " .. APT.options.maxtime.value .. " -z 'results/stamp.old' -v -R "
-- Build a curl config file of downloads for host and fork curl to fetch them.
-- host    - the mirror to download from.
-- URL     - base path on the host, defaults to "".
-- release - when given with a list, each list line is a file of that release.
-- list    - newline separated file list; "" means just run curl on whatever
--           is already queued; nil means queue the standard release files.
local downloads = function(host, URL, release, list)
    if nil == URL then URL = "" end
    local lock = "META-" .. host .. ".lock"
    local log = " --stderr results/curl-META-" .. host .. ".log"
    local cm = "ionice -c3 nice -n 19 " .. downloadLock .. lock .. " " .. download .. log .. " -K results/" .. host .. ".curl"
    if      APT.testing("IPv4")  and (not APT.testing("IPv6")) then cm = cm .. ' -4' end
    if (not APT.testing("IPv4")) and      APT.testing("IPv6")  then cm = cm .. ' -6' end
    -- Bug fix: f and e were accidental globals (no "local"), so concurrent
    -- users of those common names could clobber each other.
    local f, e = io.open("results/" .. host .. ".curl", "a+")
    if nil == f then C("opening curl file - " .. e); return end

    if nil ~= list then
	if "" ~= list then
	    if nil ~= release then
		for l in list:gmatch("\n*([^\n]+)\n*") do
		    addDownload(host, URL, f, release, "/" .. l)
		end
	    else
		D('Downloading http://' .. host .. URL .. '/merged/' .. list)
		f:write('url "' .. 'http://' .. host .. URL .. '/merged/' .. list .. '"\n')
		f:write('output "results/' .. host .. '/merged/' .. list .. '"\n')
	    end
	    -- Just queueing; the caller runs curl later.
	    f:close()
	    return
	end
    else
	for i, s in pairs(releases) do
	    for j, k in pairs(releaseFiles) do
		if repoExists(s .. k) then
		    addDownload(host, URL, f, s, k)
		end
	    end
	end
    end
    f:close()
    APT.fork(cm)
end


-- Fetch the mirror list from the reference site, parse its "Key: value"
-- lines into a table keyed by FQDN, gather DNS records for each mirror,
-- optionally add the DNS round robin host, and write the lot out as
-- results/mirrors.lua.  Returns the mirrors table.
local getMirrors = function ()
    local mirrors = {}
    local host = ""
    local m = {}
    local active = true
    local URL = "http://" .. APT.options.referenceSite.value .. "/mirror_list.txt"
    I("getting mirrors.")
    local p, c, h = http.request(URL)
    if nil == p then E(c .. " fetching " .. URL) else

	for l in p:gmatch("\n*([^\n]+)\n*") do
	    local t, d = l:match("(%a*):%s*(.*)")
	    -- Bug fix: a line without a colon doesn't match, leaving t and d
	    -- nil; string.lower(nil) would raise and m[nil] is an invalid
	    -- table index.  Skip such lines instead of crashing.
	    if nil ~= t then
		d = string.lower(d)
		if "FQDN" == t then
		    -- A new mirror stanza starts; store the previous one.
		    if "" ~= host then
			mirrors[host] = m
			m = {}
			active = true
		    end
		    host = d
		    m[t] = d
		    gatherIPs(host)
		    m["IPs"] = IP[host]
		elseif "Protocols" == t then
		    local prot = {}
		    for w in d:gmatch("(%w+)") do
			prot[w] = true;
		    end
		    m[t] = prot
		elseif "Active" == t and nil == d:sub(1, 3):find("yes", 1, true) then
		    W("Mirror " .. host .. " is not active - " .. d, "", "", host)
		    active = false
		    m[t] = d
-- TODO - Should do some input validation on BaseURL, and everything else.
		else
		    m[t] = d
		end
	    end
	end
	-- Don't forget the final stanza.
	if "" ~= host --[[and active]] then
	    mirrors[host] = m
	end
    end
    if APT.testing("DNSRR") then
	mirrors[APT.options.roundRobin.value] = { ["Protocols"] = { ["http"] = true; ["https"] = true; };  ["FQDN"] = 'deb.devuan.org'; ["Active"] = 'yes'; ["BaseURL"] = 'deb.devuan.org'; }
	gatherIPs(APT.options.roundRobin.value)
	mirrors[APT.options.roundRobin.value].IPs = IP[APT.options.roundRobin.value]
    end
    local file, e = io.open("results/mirrors.lua", "w+")
    if nil == file then C("opening mirrors file - " .. e) else
	file:write(APT.dumpTable(mirrors, "", "mirrors") .. "\nreturn mirrors\n")
	file:close()
    end
    return mirrors
end


-- Entry dispatch.  With command-line arguments this process is a per-mirror
-- worker: arg[1] is the mirror host (plus optional path), arg[2] an optional
-- IP address, arg[3] an optional single file.  With no arguments it is the
-- coordinator: it rotates the results directories, fetches the mirror list,
-- and spawns one worker per mirror.
if 0 < #arg then
    -- Clean up sloppy input: strip a single trailing slash or space.
    if "/" == arg[1]:sub(-1, -1) then
	W("slash at end of path!  " .. arg[1])
	arg[1] = arg[1]:sub(1, -2)
    end
    if " " == arg[1]:sub(-1, -1) then
	W("space at end of path!  " .. arg[1])
	arg[1] = arg[1]:sub(1, -2)
    end
    local pu = url.parse("http://" .. arg[1])

    -- Currently a no-op: the cleanup of old reference-site results is
    -- commented out below.
    if APT.testing("Integrity") or APT.testing("Updated") then
	if APT.origin and APT.options.referenceSite.value == pu.host then
--	    if not APT.keep then os.execute("rm -fr results/" .. pu.host .. " 2>/dev/null") end
	end
    end

    -- Per-host (and per-IP, when one was given) HTML log file.
    if nil ~= arg[2] then
	APT.logFile, e = io.open("results/LOG_" .. pu.host .. "_" .. arg[2] .. ".html", "a+")
    else
	APT.logFile, e = io.open("results/LOG_" .. pu.host .. ".html", "a+")
    end
    if nil == APT.logFile then C("opening log file - " .. e); return end
    APT.logPre()
    I("Starting tests for " .. arg[1] .. " with these tests - " .. table.concat(APT.options.tests.value, ", "))
    -- Mirror table was written by the coordinator's getMirrors() run.
    APT.mirrors = loadfile("results/mirrors.lua")()
    if nil ~= arg[2] then I("  Using IP " .. arg[2]); ip = arg[2] end
    if nil ~= arg[3] then I("  Using file " .. arg[3]); end

    -- Initialise error/warning counters for each protocol being tested,
    -- with sub-counters per enabled test category.
    for k, v in pairs{"ftp", "http", "https", "rsync"} do
	if APT.testing(v) then
	    local tests = {errors = 0; warnings = 0}
	    if APT.testing("Integrity") then tests.Integrity = {errors = 0; warnings = 0} end
	    if APT.testing("Protocol")  then tests.Protocol  = {errors = 0; warnings = 0} end
	    if APT.testing("Updated")   then tests.Updated   = {errors = 0; warnings = 0} end
	    if APT.testing("URLSanity") then tests.URLSanity = {errors = 0; warnings = 0} end
	    APT.results[v] = tests
	end
    end
    -- APT.origin selects the deeper file-download checks; otherwise just run
    -- the per-host protocol checks.  The round-robin host skips downloads.
    if APT.origin then
	if APT.testing("Integrity") or APT.testing("Updated") then
	    if APT.origin and (APT.options.roundRobin.value ~= pu.host) then
		I("Starting file downloads for " .. pu.host)
		downloads(pu.host, pu.path)
	    end
	end
	checkFiles(pu.host, pu.host, pu.path);
    else
	checkHost(pu.host, pu.host, pu.path, arg[2], arg[3])
    end

    -- Integrity/Updated verification phase.  Only attempted when fewer than
    -- 4 timeouts were seen; otherwise the host is flagged as timing out.
    if APT.testing("Integrity") or APT.testing("Updated") then
	if 4 > (totalTimeouts) then
	    if APT.origin and (APT.options.roundRobin.value ~= pu.host) then
		-- Wait for the metadata (Release files) download batch to finish.
		while 0 < APT.checkExes(downloadLock .. "META-" .. pu.host .. ".lock") do os.execute("sleep 10") end
		os.execute( "rm -f results/" .. pu.host .. ".curl 2>/dev/null; rm -f results/curl-META-" .. pu.host .. ".lock 2>/dev/null; " ..  
			    "mv results/curl-META-" .. pu.host .. ".log results/curl-Release-" .. pu.host .. ".log")
		-- Post-process each release's downloaded release files, then
		-- diff the sorted Release file against the previous run to
		-- find Packages.xz entries that changed.
		for i, n in pairs(releases) do
		    for l, o in pairs(releaseFiles) do
			if repoExists(i .. o) then
			    postDownload(pu.host, n, o)
			end
		    end

		    if  APT.checkFile('results/' .. pu.host .. '/merged/dists/' .. n .. '/Release') then
			os.execute('sort -k 3 results/' .. pu.host .. '/merged/dists/' .. n .. '/Release >results/' .. pu.host .. '/merged/dists/' .. n .. '/Release.SORTED')
			if APT.checkFile('results_old/pkgmaster.devuan.org/merged/dists/' .. n .. '/Release.SORTED') then
			    if APT.options.referenceSite.value == pu.host then
				-- cut -c 77- strips the diff/checksum prefix,
				-- leaving just the changed file paths.
				os.execute('diff -U 0    results_old/pkgmaster.devuan.org/merged/dists/' .. n .. '/Release.SORTED ' ..
							'results/pkgmaster.devuan.org/merged/dists/' .. n .. '/Release.SORTED ' ..
							'| grep -v "@@" | grep "^+" | grep "Packages.xz$" |  cut -c 77- >results/NEW_Release_' .. n .. '.txt')
-- TODO - Maybe check the date in Release, though since they are updated daily, is there any point?  Perhaps it's for checking amprolla got run?
				os.execute('rm -f results/' .. pu.host .. '/merged/dists/' .. n .. '/Release 2>/dev/null')
			    else
-- TODO - compare to the pkgmaster copy.
			    end

			    -- Queue downloads of the changed Packages files.
			    local dfile, e = io.open('results/NEW_Release_' .. n .. '.txt', "r")
			    if nil == dfile then W("opening results/NEW_Release_" .. n .. " file - " .. e) else
				local diff = dfile:read("*a")
				if "" ~= diff then
		    		    downloads(pu.host, pu.path, n, diff)
		    		end
			    end
			end
		    end

		end

		-- Flush the queued Packages downloads and wait for them.
		downloads(pu.host, pu.path, "", "")
		while 0 < APT.checkExes(downloadLock .. "META-" .. pu.host .. ".lock") do os.execute("sleep 10") end
		os.execute( "rm -f results/" .. pu.host .. ".curl 2>/dev/null; rm -f results/curl-META-" .. pu.host .. ".lock 2>/dev/null; " ..
			    "mv results/curl-META-" .. pu.host .. ".log results/curl-Packages-" .. pu.host .. ".log")

		-- Post-process each downloaded Packages file listed in the diff.
		for i, n in pairs(releases) do
		    local dfile, e = io.open('results/NEW_Release_' .. n .. '.txt', "r")
		    if nil == dfile then W("opening results/NEW_Release_" .. n .. ".txt file - " .. e) else
			local diff = dfile:read("*a")
			for l in diff:gmatch("\n*([^\n]+)\n*") do
			    postDownload(pu.host, n, "/" .. l)
			end
		    end
		    if APT.options.referenceSite.value == pu.host then
			-- In case it wasn't dealt with already.
			os.execute('touch results/NEW_Packages_' .. n .. '.test.txt')
		    end
		end


		-- Queue downloads for every new .deb named in the test lists.
		for i, n in pairs(releases) do
		    local nfile, e = io.open('results/NEW_Packages_' .. n .. '.test.txt', "r")
		    if nil == nfile then W("opening results/NEW_Packages_" .. n .. ".test.txt file - " .. e) else
			for l in nfile:lines() do
			    local p = l:match('(pool/.*%.deb)')
			    if nil ~= p then
				downloads(pu.host, pu.path, nil, p)
			    end
			end
		    end
		end
		downloads(pu.host, pu.path, nil, "")
		while 0 < APT.checkExes(downloadLock .. "META-" .. pu.host .. ".lock") do os.execute("sleep 10") end
		-- Verify each downloaded package against the size and SHA256
		-- recorded in the test list (fields are " | ver | pool/...deb | size | sha |").
		for i, n in pairs(releases) do
		    local nfile, e = io.open('results/NEW_Packages_' .. n .. '.test.txt', "r")
		    if nil == nfile then W("opening results/NEW_Packages_" .. n .. ".test.txt file - " .. e) else
			for l in nfile:lines() do
			    local v, p, sz, sha = l:match(' | (.+) | (pool/.+%.deb) | (%d.+) | (%x.+) |')
			    if nil ~= p then
				if APT.checkFile('results/' .. pu.host .. "/merged/" .. p) then
				    local status, fsz = APT.execute('ls -l results/' .. pu.host .. "/merged/" .. p .. ' | cut -d " " -f 5-5')
				    if APT.testing("Integrity") then
					if sz ~= fsz:sub(2, -2) then	-- The sub bit is to slice off the EOLs at each end.
					    E('Package size mismatch - results/' .. pu.host .. "/merged/" .. p, 'http', 'Integrity', pu.host)
					    print('|' .. sz .. '~=' .. fsz:sub(2, -2) .. '|')
					else
					    -- Only bother hashing when the size matched.
					    local status, fsha = APT.execute('sha256sum results/' .. pu.host .. "/merged/" .. p .. ' | cut -d " " -f 1')
					    if sha ~= fsha:sub(2, -2) then E('Package SHA256 sum mismatch - results/' .. pu.host .. "/merged/" .. p, 'http', 'Integrity', pu.host) end
-- TODO - maybe check the PGP key, though packages are mostly not signed.
					end
				    end
				    if APT.testing("Updated") then
					if sz ~= fsz:sub(2, -2) then
					    E('Package size mismatch - results/' .. pu.host .. "/merged/" .. p, 'http', 'Updated', pu.host)
					end
				    end
				else
				    E('Failed to download - results/' .. pu.host .. "/merged/" .. p, 'http', 'Updated', pu.host)
				end
			    end
			end
		    end
		end
	    end

	    APT.results["timeout"] = false
	else
	    APT.results["timeout"] = true
	end
    end

    -- Clean up this mirror's working files (reference site is kept).
    if APT.origin and APT.options.referenceSite.value ~= pu.host then
	if not APT.keep then os.execute("rm -fr results/" .. pu.host .. " 2>/dev/null") end
	os.execute('rm STATUS_' .. pu.host .. '* 2>/dev/null')
    end

    -- Scrape min/max transfer speeds out of the curl progress logs.
    -- "k" suffixed values are scaled by appending "000"; zeros are ignored
    -- for the minimum.  NOTE(review): 'spd' is declared but never assigned.
    local min, max, spd = 999999999999, 0
    for i, mt in pairs({'Release', 'Packages', 'META'}) do
	if APT.checkFile("results/curl-" .. mt .. "-" .. pu.host .. ".log") then
	    for l in io.lines("results/curl-" .. mt .. "-" .. pu.host .. ".log") do
		local speed, crrnt = l:match('^%c *%d+ +%d+k? +%d+ +%d+k? +%d+ +%d+ +(%d+k?) +%d+ +[%d%-]+:[%d%-]+:[%d%-]+ +[%d%-]+:[%d%-]+:[%d%-]+ +[%d%-]+:[%d%-]+:[%d%-]+ +(%d+k?)')
		if nil ~= speed then
		    if 'k' == speed:sub(-1, -1) then speed = speed:sub(1, -2) .. '000' end
		    if 'k' == crrnt:sub(-1, -1) then crrnt = crrnt:sub(1, -2) .. '000' end
		    speed = tonumber(speed)
		    crrnt = tonumber(crrnt)
		    if speed < min and speed ~= 0 then min = speed end
		    if speed > max                then max = speed end
		    if crrnt < min and crrnt ~= 0 then min = crrnt end
		    if crrnt > max                then max = crrnt end
		end
	    end
	end
    end
    APT.results["speed"] = {min = min, max = max}

    -- Dump the accumulated results table to a per-host (and per-IP) Lua file.
    local f = pu.host
    if "" ~= ip then f = f .. "_" .. ip end
    local rfile, e = io.open("results/" .. f .. ".lua", "w+")
    if nil == rfile then C("opening results file - " .. e) else
	rfile:write(APT.dumpTable(APT.results, "", "results") .. "\nreturn results\n")
        rfile:close()
    end
    APT.logPost()
    APT.logFile:close()
else
    -- Coordinator mode.  First archive the previous results_old target
    -- (the symlink's destination, extracted from ls -l output).
    local fadt = io.popen("ls -dl results_old 2>/dev/null | cut -d '>' -f 2 | cut -d ' ' -f 2")
    local adt = fadt:read('*l')
    fadt:close()
    if nil ~= adt then os.execute('tar -c --xz ' .. adt .. ' -f ' .. adt .. '.tar.xz') end
    -- Rotate the results directories: results -> results_old, and a fresh
    -- timestamped directory becomes results.  Timestamps come from the
    -- results/stamp file (old run) and the current UTC time (new run).
    local dt = os.date('!%Y-%m-%d-%H-%M')
    local fodt = io.popen('TZ="GMT" date -r results/stamp +%Y-%m-%d-%H-%M 2>/dev/null', 'r')
    local odt = fodt:read('*l')
    fodt:close()
    if nil ~= odt then os.execute('                               rm -f results_old; ln -s results_' .. odt .. ' results_old 2>/dev/null') end
    if nil ~= dt  then os.execute('mkdir -p results_' .. dt .. '; rm -f results;     ln -s results_' .. dt  .. ' results     2>/dev/null') end
    os.execute('if [ -f results/stamp ]; then mv results/stamp results/stamp.old; else touch results/stamp.old -t 199901010000; fi;  touch results/stamp')
    os.execute("rm -f results/*.check  2>/dev/null")
    if not APT.keep then
	os.execute("rm -f results/*.curl 2>/dev/null")
	os.execute("rm -f results/*.log  2>/dev/null")
	os.execute("rm -f results/*.html 2>/dev/null")
	os.execute("rm -f results/*.txt  2>/dev/null")
    end

    APT.logFile, e = io.open("results/LOG_apt-panopticon.html", "a+")
    if nil == APT.logFile then C("opening log file - " .. e); return end
    APT.logPre()
    I("Starting tests " .. table.concat(APT.options.tests.value, ", "))
    os.execute("mkdir -p results")
    APT.mirrors = getMirrors()
    -- Check the reference site first, then wait until its package test lists
    -- exist before unleashing the per-mirror workers.
    checkHost(APT.options.referenceSite.value)
    for i, n in pairs(releases) do
	while not APT.checkFile('results/NEW_Packages_' .. n .. '.test.txt') do os.execute("sleep 10") end
    end

    -- Kick off a check for every mirror (the reference site was done above),
    -- sanitising BaseURL first just like arg[1] in worker mode.
    for k, m in pairs(APT.mirrors) do
	if "/" == m.BaseURL:sub(-1, -1) then
	    W("slash at end of BaseURL in mirror_list.txt!  " .. m.BaseURL, "", "", m.FQDN)
	    m.BaseURL = m.BaseURL:sub(1, -2)
	end
	if " " == m.BaseURL:sub(-1, -1) then
	    W("space at end of BaseURL in mirror_list.txt!  " .. m.BaseURL, "", "", m.FQDN)
	    m.BaseURL = m.BaseURL:sub(1, -2)
	end
	local pu = url.parse("http://" .. m.BaseURL)
	if APT.options.referenceSite.value ~= pu.host then
	    checkHost(m.BaseURL)
	    APT.checkExes("apt-panopticon.lua " .. sendArgs)
	    if APT.testing("Integrity") or APT.testing("Updated") then APT.checkExes(downloadLock) end
	end
    end

    -- Wait for all spawned worker processes to finish.
    while 1 <= APT.checkExes("apt-panopticon.lua " .. sendArgs) do os.execute("sleep 10") end

    os.execute("rm -f results/*.check; rm -f results/*.lock 2>/dev/null")

    -- Create the reports.
    for n, r in pairs(APT.options.reports.value) do
	if APT.checkFile("apt-panopticon-report-" .. r .. ".lua") then
	    I("Creating " .. r .. " report.")
	    APT.execute("./apt-panopticon-report-" .. r .. ".lua")
	end
    end

    -- The old results were tarred up at the top; remove the raw directory.
    if nil ~= adt then os.execute('rm -fr ' .. adt .. ' 2>/dev/null') end

    APT.logPost()
    APT.logFile:close()
end