#!/usr/bin/env luajit local args = {...} --[[ TODO - What to do about HTTPS://deb.devuan.org/ redirects. Some mirrors give a 404. Sledjhamr gives a 404, coz it's not listening on 443 for deb.devuan.org. Some mirrors give a 200. They shouldn't have the proper certificate, but are giving a result anyway. ]] origin = false verbosity = -1 keep = false -- TODO - Should actually implement this. fork = true options = { referenceSite = { typ = "string", help = "", value = "pkgmaster.devuan.org", }, roundRobin = { typ = "string", help = "", value = "deb.devuan.org", }, tests = { typ = "table", help = "", value = { "IPv4", "IPv6", -- "ftp", "http", "https", -- "rsync", "DNSRR", "Protocol", -- "URLSanity", "Integrity", -- "Updated", }, }, timeout = { typ = "number", help = "", value = 15, }, reports = { typ = "table", help = "", value = { "email", -- "Nagios", -- "Prometheus", -- "RRD", "web", }, }, } local defaultURL = {scheme = "http"} local releases = {"jessie", "ascii", "beowulf", "ceres"} local releaseFiles = { -- Release file. "/Release", -- 3.7 MB "/Release.gpg", -- -- "/InRelease", -- 3.7 MB -- "/main/binary-all/Packages.xz", -- 2.6 GB for all that changed recently. -- Contents files. -- 3.3 GB -- "/main/Contents-all.xz", -- "/main/Contents-amd64.xz", -- "/main/Contents-arm64.xz", -- "-security/main/Contents-all.xz", -- "-security/main/Contents-amd64.xz", -- "-security/main/Contents-arm64.xz", } local notExist = { "ceres-security" -- This will never exist, it's our code name for the testing suite. } local referenceDebs = { -- Debian package. "merged/pool/DEBIAN/main/d/dash/dash_0.5.8-2.4_amd64.deb", -- Debian security package. NOTE this one should always be redirected? "merged/pool/DEBIAN-SECURITY/updates/main/a/apt/apt-transport-https_1.4.9_amd64.deb", } local referenceDevs = { -- Devuan package. NOTE this one should not get redirected, but that's more a warning than an error. 
-- referenceDevs: Devuan-origin packages used as download probes (table continues below).
"merged/pool/DEVUAN/main/d/desktop-base/desktop-base_2.0.3_all.deb", "merged/pool/DEVUAN/main/u/util-linux/util-linux_2.32.1-0.1+devuan2.1_amd64.deb", } local arg = {} local sendArgs = "" local logFile local curlStatus = { [1 ] = "Unsupported protocol. This build of curl has no support for this protocol.", [2 ] = "Failed to initialize.", [3 ] = "URL malformed. The syntax was not correct.", [4 ] = "A feature or option that was needed to perform the desired request was not enabled or was explicitly disabled at build-time. To make curl able to do this, you probably need another build of libcurl!", [5 ] = "Couldn't resolve proxy. The given proxy host could not be resolved.", [6 ] = "Couldn't resolve host. The given remote host was not resolved.", [7 ] = "Failed to connect to host.", [8 ] = "Weird server reply. The server sent data curl couldn't parse.", [9 ] = "FTP access denied. The server denied login or denied access to the particular resource or directory you wanted to reach. Most often you tried to change to a directory that doesn't exist on the server.", [10] = "While waiting for the server to connect back when an active FTP session is used, an error code was sent over the control connection or similar.", [11] = "FTP weird PASS reply. Curl couldn't parse the reply sent to the PASS request.", [12] = "During an active FTP session while waiting for the server to connect, the CURLOPT_ACCEPTTIMEOUT_MS (or the internal default) timeout expired.", [13] = "FTP weird PASV reply, Curl couldn't parse the reply sent to the PASV request.", [14] = "FTP weird 227 format. Curl couldn't parse the 227-line the server sent.", [15] = "FTP can't get host. Couldn't resolve the host IP we got in the 227-line.", [16] = "A problem was detected in the HTTP2 framing layer. This is somewhat generic and can be one out of several problems, see the error buffer for details.", [17] = "FTP couldn't set binary. Couldn't change transfer method to binary.", [18] = "Partial file. 
Only a part of the file was transferred.", [19] = "FTP couldn't download/access the given file, the RETR (or similar) command failed.", [21] = "FTP quote error. A quote command returned error from the server.", [22] = "HTTP page not retrieved. The requested url was not found or returned another error with the HTTP error code being 400 or above. This return code only appears if -f, --fail is used.", [23] = "Write error. Curl couldn't write data to a local filesystem or similar.", [25] = "FTP couldn't STOR file. The server denied the STOR operation, used for FTP uploading.", [26] = "Read error. Various reading problems.", [27] = "Out of memory. A memory allocation request failed.", [28] = "Operation timeout. The specified time-out period was reached according to the conditions.", [30] = "FTP PORT failed. The PORT command failed. Not all FTP servers support the PORT command, try doing a transfer using PASV instead!", [31] = "FTP couldn't use REST. The REST command failed. This command is used for resumed FTP transfers.", [33] = "HTTP range error. The range \"command\" didn't work.", [34] = "HTTP post error. Internal post-request generation error.", [35] = "SSL connect error. The SSL handshaking failed.", [36] = "FTP bad download resume. Couldn't continue an earlier aborted download.", [37] = "FILE couldn't read file. Failed to open the file. Permissions?", [38] = "LDAP cannot bind. LDAP bind operation failed.", [39] = "LDAP search failed.", [41] = "Function not found. A required LDAP function was not found.", [42] = "Aborted by callback. An application told curl to abort the operation.", [43] = "Internal error. A function was called with a bad parameter.", [45] = "Interface error. A specified outgoing interface could not be used.", [47] = "Too many redirects. When following redirects, curl hit the maximum amount.", [48] = "Unknown option specified to libcurl. This indicates that you passed a weird option to curl that was passed on to libcurl and rejected. 
Read up in the manual!", [49] = "Malformed telnet option.", [51] = "The peer's SSL certificate or SSH MD5 fingerprint was not OK.", [52] = "The server didn't reply anything, which here is considered an error.", [53] = "SSL crypto engine not found.", [54] = "Cannot set SSL crypto engine as default.", [55] = "Failed sending network data.", [56] = "Failure in receiving network data.", [58] = "Problem with the local certificate.", [59] = "Couldn't use specified SSL cipher.", [60] = "Peer certificate cannot be authenticated with known CA certificates.", [61] = "Unrecognized transfer encoding.", [62] = "Invalid LDAP URL.", [63] = "Maximum file size exceeded.", [64] = "Requested FTP SSL level failed.", [65] = "Sending the data requires a rewind that failed.", [66] = "Failed to initialise SSL Engine.", [67] = "The user name, password, or similar was not accepted and curl failed to log in.", [68] = "File not found on TFTP server.", [69] = "Permission problem on TFTP server.", [70] = "Out of disk space on TFTP server.", [71] = "Illegal TFTP operation.", [72] = "Unknown TFTP transfer ID.", [73] = "File already exists (TFTP).", [74] = "No such user (TFTP).", [75] = "Character conversion failed.", [76] = "Character conversion functions required.", [77] = "Problem with reading the SSL CA cert (path? access rights?).", [78] = "The resource referenced in the URL does not exist.", [79] = "An unspecified error occurred during the SSH session.", [80] = "Failed to shut down the SSL connection.", [81] = "Socket is not ready for send/recv wait till it's ready and try again. 
This return code is only returned from curl_easy_recv and curl_easy_send.", [82] = "Could not load CRL file, missing or wrong format (added in 7.19.0).", [83] = "Issuer check failed (added in 7.19.0).", [84] = "The FTP PRET command failed", [85] = "RTSP: mismatch of CSeq numbers", [86] = "RTSP: mismatch of Session Identifiers", [87] = "unable to parse FTP file list", [88] = "FTP chunk callback reported error", [89] = "No connection available, the session will be queued", [90] = "SSL public key does not matched pinned public key", [91] = "Status returned failure when asked with CURLOPT_SSL_VERIFYSTATUS.", [92] = "Stream error in the HTTP/2 framing layer.", [93] = "An API function was called from inside a callback.", [94] = "An authentication function returned an error.", [95] = "A problem was detected in the HTTP/3 layer. This is somewhat generic and can be one out of several problems, see the error buffer for details.", } local socket = require 'socket' local ftp = require 'socket.ftp' local http = require 'socket.http' local url = require 'socket.url' -- Use this to dump a table to a string. dumpTable = function (table, space, name) local r = "" if "" == space then r = r .. space .. name .. " =\n" else r = r .. space .. "[" .. name .. "] =\n" end r = r .. space .. "{\n" r = r .. dumpTableSub(table, space .. " ") if "" == space then r = r .. space .. "}\n" else r = r .. space .. "},\n" end return r end dumpTableSub = function (table, space) local r = "" for k, v in pairs(table) do if type(k) == "string" then k = '"' .. k .. '"' end if type(v) == "table" then r = r .. dumpTable(v, space, k) elseif type(v) == "string" then r = r .. space .. "[" .. k .. "] = '" .. v .. "';\n" elseif type(v) == "function" then r = r .. space .. "[" .. k .. "] = function ();\n" elseif type(v) == "userdata" then r = r .. space .. "userdata " .. "[" .. k .. "];\n" elseif type(v) == "boolean" then if (v) then r = r .. space .. "[" .. k .. "] = true;\n" else r = r .. space .. "[" .. k .. 
-- dumpTableSub continues: serialise booleans, then fall through to plain tostring-able values.
"] = false;\n" end else r = r .. space .. "[" .. k .. "] = " .. v .. ";\n" end end return r end local ip = "" local results = {} local logPre = function() if nil ~= logFile then logFile:write("
 \n") logFile:write("\n") end end local logPost = function() if nil ~= logFile then logFile:write(" \n") end end local log = function(v, t, s, prot, test, host) local x = "" if nil == prot then prot = "" end if nil == test then test = "" end x = x .. prot if "" ~= test then if #x > 0 then x = x .. " " end x = x .. test end if nil ~= host then if #x > 0 then x = x .. " " end x = x .. host end if #x > 0 then t = t .. "(" .. x .. ")" if "" ~= prot then if "" == test then if nil == results[prot] then results[prot] = {errors = 0; warnings = 0} end if v == 0 then results[prot].errors = results[prot].errors + 1 end if v == 1 then results[prot].warnings = results[prot].warnings + 1 end else if nil == results[prot] then results[prot] = {errors = 0; warnings = 0} end if nil == results[prot][test] then results[prot][test] = {errors = 0; warnings = 0} end if v == 0 then results[prot][test].errors = results[prot][test].errors + 1 end if v == 1 then results[prot][test].warnings = results[prot][test].warnings + 1 end end end end if v <= verbosity then if 3 <= verbosity then t = os.date() .. " " .. t end print(t .. ": " .. s) end if nil ~= logFile then local colour = "white" if -1 == v then colour = "fuchsia" end -- CRITICAL if 0 == v then colour = "red " end -- ERROR if 1 == v then colour = "yellow " end -- WARNING if 2 == v then colour = "white " end -- INFO if 3 == v then colour = "gray " end -- DEBUG logFile:write(os.date() .. " " .. t .. ": " .. s .. "\n") logFile:flush() end end local D = function(s) log(3, "DEBUG ", s) end local I = function(s) log(2, "INFO ", s) end local W = function(s, p, t, h) log(1, "WARNING ", s, p, t, h) end local E = function(s, p, t, h) log(0, "ERROR ", s, p, t, h) end local C = function(s) log(-1, "CRITICAL", s) end local mirrors = {} local testing = function(t, host) for i, v in pairs(options.tests.value) do if t == v then local h = mirrors[host] if nil == h then return true end if true == h["Protocols"][t] then return true else D("Skipping " .. 
-- testing(t, host): true when test t is enabled in options.tests AND (host unknown, or host advertises protocol t).
t .. " checks for " .. host) end end end return false end local execute = function (s) D(" executing" .. s .. "
")
--[[ Damn os.execute()
Lua 5.1 says it returns "a status code, which is system-dependent"
Lua 5.2 says it returns true/nil, "exit"/"signal", the status code.
I'm getting 7168 or 0. No idea what the fuck that is.
local ok, rslt, status = os.execute(s)
]]
-- Work around the above by echoing "$?" after the command and reading it back through a pipe.
local f = io.popen(s .. ' ; echo "$?"', 'r')
local status = ""
local result = ""
-- The last line will be the command's returned status, collect everything else in result.
-- Lookahead trick: 'status' lags one line behind 'l', so after the loop it holds the
-- final line (the echoed exit status) and everything before it lands in 'result'.
-- NOTE(review): this also prepends one blank line to 'result' - presumably harmless, verify.
for l in f:lines() do
result = result .. status .. "\n"
status = l
end
return status, result
end
-- Fire-and-forget: run shell command s in the background via '&'.
-- Output is not captured; completion is tracked separately via checkExes().
-- NOTE(review): shadowed later by the boolean global 'fork' set from the -n option? No -
-- the global 'fork = true' is assigned at the top; this local shadows IT from here on.
local fork = function(s)
D(" forking " .. s .. "
")
os.execute(s .. " &")
end
-- Count running processes whose command line matches exe.
-- The ' grep ' filter keeps the grep process itself out of the count.
local checkExes = function (exe)
	local pipeline = 'ps x | grep "' .. exe .. '" | grep -v " grep " | wc -l'
	local running = io.popen(pipeline):read("*l")
	D(running .. " " .. exe .. " commands still running.")
	return tonumber(running)
end
-- Does this repo path refer to a repo that can exist?
-- Takes the leading run of letters/hyphens (the release name, e.g. "ceres-security")
-- and rejects it when it appears in the notExist blacklist.
local repoExists = function (r)
	local name = r:match("([%a-]*)")
	if name == nil then return false end
	for _, banned in pairs(notExist) do
		if banned == name then
			return false
		end
	end
	return true
end
-- Cache of DNS lookups: IP[hostname] maps each record value to its record type.
local IP = {}
-- Resolve host with dig and cache all of its A/AAAA/CNAME/SRV answers in IP[host].
-- CNAME targets are resolved recursively and linked in so IP[host][cname] becomes
-- the target's own table. No return value; results live in the IP cache.
gatherIPs = function (host)
	if nil == IP[host] then
		local IPs
		local dig = io.popen('dig +keepopen +noall +nottlid +answer ' .. host .. ' A ' .. host .. ' AAAA ' .. host .. ' CNAME ' .. host .. ' SRV | sort -r | uniq')
		repeat
			IPs = dig:read("*l")
			if nil ~= IPs then
				-- Each answer line looks like "name.  IN  TYPE  value".
				for k, t, v in IPs:gmatch("([%w_%-%.]*)%.%s*IN%s*(%a*)%s*(.*)") do
					if "." == v:sub(-1, -1) then v = v:sub(1, -2) end -- Strip the trailing root dot.
					if nil == IP[k] then IP[k] = {} end
					IP[k][v] = t
					D(" DNS record " .. host .. " == " .. k .. " type " .. t .. " -> " .. v)
					if t == "CNAME" then
						gatherIPs(v)
						IP[k][v] = IP[v]
					elseif t == "SRV" then
						-- BUG FIX: was "v == 'SRV'", comparing the record VALUE instead of the
						-- record TYPE (t), so SRV records were never noticed.
						print("SVR record found, now what do we do?")
					end
				end
			end
		until nil == IPs
	end
end
-- Returns FTP directory listing
-- On success returns the listing concatenated into one string; on failure
-- returns nil (falsy) plus the error message from socket.ftp.
local nlst = function (u)
	-- BUG FIX: ltn12 was used here but never required anywhere in this file,
	-- which crashed with "attempt to index global 'ltn12' (a nil value)".
	-- ltn12 ships with luasocket, which this file already depends on.
	local ltn12 = require 'ltn12'
	local t = {}
	local p = url.parse(u)
	p.command = "nlst"
	p.sink = ltn12.sink.table(t)
	local r, e = ftp.get(p)
	return r and table.concat(t), e
end
-- Timeout bookkeeping: 'timeouts' counts this probe chain, 'totalTimeouts' the whole run.
local timeouts = 0;
local totalTimeouts = 0
-- checkHEAD: HEAD-probe URL against a specific host (or IP) using the curl binary,
-- following redirects MANUALLY (one hop per recursive call) so each hop can be policed.
--   host  - hostname (or IP) to actually connect to.
--   URL   - the full URL being checked.
--   r     - redirect depth so far (nil -> 0).
--   retry - retry count so far (nil -> 0).
-- Logs errors/warnings via E/W/I/D; no return value.
checkHEAD = function (host, URL, r, retry)
if nil == r then r = 0 end
if nil == retry then retry = 0 end
local check = "Checking file"
local PU = url.parse(URL, defaultURL)
local pu = url.parse(PU.scheme .. "://" .. host, defaultURL)
if 0 < r then
check = "Redirecting to"
end
if 0 < retry then
os.execute("sleep " .. math.random(1, 3))
check = "Retry " .. retry .. " " .. check
end
-- Bail out early once this chain or the whole run has burned its timeout/redirect/retry budget.
if 2 <= timeouts then
E("too many timeouts! " .. check .. " " .. host .. " -> " .. URL, PU.scheme, "", host)
return
end
if 4 <= (totalTimeouts) then
E("Way too many timeouts!", PU.scheme, "", host)
return
end
if 20 <= r then
E("too many redirects! " .. check .. " " .. host .. " -> " .. URL, PU.scheme, "", host)
return
end
if 4 <= retry then
E("too many retries! " .. check .. " " .. host .. " -> " .. URL, PU.scheme, "", host)
return
end
D(PU.scheme .. " :// " .. check .. " " .. host .. " -> " .. URL)
if not testing(PU.scheme, host) then D("Not testing " .. PU.scheme .. " " .. host .. " -> " .. URL); return end
-- TODO - Perhaps we should try it anyway, and mark it as a warning if it DOES work?
if "https" == PU.scheme and options.roundRobin.value == host then D("Not testing " .. PU.scheme .. " " .. host .. " -> " .. URL .. " mirrors shouldn't have the correct cert."); return end
--[[ Using curl command line -
-I - HEAD
--connect-to domain:port:IP:port - connect to IP, but use SNI from URL.
-header "" - add extra headers.
-L - DO follow redirects.
--max-redirs n - set maximum redirects, default is 50, -1 = unlimited.
--retry n - maximum retries, default is 0, no retries.
-o file - write to file instead of stdout.
--path-as-is - https://curl.haxx.se/libcurl/c/CURLOPT_PATH_AS_IS.html might be useful for URLSanity.
-s silent - don't output progress or error messages.
--connect-timeout n - timeout in seconds.
Should return with error code 28 on a timeout?
-D file - write the received headers to a file. This includes the status code and string.
]]
-- Per-host / per-path result file names so concurrent forked probes don't collide.
local fname = host .. "_" .. PU.host .. "_" .. PU.path:gsub("/", "_") .. ".txt"
local hdr = ""
-- When probing a specific IP, connect there but keep the URL's hostname (Host header / SNI).
local IP = ""
if pu.host ~= PU.host then
if "http" == PU.scheme then
hdr = '-H "Host: ' .. host .. '"'
end
IP = '--connect-to "' .. pu.host .. '::' .. PU.host .. ':"'
end
local cmd = 'ionice -c3 nice -n 19 curl -I --retry 0 -s --path-as-is --connect-timeout ' .. options.timeout.value .. ' --max-redirs 0 ' .. IP .. ' ' .. '-o /dev/null -D results/"HEADERS_' .. fname .. '" ' ..
hdr .. ' -w "#%{http_code} %{ssl_verify_result} %{url_effective}\\n" ' .. PU.scheme .. '://' .. host .. PU.path .. ' >>results/"STATUS_' .. fname .. '"'
local status, result = execute(cmd)
os.execute('cat results/"HEADERS_' .. fname .. '" >>results/"STATUS_' .. fname .. '" 2>/dev/null; rm results/"HEADERS_' .. fname .. '" 2>/dev/null')
-- curl itself failed (non-zero exit): log it, count timeouts (28 = timeout, 7 = connect failure), retry.
if "0" ~= status then
local msg = curlStatus[0 + status]
if nil == msg then msg = "UNKNOWN CURL STATUS CODE!" end
E(" The curl command return an error code of " .. status .. " - " .. msg, PU.scheme, "", host)
if ("28" == status) or ("7" == status) then
E(" TIMEOUT " .. timeouts + 1 .. ", retry " .. retry + 1, PU.scheme, "", host)
timeouts = timeouts + 1
end
-- NOTE(review): checkHEAD only declares four parameters; the fifth argument here is ignored.
checkHEAD(host, URL, r, retry + 1, timeouts)
return
end
-- Parse the saved output: "#<code> <ssl_verify> <url>" from -w, then the raw response headers.
local rfile, e = io.open("results/STATUS_" .. fname, "r")
local code = "000"
local cstr = ""
local location = nil
if nil == rfile then W("opening results/STATUS_" .. fname .. " file - " .. e) else
for line in rfile:lines("*l") do
if "#" == line:sub(1, 1) then
code = line:sub(2, 4)
if ("https" == PU.scheme) and ("0" ~= line:sub(6, 6)) then E(" The certificate is invalid.", PU.scheme, "https", host) end
elseif "http" == line:sub(1, 4):lower() then
-- -2 coz the headers file gets a \r at the end.
cstr = line:sub(14, -2)
elseif "location" == line:sub(1, 8):lower() then
location = line:sub(11, -2)
end
end
end
os.execute('rm results/"STATUS_' .. fname .. '" 2>/dev/null')
-- 4xx/5xx is a hard error; anything else succeeds, and we chase any Location header ourselves.
if ("4" == tostring(code):sub(1, 1)) or ("5" == tostring(code):sub(1, 1)) then
E(" " .. code .. " " .. cstr .. ". " .. check .. " " .. host .. " -> " .. URL, PU.scheme, "", host)
else
I(" " .. code .. " " .. cstr .. ". " .. check .. " " .. host .. " -> " .. URL)
-- timeouts = timeouts - 1 -- Backoff the timeouts count if we managed to get through.
if nil ~= location then
pu = url.parse(location, defaultURL)
if (pu.scheme ~= PU.scheme) then
if testing("Protocol") then W(" protocol changed during redirect! " .. check .. " " .. host .. " -> " .. URL .. " -> " .. location, PU.scheme, "Protocol", host) end
if (pu.host == host) and pu.path == PU.path then D("Not testing protocol change " .. URL .. " -> " .. location); return end
end
if location == URL then
E(" redirect loop! " .. check .. " " .. host .. " -> " .. URL, PU.scheme, "", host)
elseif nil == pu.host then
I(" relative redirect. " .. check .. " " .. host .. " -> " .. URL .. " -> " .. location)
checkHEAD(host, PU.scheme .. "://" .. PU.host .. location, r + 1)
elseif (PU.host == pu.host) or (host == pu.host) then
checkHEAD(pu.host, location, r + 1)
else
--[[ The hard part here is that we end up throwing ALL of the test files at the redirected location.
Not good for deb.debian.org, which we should only be throwing .debs at.
What we do is loop through the DNS entries, and only test the specific protocol & file being tested here.
This is what I came up with for checking if we are already testing a specific URL.
Still duplicates a tiny bit, but much less than the previous find based method.
]]
local file = pu.host .. "://" .. pu.path
local f = io.popen(string.format('if [ ! -f results/%s.check ] ; then touch results/%s.check; echo -n "check"; fi', file:gsub("/", "_"), file:gsub("/", "_") )):read("*a")
if (nil == f) or ("check" == f) then
I(" Now checking redirected host " .. file)
checkHost(pu.host, pu.host, nil, "redir", pu.path)
else
D(" Already checking " .. file)
end
end
end
end
end
-- Run one HEAD probe of scheme://URL against host, rolling this chain's timeout
-- count into the run-wide total first. Returns true when the run-wide timeout
-- budget (4) is exhausted, signalling the caller to stop probing this host.
local checkTimeouts = function(host, scheme, URL)
	if not testing(scheme) then
		return false
	end
	totalTimeouts = totalTimeouts + timeouts
	timeouts = 0
	checkHEAD(host, scheme .. "://" .. URL)
	if totalTimeouts >= 4 then
		E("Way too many timeouts!", scheme, "", host)
		return true
	end
	return false
end
-- Probe one mirror IP over http and https: either a single explicit file, or the
-- default set (each reference Devuan package, plus every release's Release files
-- that can actually exist). Aborts as soon as the timeout budget is blown.
local checkFiles = function (host, ip, path, file)
	timeouts = 0
	if nil == path then path = "" end
	if nil ~= file then
		if "redir" == ip then ip = host end
		I(" Checking IP for file " .. host .. " -> " .. ip .. " " .. path .. " " .. file)
		local target = ip .. path .. "/" .. file
		if checkTimeouts(host, "http", target) then return end
		if checkTimeouts(host, "https", target) then return end
		return
	end
	I(" Checking IP " .. host .. " -> " .. ip .. " " .. path)
	for _, pkg in pairs(referenceDevs) do
		if checkTimeouts(host, "http", ip .. path .. "/" .. pkg) then return end
		if checkTimeouts(host, "https", ip .. path .. "/" .. pkg) then return end
	end
	for _, rel in pairs(releases) do
		for _, rf in pairs(releaseFiles) do
			if repoExists(rel .. rf) then
				local target = ip .. path .. "/merged/dists/" .. rel .. rf
				if checkTimeouts(host, "http", target) then return end
				if checkTimeouts(host, "https", target) then return end
			end
		end
	end
end
-- checkHost: entry point for testing one mirror.
--   orig - the mirror's advertised name; host - hostname to test (defaults to orig);
--   path - base path on the mirror; ip - a concrete IP, "redir" marker, or nil;
--   file - a single file to test instead of the default set.
-- With a concrete IP it probes files directly; otherwise it forks one child
-- apt-panopticon process per A/AAAA record so the IPs are tested in parallel.
checkHost = function (orig, host, path, ip, file)
if nil == host then host = orig end
if nil == path then path = "" end
if nil == file then file = "" end
local ph = url.parse("http://" .. host)
-- Given a concrete IP (not the "redir" placeholder), just probe the files on it.
if (nil ~= ip) and ("redir" ~= ip) then
local po = url.parse("http://" .. orig)
if "" ~= file then
D("checking redirected file " .. po.host .. " " .. file)
checkFiles(po.host, ip, path, file)
else
checkFiles(po.host, ip, path)
end
-- Otherwise fan out: fork one child per DNS record of the mirror.
else
if orig == host then
D("checkHost " .. orig .. "" .. file)
if testing("IPv4") then fork("ionice -c3 ./apt-panopticon.lua " .. sendArgs .. " -o " .. orig .. path .. " " .. file) end
else D("checkHost " .. orig .. " -> " .. host) end
local h = mirrors[ph.host]
if nil == h then return end
-- h.IPs maps record value -> record type ("A"/"AAAA"), or to a nested table for CNAMEs.
for k, v in pairs(h.IPs) do
if "table" == type(v) then
for k1, v1 in pairs(v) do
if v1 == "A" then
if testing("IPv4") then fork("ionice -c3 ./apt-panopticon.lua " .. sendArgs .. " " .. orig .. path .. " " .. k1 .. " " .. file) end
elseif v1 == "AAAA" then
if testing("IPv6") then fork("ionice -c3 ./apt-panopticon.lua " .. sendArgs .. " " .. orig .. path .. " " .. k1 .. " " .. file) end
end
end
else
if v == "A" then
if testing("IPv4") then fork("ionice -c3 ./apt-panopticon.lua " .. sendArgs .. " " .. orig .. path .. " " .. k .. " " .. file) end
elseif v == "AAAA" then
if testing("IPv6") then fork("ionice -c3 ./apt-panopticon.lua " .. sendArgs .. " " .. orig .. path .. " " .. k .. " " .. file) end
end
end
end
end
end
-- Append one download to the curl config file f: a url line plus its output path.
-- Any previously downloaded copy is renamed to *.old first, because curl's
-- "check timestamp and overwrite file" handling sucks:
--   - only ONE timestamp check is possible per curl command, and
--   - curl DELETES the existing file when the timestamped re-download fails,
--     unless we change directory first, which won't work here.
local addDownload = function(host, URL, f, r, k)
	local rel = host .. "/merged/dists/" .. r .. k
	f:write('url "http://' .. host .. URL .. '/merged/dists/' .. r .. k .. '"\n')
	f:write('output "results/' .. rel .. '"\n')
	os.execute("if [ -f results/" .. rel .. " ]; then mv results/" .. rel ..
		" results/" .. rel .. ".old; fi")
end
-- After the background curl run: if the timestamped re-download wiped the file
-- but an .old copy survives, restore it; then (when Integrity testing) verify
-- each downloaded .gpg signature against the matching Release file with gpgv.
--   host - mirror name; r - release name; k - release file suffix (e.g. "/Release.gpg").
local postDownload = function(host, r, k)
os.execute("if [ -f results/" .. host .. "/merged/dists/" .. r .. k .. ".old ]" ..
" && [ ! -f results/" .. host .. "/merged/dists/" .. r .. k .. " ]; then cp" ..
" results/" .. host .. "/merged/dists/" .. r .. k .. ".old" ..
" results/" .. host .. "/merged/dists/" .. r .. k .. "; fi")
if testing("Integrity") then
if ".gpg" == k:sub(-4, -1) then
-- k:sub(1, -5) strips ".gpg" to name the signed file itself.
local status, out = execute("gpgv --keyring /usr/share/keyrings/devuan-keyring.gpg results/" .. host .. "/merged/dists/" .. r .. k ..
" results/" .. host .. "/merged/dists/" .. r .. k:sub(1, -5) .. " 2>/dev/null")
if "0" ~= status then E("GPG check failed - " .. host .. "/merged/dists/" .. r .. k, "http", "Integrity", host) end
end
end
end
-- Shell fragments shared with the main body: flock guard prefix and the common curl options.
local downloadLock = "flock -n results/curl-"
local download = "curl --connect-timeout " .. options.timeout.value .. " --create-dirs -L -z 'results/stamp.old' -v -R "
-- Queue downloads for one mirror into results/<host>.curl (curl -K config format),
-- then fork a flock-guarded curl to fetch them in the background.
--   host    - mirror name; URL - base path (nil -> "/");
--   release - release name, only used with list;
--   list    - optional newline-separated file list to fetch instead of the default
--             Release files ("" means nothing to add; nil means use the defaults).
local downloads = function(host, URL, release, list)
	if nil == URL then URL = "/" end
	local lock = "%s-" .. host .. ".lock"
	-- Renamed from 'log': the old name shadowed the file-level log() function.
	local errLog = " --stderr results/curl-%s_" .. host .. ".log"
	local cm = "ionice -c3 nice -n 19 " .. downloadLock .. lock:format("META") .. " " .. download .. errLog:format("META") .. " -K results/" .. host .. ".curl"
	if testing("IPv4") and (not testing("IPv6")) then cm = cm .. ' -4' end
	if (not testing("IPv4")) and testing("IPv6") then cm = cm .. ' -6' end
	-- BUG FIX: f and e were assigned without 'local', leaking as globals and
	-- racing between the forked per-mirror processes' parent state.
	local f, e = io.open("results/" .. host .. ".curl", "a+")
	if nil == f then C("opening curl file - " .. e); return end
	if nil ~= list then
		if "" ~= list then
			for l in list:gmatch("\n*([^\n]+)\n*") do
				addDownload(host, URL, f, release, "/" .. l)
			end
			f:close()
			return
		end
	else
		--[[
		for i, s in pairs(referenceDevs) do
			cm = cm .. " https://" .. host .. URL .. "/" .. s
		end
		for i, s in pairs(referenceDebs) do
			cm = cm .. " https://" .. host .. URL .. "/" .. s
		end
		execute(cm)
		]]
		for i, s in pairs(releases) do
			for j, k in pairs(releaseFiles) do
				if repoExists(s .. k) then
					addDownload(host, URL, f, s, k)
				end
			end
		end
	end
	f:close()
	fork(cm)
end
-- Fetch mirror_list.txt from the reference site and parse its "Key: value" stanzas
-- into a table keyed by FQDN. Each mirror gets its DNS records attached (via
-- gatherIPs), inactive mirrors are dropped, the DNS round-robin pseudo-mirror is
-- added when DNSRR testing is on, and the result is persisted to results/mirrors.lua.
local getMirrors = function ()
	local mirrors = {}
	local host = ""
	local m = {}
	local active = true
	local URL = "https://" .. options.referenceSite.value .. "/mirror_list.txt"
	I("getting mirrors.")
	local p, c, h = http.request(URL)
	if nil == p then E(c .. " fetching " .. URL) else
		for l in p:gmatch("\n*([^\n]+)\n*") do
			local t, d = l:match("(%a*):%s*(.*)")
			-- BUG FIX: lines with no "key: value" shape (blank-ish or comment lines)
			-- left d nil and crashed string.lower(nil); skip them instead.
			if nil ~= d then
				d = string.lower(d)
				-- A new FQDN line starts the next stanza; commit the previous mirror first.
				if "FQDN" == t then
					if "" ~= host then
						if active then mirrors[host] = m end
						m = {}
						active = true
					end
					host = d
					m[t] = d
					gatherIPs(host)
					m["IPs"] = IP[host]
				elseif "Protocols" == t then
					local prot = {}
					for w in d:gmatch("(%w+)") do
						prot[w] = true;
					end
					m[t] = prot
				elseif "Active" == t and nil == d:find("yes", 1, true) then
					W("Mirror " .. host .. " is not active - " .. d, "", "", host)
					active = false
				-- TODO - Should do some input validation on BaseURL, and everything else.
				else
					m[t] = d
				end
			end
		end
		-- Commit the final stanza (there's no trailing FQDN line to trigger it).
		if "" ~= host and active then
			mirrors[host] = m
		end
	end
	if testing("DNSRR") then
		mirrors[options.roundRobin.value] = { ["Protocols"] = { ["http"] = true; ["https"] = true; }; ["FQDN"] = 'deb.devuan.org'; ["Active"] = 'yes'; ["BaseURL"] = 'deb.devuan.org'; }
		gatherIPs(options.roundRobin.value)
		mirrors[options.roundRobin.value].IPs = IP[options.roundRobin.value]
	end
	-- Persist so the forked per-mirror children can loadfile() it.
	local file, e = io.open("results/mirrors.lua", "w+")
	if nil == file then C("opening mirrors file - " .. e) else
		file:write(dumpTable(mirrors, "", "mirrors") .. "\nreturn mirrors\n")
		file:close()
	end
	return mirrors
end
-- Command-line parsing. Short flags (-v -q -k -n -o) tweak globals; long options
-- (--name=value) update the options table; anything else is a positional argument
-- collected into arg. Flags that children must inherit accumulate in sendArgs.
if 0 ~= #args then
	local option = ""
	for i, a in pairs(args) do
		if ("--help" == a) or ("-h" == a) then
			print("I should write some docs, huh? Read README.md for instructions.")
		elseif "--version" == a then
			print("apt-panopticon version 0.1 WIP development version")
		elseif "-v" == a then
			verbosity = verbosity + 1
			sendArgs = sendArgs .. a .. " "
		elseif "-q" == a then
			verbosity = -1
			sendArgs = sendArgs .. a .. " "
		elseif "-k" == a then
			keep = true
		elseif "-n" == a then
			fork = false
		elseif "-o" == a then
			origin = true
		elseif "--" == a:sub(1, 2) then
			local s, e = a:find("=")
			-- BUG FIX: with no "=", e used to be set to -1, so a:sub(3, e - 1) chopped
			-- the final character off the option name and e.g. "--tests" was reported
			-- as "Unknown option --test". Valueless options that then matched anyway
			-- would have stored garbage, so they are now rejected explicitly.
			if nil == s then
				option = a:sub(3)
			else
				option = a:sub(3, e - 1)
			end
			local o = options[option]
			if nil == o then
				print("Unknown option --" .. option)
				option = ""
			elseif nil == s then
				print("Option --" .. option .. " needs a value, use --" .. option .. "=value")
				option = ""
			else
				sendArgs = sendArgs .. a .. " "
				if "table" == options[option].typ then
					-- Bare names replace the whole list; +name / -name edit the default list.
					local result = {}
					for t in (a:sub(e + 1) .. ","):gmatch("([+%-]?%w*),") do
						local f = t:sub(1, 1)
						if ("+" ~= f) and ("-" ~= f) then
							table.insert(result, t)
						end
					end
					if 0 ~= #result then
						options[option].value = result
					else
						for t in (a:sub(e + 1) .. ","):gmatch("([+%-]?%w*),") do
							local f = t:sub(1, 1)
							local n = t:sub(2, -1)
							if "+" == f then
								table.insert(options[option].value, n)
							elseif "-" == f then
								local r = {}
								for i, k in pairs(options[option].value) do
									if k ~= n then table.insert(r, k) end
								end
								options[option].value = r
							end
						end
					end
				else
					options[option].value = a:sub(e + 1, -1)
				end
				option = ""
			end
		elseif "-" == a:sub(1, 1) then
			print("Unknown option " .. a)
		else
			table.insert(arg, a)
		end
	end
end
--print(dumpTable(options.tests.value, "", "tests"))
-- Main body. With a positional argument this is a forked per-mirror child that
-- tests one host (optionally one IP / one file); with no arguments it is the
-- parent run that fetches the mirror list, forks a child per mirror, waits for
-- everything to finish, and generates the reports.
if 0 < #arg then
	-- Child mode: arg[1] = host[/path], arg[2] = optional IP, arg[3] = optional file.
	if "/" == arg[1]:sub(-1, -1) then
		W("slash at end of path! " .. arg[1])
		arg[1] = arg[1]:sub(1, -2)
	end
	if " " == arg[1]:sub(-1, -1) then
		W("space at end of path! " .. arg[1])
		arg[1] = arg[1]:sub(1, -2)
	end
	local pu = url.parse("http://" .. arg[1])
	if nil ~= arg[2] then
		logFile, e = io.open("results/LOG_" .. pu.host .. "_" .. arg[2] .. ".html", "a+")
	else
		logFile, e = io.open("results/LOG_" .. pu.host .. ".html", "a+")
	end
	if nil == logFile then C("opening log file - " .. e); return end
	logPre()
	I("Starting tests for " ..arg[1] .. " with these tests - " .. table.concat(options.tests.value, ", "))
	mirrors = loadfile("results/mirrors.lua")()
	if nil ~= arg[2] then I(" Using IP " .. arg[2]); ip = arg[2] end
	if nil ~= arg[3] then I(" Using file " .. arg[3]); end
	-- Pre-seed the per-protocol result counters so the report sees zeros, not nils.
	for k, v in pairs{"ftp", "http", "https", "rsync"} do
		if testing(v) then
			local tests = {errors = 0; warnings = 0}
			if testing("Integrity") then tests.Integrity = {errors = 0; warnings = 0} end
			if testing("Protocol") then tests.Protocol = {errors = 0; warnings = 0} end
			if testing("Updated") then tests.Updated = {errors = 0; warnings = 0} end
			if testing("URLSanity") then tests.URLSanity = {errors = 0; warnings = 0} end
			results[v] = tests
		end
	end
	if testing("Integrity") or testing("Updated") then
		if nil == arg[2] then
			I("Starting file downloads for " .. pu.host)
			-- if not keep then execute("rm -fr results/" .. pu.host) end
			downloads(pu.host, pu.path)
			-- checkExes("apt-panopticon.lua " .. sendArgs)
			-- checkExes(downloadLock)
		end
	end
	if origin then
		checkFiles(pu.host, pu.host, pu.path);
	else
		checkHost(pu.host, pu.host, pu.path, arg[2], arg[3])
	end
	if testing("Integrity") or testing("Updated") then
		if nil == arg[2] then
			-- Wait for the background curl (guarded by the META lock) to finish.
			while 0 < checkExes(downloadLock .. "META-" .. pu.host .. ".lock") do os.execute("sleep 10") end
			os.execute("rm -f results/" .. pu.host .. ".curl")
			for i, n in pairs(releases) do
				for l, o in pairs(releaseFiles) do
					-- BUG FIX: was repoExists(i .. o), testing the table INDEX instead of
					-- the release name, so the not-exists filter never matched (compare the
					-- correct repoExists(s .. k) in checkFiles).
					if repoExists(n .. o) then
						postDownload(pu.host, n, o)
					end
				end
			end
		end
	end
	-- Persist this child's counters for the parent/report phase.
	local f = pu.host
	if "" ~= ip then f = f .. "_" .. ip end
	local rfile, e = io.open("results/" .. f .. ".lua", "w+")
	if nil == rfile then C("opening results file - " .. e) else
		rfile:write(dumpTable(results, "", "results") .. "\nreturn results\n")
		rfile:close()
	end
	logPost()
	logFile:close()
else
	-- Parent mode: set up results/, stamp files for curl's -z timestamp check.
	os.execute("mkdir -p results; if [ -f results/stamp ]; then mv results/stamp results/stamp.old; else touch results/stamp.old -t 199901010000; fi; touch results/stamp")
	if not keep then
		os.execute("rm -f results/*.curl")
		os.execute("rm -f results/*.log")
		os.execute("rm -f results/*.html")
		os.execute("rm -f results/*.txt")
	end
	os.execute("rm -f results/*.check")
	logFile, e = io.open("results/LOG_apt-panopticon.html", "a+")
	if nil == logFile then C("opening log file - " .. e); return end
	logPre()
	I("Starting tests " .. table.concat(options.tests.value, ", "))
	os.execute("mkdir -p results")
	mirrors = getMirrors()
	checkHost(options.referenceSite.value)
	for k, m in pairs(mirrors) do
		if "/" == m.BaseURL:sub(-1, -1) then
			W("slash at end of BaseURL in mirror_list.txt! " .. m.BaseURL, "", "", m.FQDN)
			m.BaseURL = m.BaseURL:sub(1, -2)
		end
		if " " == m.BaseURL:sub(-1, -1) then
			W("space at end of BaseURL in mirror_list.txt! " .. m.BaseURL, "", "", m.FQDN)
			m.BaseURL = m.BaseURL:sub(1, -2)
		end
		local pu = url.parse("http://" .. m.BaseURL)
		if options.referenceSite.value ~= pu.host then
			checkHost(m.BaseURL)
			checkExes("apt-panopticon.lua " .. sendArgs)
			if testing("Integrity") or testing("Updated") then checkExes(downloadLock) end
		end
	end
	-- Wait for every forked child (and their downloads) to drain.
	while 1 <= checkExes("apt-panopticon.lua " .. sendArgs) do os.execute("sleep 10") end
	if testing("Integrity") or testing("Updated") then
		while 0 < checkExes(downloadLock) do os.execute("sleep 10") end
	end
	os.execute("rm -f results/*.check; rm -f results/*.lock")
	-- Create the reports.
	for n, r in pairs(options.reports.value) do
		local report = "apt-panopticon-report-" .. r .. ".lua"
		local rfile, e = io.open(report, "r")
		if nil == rfile then C("opening " .. report .. " file - " .. e) else
			rfile:close()
			I("Creating " .. report .. " report.")
			execute("./" .. report .. " ")
		end
	end
	-- os.execute("rm -f results/*.lua")
	logPost()
	logFile:close()
end