From 485e1065098ba2b8ae1397f8f48b027f148d66d3 Mon Sep 17 00:00:00 2001
From: dvs1
Date: Sun, 26 Jan 2025 14:14:17 +1000
Subject: Moar everything! Cleaned up the display. Added a time stamp. SORTED!

---
 SuckItFos             |  3 ++-
 SuckItPm              |  3 ++-
 TODO.md               |  2 --
 notYetAnotherWiki.lua | 40 ++++++++++++++++++++++++++++++++--------
 4 files changed, 36 insertions(+), 12 deletions(-)

diff --git a/SuckItFos b/SuckItFos
index 0f6f8a8..dc65505 100755
--- a/SuckItFos
+++ b/SuckItFos
@@ -17,12 +17,13 @@ find /opt/Foswiki/data ${filter} \
 do
     base=`echo "${line}" | cut -d '/' -f 1`
     file=`echo "${line}" | cut -d '/' -f 2- | rev | cut -b 5- | rev`
+    time=`date --rfc-3339=seconds -ur /opt/Foswiki/data/${base}/${file}.txt | cut -d '+' -f 1`
     mkdir -p Foswiki/$base
     mkdir -p Foswiki/${base}/`dirname ${file}`
     mkdir -p combined/$base
     mkdir -p combined/${base}/`dirname ${file}`
     echo "Converting ${URL}/${base}/${file}?cover=print -> Foswiki/${base}/${file}.md"
-    echo -e "ogWiki=Foswiki\nogURL=${URL}/${base}/${file}\n" > Foswiki/${base}/${file}.md.md
+    echo -e "ogWiki=Foswiki\nogURL=${URL}\nogBase=${base}\nogFile=${file}\ntimestamp=${time}\n" > Foswiki/${base}/${file}.md.md
     # Doesn't help with redownloads, coz natch a dynamic site isn't cached. But I can at least comment out the curl command during testing to save time.
     curl --silent --no-progress-meter ${URL}/${base}/${file}?cover=print -o Foswiki/${base}/${file}.HTM
     cp Foswiki/${base}/${file}.HTM Foswiki/${base}/${file}.HTM_ORIGINAL
diff --git a/SuckItPm b/SuckItPm
index fd0b048..246f485 100755
--- a/SuckItPm
+++ b/SuckItPm
@@ -17,10 +17,11 @@ find /opt/pmwiki/wiki.d ${filter} \
 do
     base=`echo "${line}" | cut -d '.' -f 1`
     file=`echo "${line}" | cut -d '.' -f 2`
+    time=`date --rfc-3339=seconds -ur /opt/pmwiki/wiki.d/${base}.${file} | cut -d '+' -f 1`
     mkdir -p PmWiki/$base
     mkdir -p combined/$base
     echo "Converting ${URL}/?n=${base}.${file}?action=print -> PmWiki/${base}/${file}.md"
-    echo -e "ogWiki=PmWiki\nogURL=${URL}/?n=${base}.${file}\n" > PmWiki/${base}/${file}.md.md
+    echo -e "ogWiki=PmWiki\nogURL=${URL}\nogBase=${base}\nogFile=${file}\ntimestamp=${time}\n" > PmWiki/${base}/${file}.md.md
     # Doesn't help with redownloads, coz natch a dynamic site isn't cached. But I can at least comment out the curl command during testing to save time.
     # curl --no-progress-meter ${URL}/?n=${base}.${file}?action=markdown -o PmWiki/${base}/${file}.MD
     curl --no-progress-meter ${URL}/?n=${base}.${file}?action=print -o PmWiki/${base}/${file}.HTM
diff --git a/TODO.md b/TODO.md
index b36116d..b087755 100644
--- a/TODO.md
+++ b/TODO.md
@@ -2,8 +2,6 @@
 
 ## Do these
 
-everything page - shorten the displayed URLs, don't need to see the base of the other wikis over and over.
-
 Convert the links in the converted pages to point to their new home. We have ogURL for the source now, should help.
 
 Bugs -
diff --git a/notYetAnotherWiki.lua b/notYetAnotherWiki.lua
index dc64fa2..bddf63a 100755
--- a/notYetAnotherWiki.lua
+++ b/notYetAnotherWiki.lua
@@ -248,21 +248,45 @@ for name, file in pairs(Files) do
   end
 end
 
 
+
+
+-- Figure out the original title and link for the original wiki.
+-- This functions assume the above file and sub scan has completed.
+local whichWiki = function(metadata)
+  local title, link = '', ''
+  if 'PmWiki' == metadata.ogWiki then
+    title = '[' .. metadata.ogBase .. '.' .. metadata.ogFile .. ']'
+    link = '(' .. metadata.ogURL .. '/?n=' .. metadata.ogBase .. '.' .. metadata.ogFile .. ')'
+  end
+  if 'Foswiki' == metadata.ogWiki then
+    title = '[' .. metadata.ogBase .. '/' .. metadata.ogFile .. ']'
+    link = '(' .. metadata.ogURL .. '/' .. metadata.ogBase .. '/' .. metadata.ogFile .. ')'
+  end
+  return title, link
+end
+
+
 -- Create an "everything" page, for URL links to every file.HTML.
---- TODO - sort the list of pages.
 local bdy, h = Files['everything'].body, io.open('everything.md', 'a+')
-bdy = bdy .. '\n\n| page | converted | Foswiki | PmWiki | \n| ---- | --------- | ------- | ------ | '
+bdy = bdy .. '\n\n| page | converted | original page | last edited UTC | \n| ---- | --------- | ------- | --------------- | '
+pages = {}
 for name, file in pairs(Files) do
   local metadata = derefTable(Files[name].metadata, true)
   if 'everything' ~= name then
-    local ln, fw, pw = 'DUNNO', '', ''
-    if 'PmWiki' == metadata.ogWiki then pw = '[' .. metadata.ogURL .. '](' .. metadata.ogURL .. ')' end
-    if 'Foswiki' == metadata.ogWiki then fw = '[' .. metadata.ogURL .. '](' .. metadata.ogURL .. ')' end
+    local ln, fw, pw, ts = 'DUNNO', '', '', ''
+    local title, link = whichWiki(metadata)
+    if 'PmWiki' == metadata.ogWiki then pw = 'PmWiki ' .. title .. link end
+    if 'Foswiki' == metadata.ogWiki then fw = 'Foswiki ' .. title .. link end
+    if nil ~= metadata.timestamp then ts = metadata.timestamp end
     if nil ~= file.bit then ln = file.bit end
-    bdy = bdy .. '\n| ' .. name .. ' | [' .. ln .. '](<' .. name .. '.HTML>) | ' .. fw .. ' | ' .. pw .. ' |'
+    table.insert(pages, '\n| ' .. name .. ' | [' .. ln .. '](<' .. name .. '.HTML>) | ' .. fw .. ' ' .. pw .. ' | ' .. ts .. ' |')
   end
 end
+table.sort(pages, function(a, b) return (string.lower(a) < string.lower(b)) end)
+for i, f in ipairs(pages) do
+  bdy = bdy .. f
+end
 h:write(bdy)
 h:close()
 toFile('everything', 'body', parse(bdy))
@@ -330,7 +354,6 @@ local linkFrom = function(source, dest)
 end
 
 
-
 -- More of this actually doing things nonsense.
 -- Loop through Subs, doing whichPage and inheritance.
 
@@ -375,7 +398,8 @@ for name, file in pairs(Files) do
 
   for m, x in pairs(globalData) do if nil == metadata[m] then metadata[m] = x end end
   if nil ~= metadata.ogURL then
-    body = body .. '\n\n---\n\n[Original page](' .. metadata.ogURL .. '), maybe you can edit it.\n'
+    local title, link = whichWiki(metadata)
+    body = body .. '\n\n---\n\n[Original page]' .. link .. ', maybe you can edit it.\n'
   end
 
   -- Figure out this pages trail links.
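
A minimal, self-contained sketch (not part of the patch above) of what the new whichWiki helper returns, and how the everything page and the per-page footer compose that into a Markdown link. The metadata values are invented examples, in the shape SuckItPm now writes into each file.md.md:

-- Copy of the whichWiki helper from the patch, so this sketch runs on its own.
local whichWiki = function(metadata)
  local title, link = '', ''
  if 'PmWiki' == metadata.ogWiki then
    title = '[' .. metadata.ogBase .. '.' .. metadata.ogFile .. ']'
    link = '(' .. metadata.ogURL .. '/?n=' .. metadata.ogBase .. '.' .. metadata.ogFile .. ')'
  end
  if 'Foswiki' == metadata.ogWiki then
    title = '[' .. metadata.ogBase .. '/' .. metadata.ogFile .. ']'
    link = '(' .. metadata.ogURL .. '/' .. metadata.ogBase .. '/' .. metadata.ogFile .. ')'
  end
  return title, link
end

-- Hypothetical metadata; the URL, base, file and timestamp are made up for illustration.
local metadata = {
  ogWiki = 'PmWiki',
  ogURL = 'https://wiki.example.net',
  ogBase = 'Main',
  ogFile = 'HomePage',
  timestamp = '2025-01-26 04:14:17',
}

local title, link = whichWiki(metadata)
-- Prints: PmWiki [Main.HomePage](https://wiki.example.net/?n=Main.HomePage)
-- The timestamp would fill the "last edited UTC" column of the everything table.
print('PmWiki ' .. title .. link)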