| author | dvs1 | 2025-01-26 14:14:17 +1000 |
|---|---|---|
| committer | dvs1 | 2025-01-26 14:14:17 +1000 |
| commit | 485e1065098ba2b8ae1397f8f48b027f148d66d3 (patch) | |
| tree | e048e71aa0aec72af08cd90422720b29e20dacb9 | |
| parent | everything TODO++ (diff) | |
Moar everything!
Cleaned up the display.
Added a time stamp.
SORTED!
Diffstat
| mode | file | lines changed |
|---|---|---|
| -rwxr-xr-x | SuckItFos | 3 |
| -rwxr-xr-x | SuckItPm | 3 |
| -rw-r--r-- | TODO.md | 2 |
| -rwxr-xr-x | notYetAnotherWiki.lua | 40 |

4 files changed, 36 insertions, 12 deletions
diff --git a/SuckItFos b/SuckItFos
@@ -17,12 +17,13 @@ find /opt/Foswiki/data ${filter} \
 do
 base=`echo "${line}" | cut -d '/' -f 1`
 file=`echo "${line}" | cut -d '/' -f 2- | rev | cut -b 5- | rev`
+time=`date --rfc-3339=seconds -ur /opt/Foswiki/data/${base}/${file}.txt | cut -d '+' -f 1`
 mkdir -p Foswiki/$base
 mkdir -p Foswiki/${base}/`dirname ${file}`
 mkdir -p combined/$base
 mkdir -p combined/${base}/`dirname ${file}`
 echo "Converting ${URL}/${base}/${file}?cover=print -> Foswiki/${base}/${file}.md"
-echo -e "ogWiki=Foswiki\nogURL=${URL}/${base}/${file}\n" > Foswiki/${base}/${file}.md.md
+echo -e "ogWiki=Foswiki\nogURL=${URL}\nogBase=${base}\nogFile=${file}\ntimestamp=${time}\n" > Foswiki/${base}/${file}.md.md
 # Doesn't help with redownloads, coz natch a dynamic site isn't cached. But I can at least comment out the curl command during testing to save time.
 curl --silent --no-progress-meter ${URL}/${base}/${file}?cover=print -o Foswiki/${base}/${file}.HTM
 cp Foswiki/${base}/${file}.HTM Foswiki/${base}/${file}.HTM_ORIGINAL
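The sidecar `.md.md` files now carry one `key=value` pair per line instead of a single prebuilt URL. A minimal Lua sketch of reading such a file back into a metadata table (the function name and example path are made up for illustration, not part of the repo):

```lua
-- Hypothetical reader for the key=value sidecar files written above.
local function readSidecar(path)
  local metadata = {}
  local h = io.open(path, 'r')
  if nil == h then return metadata end
  for line in h:lines() do
    -- Lines look like "ogWiki=Foswiki"; the trailing blank line has no '=' and is skipped.
    local key, value = line:match('^(%w+)=(.*)$')
    if key then metadata[key] = value end
  end
  h:close()
  return metadata
end

local metadata = readSidecar('Foswiki/Main/WebHome.md.md')
print(metadata.ogWiki, metadata.ogURL, metadata.timestamp)
```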
diff --git a/SuckItPm b/SuckItPm
@@ -17,10 +17,11 @@ find /opt/pmwiki/wiki.d ${filter} \
 do
 base=`echo "${line}" | cut -d '.' -f 1`
 file=`echo "${line}" | cut -d '.' -f 2`
+time=`date --rfc-3339=seconds -ur /opt/pmwiki/wiki.d/${base}.${file} | cut -d '+' -f 1`
 mkdir -p PmWiki/$base
 mkdir -p combined/$base
 echo "Converting ${URL}/?n=${base}.${file}?action=print -> PmWiki/${base}/${file}.md"
-echo -e "ogWiki=PmWiki\nogURL=${URL}/?n=${base}.${file}\n" > PmWiki/${base}/${file}.md.md
+echo -e "ogWiki=PmWiki\nogURL=${URL}\nogBase=${base}\nogFile=${file}\ntimestamp=${time}\n" > PmWiki/${base}/${file}.md.md
 # Doesn't help with redownloads, coz natch a dynamic site isn't cached. But I can at least comment out the curl command during testing to save time.
 # curl --no-progress-meter ${URL}/?n=${base}.${file}?action=markdown -o PmWiki/${base}/${file}.MD
 curl --no-progress-meter ${URL}/?n=${base}.${file}?action=print -o PmWiki/${base}/${file}.HTM
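Both scripts build the new timestamp the same way: GNU `date --rfc-3339=seconds -ur FILE` prints the file's modification time in UTC (e.g. `2025-01-26 04:14:17+00:00`) and `cut -d '+' -f 1` strips the zone suffix. A rough Lua equivalent, assuming LuaFileSystem is available (the repo itself does this in shell, not Lua):

```lua
local lfs = require('lfs')  -- assumption: LuaFileSystem, not a dependency the repo names

-- File mtime as "YYYY-MM-DD HH:MM:SS" in UTC, matching
-- `date --rfc-3339=seconds -ur FILE | cut -d '+' -f 1`.
local function fileTimestamp(path)
  local mtime = lfs.attributes(path, 'modification')
  if nil == mtime then return '' end
  return os.date('!%Y-%m-%d %H:%M:%S', mtime)  -- leading '!' selects UTC
end

print(fileTimestamp('TODO.md'))
```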
diff --git a/TODO.md b/TODO.md
@@ -2,8 +2,6 @@
 
 ## Do these
 
-everything page - shorten the displayed URLs, don't need to see the base of the other wikis over and over.
-
 Convert the links in the converted pages to point to their new home. We have ogURL for the source now, should help.
 
 Bugs -
diff --git a/notYetAnotherWiki.lua b/notYetAnotherWiki.lua
index dc64fa2..bddf63a 100755
--- a/notYetAnotherWiki.lua
+++ b/notYetAnotherWiki.lua
@@ -248,21 +248,45 @@ for name, file in pairs(Files) do
 end
 end
 
+
+
+-- Figure out the original title and link for the original wiki.
+-- This functions assume the above file and sub scan has completed.
+local whichWiki = function(metadata)
+  local title, link = '', ''
+  if 'PmWiki' == metadata.ogWiki then
+    title = '[' .. metadata.ogBase .. '.' .. metadata.ogFile .. ']'
+    link = '(' .. metadata.ogURL .. '/?n=' .. metadata.ogBase .. '.' .. metadata.ogFile .. ')'
+  end
+  if 'Foswiki' == metadata.ogWiki then
+    title = '[' .. metadata.ogBase .. '/' .. metadata.ogFile .. ']'
+    link = '(' .. metadata.ogURL .. '/' .. metadata.ogBase .. '/' .. metadata.ogFile .. ')'
+  end
+  return title, link
+end
+
+
 -- Create an "everything" page, for URL links to every file.HTML.
--- TODO - sort the list of pages.
 local bdy, h = Files['everything'].body, io.open('everything.md', 'a+')
-bdy = bdy .. '\n\n| page | converted | Foswiki | PmWiki | \n| ---- | --------- | ------- | ------ | '
+bdy = bdy .. '\n\n| page | converted | original page | last edited UTC | \n| ---- | --------- | ------- | --------------- | '
+pages = {}
 for name, file in pairs(Files) do
   local metadata = derefTable(Files[name].metadata, true)
   if 'everything' ~= name then
-    local ln, fw, pw = 'DUNNO', '', ''
-    if 'PmWiki' == metadata.ogWiki then pw = '[' .. metadata.ogURL .. '](' .. metadata.ogURL .. ')' end
-    if 'Foswiki' == metadata.ogWiki then fw = '[' .. metadata.ogURL .. '](' .. metadata.ogURL .. ')' end
+    local ln, fw, pw, ts = 'DUNNO', '', '', ''
+    local title, link = whichWiki(metadata)
+    if 'PmWiki' == metadata.ogWiki then pw = 'PmWiki ' .. title .. link end
+    if 'Foswiki' == metadata.ogWiki then fw = 'Foswiki ' .. title .. link end
+    if nil ~= metadata.timestamp then ts = metadata.timestamp end
     if nil ~= file.bit then ln = file.bit
     end
-    bdy = bdy .. '\n| ' .. name .. ' | [' .. ln .. '](<' .. name .. '.HTML>) | ' .. fw .. ' | ' .. pw .. ' |'
+    table.insert(pages, '\n| ' .. name .. ' | [' .. ln .. '](<' .. name .. '.HTML>) | ' .. fw .. ' ' .. pw .. ' | ' .. ts .. ' |')
   end
 end
+table.sort(pages, function(a, b) return (string.lower(a) < string.lower(b)) end)
+for i, f in ipairs(pages) do
+  bdy = bdy .. f
+end
 h:write(bdy)
 h:close()
 toFile('everything', 'body', parse(bdy))
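A quick sanity check of the new `whichWiki` helper and the case-insensitive sort, assuming `whichWiki` from the hunk above is in scope (the metadata values are made up):

```lua
-- Made-up metadata, shaped like what the SuckIt* sidecar files provide.
local metadata = {
  ogWiki = 'PmWiki',
  ogURL  = 'https://example.org/pmwiki',
  ogBase = 'Main',
  ogFile = 'HomePage',
}
local title, link = whichWiki(metadata)
print(title .. link)  --> [Main.HomePage](https://example.org/pmwiki/?n=Main.HomePage)

-- The rows of the everything table now sort case-insensitively.
local pages = { '\n| Zebra | ... |', '\n| apple | ... |' }
table.sort(pages, function(a, b) return (string.lower(a) < string.lower(b)) end)
print(pages[1])  --> | apple | ... |  (a plain sort would put Zebra first)
```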
@@ -330,7 +354,6 @@ local linkFrom = function(source, dest)
 end
 
 
-
 -- More of this actually doing things nonsense.
 
 -- Loop through Subs, doing whichPage and inheritance.
@@ -375,7 +398,8 @@ for name, file in pairs(Files) do
   for m, x in pairs(globalData) do if nil == metadata[m] then metadata[m] = x end end
 
   if nil ~= metadata.ogURL then
-    body = body .. '\n\n---\n\n[Original page](' .. metadata.ogURL .. '), maybe you can edit it.\n'
+    local title, link = whichWiki(metadata)
+    body = body .. '\n\n---\n\n[Original page]' .. link .. ', maybe you can edit it.\n'
   end
 
   -- Figure out this pages trail links.
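With the made-up sample metadata from the sketch above, the footer appended to a converted page would read `[Original page](https://example.org/pmwiki/?n=Main.HomePage), maybe you can edit it.` — a proper wiki page link rather than the old raw `ogURL` form.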
