From bcf74340a276aee6aaf0d0b2d6b07b29e50f0b25 Mon Sep 17 00:00:00 2001
From: dvs1
Date: Wed, 26 Feb 2025 20:08:29 +1000
Subject: YAR - mostly theming and link conversion.  Includes the new "hide all but content" icon.

---
 SuckIt                |  67 ++++++++++++--------
 TODO.md               |   4 +-
 default.template      |  94 ++++++++++++++++++---------
 notYetAnotherWiki.lua | 172 +++++++++++++++++++++++++++++++++++++++++---------
 testing/index.md      |   2 +
 5 files changed, 254 insertions(+), 85 deletions(-)

diff --git a/SuckIt b/SuckIt
index d67844b..8950850 100755
--- a/SuckIt
+++ b/SuckIt
@@ -13,6 +13,10 @@ mkdir -p unsorted
 rm -fr users
 mkdir -p users
 
+# Copy across things like images that were uploaded.
+mkdir -p /opt/nyaw/Foswiki/pub/
+# TODO - Should rsync this instead.
+cp -r /opt/Foswiki/pub/Main /opt/nyaw/Foswiki/pub/
 filter="
 -name _default -prune -o \
 -name _empty -prune -o \
@@ -20,40 +24,46 @@ filter="
 -name Trash -prune -o \
 -name TWiki -prune -o \
 "
-URL="https://fos.wiki.devuan.org"
+ogURL="https://fos.wiki.devuan.org"
+ogWiki="Foswiki"
 
 time find /opt/Foswiki/data ${filter} \
 -name "*.txt" -type f,l -printf "%P\n" | while read line
 do
     base=`echo "${line}" | cut -d '/' -f 1`
     file=`echo "${line}" | cut -d '/' -f 2- | rev | cut -b 5- | rev`
-    if [[ ! $file =~ (AdminGroup|AdminUser|AdminUserLeftBar|EditorGroup|GroupTemplate|GroupViewTemplate|NobodyGroup|PatternSkinUserViewTemplate|ProjectContributor|RegistrationAgent|SitePreferences|UnprocessedRegistrations|UnprocessedRegistrationsLog|UserHomepageHeader|UserList|UserListByDateJoined|UserListByLocation|UserList|UserListHeader|WebAtom|WebChanges|WebCreateNewTopic|WebHome|WebIndex|WebLeftBar|WebLeftBarExample|WebNotify|WebPreferences|WebRss|WebSearch|WebSearchAdvanced|WebTopicList|WikiGroups|WikiUsers)$ ]]; then
+    if [[ ! ${file} =~ (AdminGroup|AdminUser|AdminUserLeftBar|CommentPluginExamples|EditorGroup|GroupTemplate|GroupViewTemplate|NobodyGroup|PatternSkinUserViewTemplate|ProjectContributor|RegistrationAgent|SitePreferences|UnprocessedRegistrations|UnprocessedRegistrationsLog|UserHomepageHeader|UserList|UserListByDateJoined|UserListByLocation|UserList|UserListHeader|WebAtom|WebChanges|WebCreateNewTopic|WebHome|WebIndex|WebLeftBar|WebLeftBarExample|WebNotify|WebPreferences|WebRss|WebSearch|WebSearchAdvanced|WebTopicList|WikiGroups|WikiUsers)$ ]]; then
+	realURL=${ogWiki}/${base}/${file}
 	time=`date --rfc-3339=seconds -ur /opt/Foswiki/data/${base}/${file}.txt | cut -d '+' -f 1`
-	mkdir -p Foswiki/$base
-	mkdir -p Foswiki/${base}/`dirname ${file}`
-	echo -e "ogWiki=Foswiki\nogURL=${URL}\nogBase=${base}\nogFile=${file}\ntimestamp=${time}\n" > Foswiki/${base}/${file}.md.md
-	echo "downloading ${URL}/${base}/${file}?cover=print"
+	mkdir -p ${ogWiki}/${base}
+	mkdir -p ${ogWiki}/${base}/`dirname ${file}`
+	echo -e "ogWiki=${ogWiki}\nogURL=${ogURL}\nrealURL=${realURL}\nogBase=${base}\nogFile=${file}\ntimestamp=${time}\n" > ${ogWiki}/${base}/${file}.md.md
+	echo "downloading ${ogURL}/${base}/${file}?cover=print"
 # Doesn't help with redownloads, coz natch a dynamic site isn't cached.  But I can at least comment out the curl command during testing to save time.
-	curl --silent --no-progress-meter ${URL}/${base}/${file}?cover=print -o Foswiki/${base}/${file}.HTM
+	curl --silent --no-progress-meter ${ogURL}/${base}/${file}?cover=print -o ${ogWiki}/${base}/${file}.HTM
 
 # Attempt to separate user profiles from user content.  Doesn't work when people turn their profiles into content.
 	if [[ "${base}" == "Main" ]]; then
 	    dest="unsorted"
 	    mkdir -p `dirname users/${file}`
-	    sed -i -E Foswiki/${base}/${file}.HTM -e "s%UserForm%%w users/${file}_fos.SED"
+	    sed -i -E ${ogWiki}/${base}/${file}.HTM -e "s%UserForm%%w users/${file}_fos.SED"
 	    if [ -s users/${file}_fos.SED ]; then
 		dest="users"
 	    fi
 	    rm users/${file}_fos.SED
 	    rm -d `dirname users/${file}` >/dev/null 2>&1
 	    mkdir -p `dirname ${dest}/${file}`
-	    touch Foswiki/${base}/${file}.md
-	    ln -sfr Foswiki/${base}/${file}.md ${dest}/${file}_fos.md
-	    ln -sfr Foswiki/${base}/${file}.md.md ${dest}/${file}_fos.md.md
-	    rm Foswiki/${base}/${file}.md
+	    realURL=${dest}/${file}
+	    echo -e "ogWiki=${ogWiki}\nogURL=${ogURL}\nrealURL=${realURL}_fos\nogBase=${base}\nogFile=${file}\ntimestamp=${time}\n" > ${ogWiki}/${base}/${file}.md.md
+	    touch ${ogWiki}/${base}/${file}.md
+	    ln -sfr ${ogWiki}/${base}/${file}.md ${dest}/${file}_fos.md
+	    ln -sfr ${ogWiki}/${base}/${file}.md.md ${dest}/${file}_fos.md.md
+	    rm ${ogWiki}/${base}/${file}.md
 	fi
     fi
 done
 
+# Copy across things like images that were uploaded.
+cp -r /opt/pmwiki/uploads /opt/nyaw/PmWiki/
 filter="
 -not -name "*~" -a \
 -not -name ".flock" -a \
@@ -61,32 +71,37 @@ filter="
 -not -name ".lastmod" -a \
 -not -name ".pageindex" -a \
 "
-URL="https://wiki.devuan.org"
+ogURL="https://wiki.devuan.org"
+ogWiki="PmWiki"
 
 time find /opt/pmwiki/wiki.d ${filter} \
 -name "*.*" -type f,l -printf "%P\n" | while read line
 do
     base=`echo "${line}" | cut -d '.' -f 1`
+    file=`echo "${line}" | cut -d '.' -f 2`
     if [[ "${base}" != "Site" ]]; then
-	file=`echo "${line}" | cut -d '.' -f 2`
+	realURL=${ogWiki}/${base}/${file}
 	time=`date --rfc-3339=seconds -ur /opt/pmwiki/wiki.d/${base}.${file} | cut -d '+' -f 1`
-	mkdir -p PmWiki/$base
-	echo -e "ogWiki=PmWiki\nogURL=${URL}\nogBase=${base}\nogFile=${file}\ntimestamp=${time}\n" > PmWiki/${base}/${file}.md.md
-#	echo "downloading ${URL}/?n=${base}.${file}?action=markdown"
-#	curl --no-progress-meter ${URL}/?n=${base}.${file}?action=markdown -o PmWiki/${base}/${file}.MARKDOWN
-	echo "downloading ${URL}/?n=${base}.${file}?action=print"
-	curl --no-progress-meter ${URL}/?n=${base}.${file}?action=print -o PmWiki/${base}/${file}.HTM
+	mkdir -p ${ogWiki}/${base}
+	echo -e "ogWiki=${ogWiki}\nogURL=${ogURL}\nrealURL=${realURL}\nogBase=${base}\nogFile=${file}\ntimestamp=${time}\n" > ${ogWiki}/${base}/${file}.md.md
+#	echo "downloading ${ogURL}/?n=${base}.${file}?action=markdown"
+#	curl --no-progress-meter ${ogURL}/?n=${base}.${file}?action=markdown -o ${ogWiki}/${base}/${file}.MARKDOWN
+	echo "downloading ${ogURL}/?n=${base}.${file}?action=print"
+	curl --no-progress-meter ${ogURL}/?n=${base}.${file}?action=print -o ${ogWiki}/${base}/${file}.HTM
 
 # Seems there's no way to tell user profiles apart from user content.  Unless I can find a list of users somewhere.  Don't think there is one.
 	if [[ "${base}" == "Profiles" ]]; then
-	    touch PmWiki/${base}/${file}.md
-	    ln -sfr PmWiki/${base}/${file}.md unsorted/${file}_pm.md
-	    ln -sfr PmWiki/${base}/${file}.md.md unsorted/${file}_pm.md.md
-	    rm PmWiki/${base}/${file}.md
+	    dest="unsorted"
+	    realURL=${dest}/${file}
+	    echo -e "ogWiki=${ogWiki}\nogURL=${ogURL}\nrealURL=${realURL}_pm\nogBase=${base}\nogFile=${file}\ntimestamp=${time}\n" > ${ogWiki}/${base}/${file}.md.md
+	    touch ${ogWiki}/${base}/${file}.md
+	    ln -sfr ${ogWiki}/${base}/${file}.md ${dest}/${file}_pm.md
+	    ln -sfr ${ogWiki}/${base}/${file}.md.md ${dest}/${file}_pm.md.md
+	    rm ${ogWiki}/${base}/${file}.md
 	fi
 
 # TODO - groups are PmWiki/Onefang and PmWiki/Tiki
-#	pandoc -f markdown -t commonmark_x --self-contained PmWiki//${base}/${file}.MD >PmWiki/${base}/${file}.md
-#	pandoc -f html -t commonmark_x --self-contained PmWiki//${base}/${file}.HTM >PmWiki/${base}/${file}.md
+#	pandoc -f markdown -t commonmark_x --self-contained ${ogWiki}//${base}/${file}.MD >${ogWiki}/${base}/${file}.md
+#	pandoc -f html -t commonmark_x --self-contained ${ogWiki}//${base}/${file}.HTM >${ogWiki}/${base}/${file}.md
     fi
 done
diff --git a/TODO.md b/TODO.md
index 722ce99..bfaf0fd 100644
--- a/TODO.md
+++ b/TODO.md
@@ -12,6 +12,8 @@ Syntax highlighting in code blocks.
 
 Other colour shenanigans.
 
+- There's a couple more "make this red" scattered around.
+
 Might be useful to automatically convert anything looking like a URL into a linky.
 
 Check the timestamps on the files, only update if source is newer than destination.  Meh, it's already 600 times faster than the pandoc version.
@@ -40,7 +42,7 @@ Bugs -
 
 - /users/dunno/Devuan Cluster.HTML   A very lengthy and complex document, I'll likely miss something, but chip away at the obvious.  Lots of colour shenanigans.
 - unsorted/JensKorte/DevuanReleases_fos.HTML the header sorting links and colours on the table look tricky to unravel.  Especially the extra header row for Daedalus, just to change it's colour.
-- unsorted/Debdog_pm.HTML several tables of links encoded as code blocks, but they linkyness gets dropped, likely by pandoc.
+- unsorted/Debdog_pm.HTML several tables of links encoded as code blocks, but their linkyness gets dropped, likely by pandoc.
 
 ## Try out
 
diff --git a/default.template b/default.template
index 9c5765f..f9a2746 100644
--- a/default.template
+++ b/default.template
@@ -7,61 +7,97 @@
-
+
- -
-
-
$body$
-
+
+
+
$body$
+
+
+
$history$
-
$footer$
+

$footer$

+

No cookies or scripts were harmed in the making of this web site. May contain low fat CSS.

+
diff --git a/notYetAnotherWiki.lua b/notYetAnotherWiki.lua
index 4207905..29ed9fb 100755
--- a/notYetAnotherWiki.lua
+++ b/notYetAnotherWiki.lua
@@ -25,7 +25,7 @@ local GlobalMetaData = {
 -- has alink, link, vlink; CSS has active, link, visited, and hover.
     devuanDevuanalink = '#03a4ff', devuanDevuanlink = '#0076b6', devuanDevuanvlink = '#6aa4db', devuanDevuanhlink = '#03a4ff',
     devuanSDevuanalink = '#98c3db', devuanSDevuanlink = '#ffffff', devuanSDevuanvlink = '#ffffff', devuanSDevuanhlink = '#98c3db',
-    karenPurple = '#8800ff', onefangPurple = '#cc00ff',
+    karenPurple = '#8800ff', onefangPurple = '#cc00ff', onefangGreen = '#42ff00',
     PinkFloyd = '#AA00AA', DeepPurple = '#220022', -- From an ancient site of mine, which went from PinkFloyd to DeepPurple as a background gradient.
     favicon = 'nYAW_icon.png', logo = 'nYAW.png',
     footer = 'Powered by notYetAnotherWiki version 0.0. ',
@@ -155,6 +155,49 @@ end
+
+
+local readMdMd = function(name, metadata)
+    local h1 = io.open(name .. '.md')
+    if nil == h1 then
+--	print('Could not open ' .. name .. '.md')
+	return {}
+    else
+	for l in h1:lines() do
+	    for k, v in string.gmatch(l, "(%w+)%s*=%s*(.+)") do
+		if nil == v then
+		    print(name .. ' ' .. k)
+		else
+		    metadata[k] = v
+		end
+	    end
+	end
+    end
+    return metadata
+end
+
+
+
+local commonLinky = function(l, body, u, url, beg, en, beg0, en0, bump)
+    if nil == url then
+--	print('OOPS! unknown linky - @' .. l .. '\t\t\t' .. string.sub(body, beg - 9, en) .. ' ' .. string.sub(body, en + 1, en0))
+    else
+	local md = readMdMd(url, {})
+--	if nil ~= md then
+	    if nil ~= md.realURL then url = md.realURL end
+--	end
+	body = string.sub(body, 1, beg - bump) .. url .. string.sub(body, en0 + 1)
+	here = here + string.len(url)
+    end
+    if 1 == bump then
+	here = here + 1
+	beg, en = RE.find(body, [['https://fos.wiki.devuan.org/']], here)
+    else
+	beg, en = RE.find(body, [["'https://wiki.devuan.org/"]], here)
+    end
+    return beg, en, body, here
+end
+
+
+
 ---------------------------------------------------------------------------------
 -- Actually start doing things.
@@ -166,12 +209,25 @@ else
     print("Can't open everything.md for writing.")
 end
 
-
 -- Scan the subdirectories looking for our files.
 local Directory = arg[1]
 toSub('')
 if nil == Directory then Directory = '.' end
 
+-- Sort out realURL for symlinked .md.md files.
+for l in io.popen('find -L ' .. Directory .. ' -name "*.md.md" -xtype l -printf "%P\n"'):lines() do
+    local metadata = readMdMd(string.sub(l, 1, -4), {})
+-- FIXME - if this already exists, compare the timestamps, most recent wins.
+    metadata.realURL = string.sub(l, 1, -7)
+    local a, e = io.open(l, 'w')
+    if nil == a then print('Could not open ' .. l .. ' - ' .. e) else
+	for k, v in pairs(metadata) do
+	    a:write(k .. '=' .. v .. '\n')
+	end
+	a:close()
+    end
+end
+
 for l in io.popen('find -L ' .. Directory .. ' -name "*.HTM" -type f,l -printf "%P\n"'):lines() do
 --  print('pandoc converting ' .. l .. ' -> ' .. string.sub(l, 1, -4) .. 'md')
     -- Open the HTM files and do the initial cleanups, then pandoc them.
@@ -203,11 +259,37 @@ for l in io.popen('find -L ' .. Directory .. ' -name "*.HTM" -type f,l -printf "
 	    {'rel="nofollow"'} -> blank / {"rel='nofollow'"} -> blank /
 	    {"target='_blank'"} -> blank /
 	    {"" ([%nl])* } -> blank /
-	    {'style="' ([^"])+ '"'} -> '' / {"style='" ([^'])+ "'"} -> '' /
+--	    {'style="' ([^"])+ '"'} -> blank / {"style='" ([^'])+ "'"} -> blank /
 	    .
 	)* ~}]], { blank = function(a) return '' end } ):match(body)
 	body = result
 --	body = RE.gsub(body, [=[{""}]=], '')	-- FIXME
+	local here = 1
+	beg, en = RE.find(body, [['https://fos.wiki.devuan.org/']], here)
+	while nil ~= beg do
+	    here = beg + 1
+	    local beg0, en0
+	    local url = nil
+	    if '"' == string.sub(body, beg - 1, beg - 1) then
+		beg0, en0 = RE.find(body, [['"']], en)
+		url = string.sub(body, en + 1, en0 - 1)
+	    end
+	    if "'" == string.sub(body, beg - 1, beg - 1) then
+		beg0, en0 = RE.find(body, [["'"]], en)
+		url = string.sub(body, en + 1, en0)
+	    end
+
+	    if nil ~= url then
+		if ('pub/' == string.sub(url, 1, 4)) then
+-- FIXME? - evil hack?
+		    url = 'Foswiki/' .. url
+		else
+		    url = nil
+		end
+	    end
+	    beg, en, body, here = commonLinky(l, body, 'https://fos.wiki.devuan.org/', url, beg, en, beg0, en0, 1)
+	end
+	writeString(l .. '_NEW', body)
     elseif 'PmWiki' == string.sub(l, 1, 6) then
 	local beg, en = RE.find(body, [['']])
 	if nil ~= beg then body = string.sub(body, en + 2) end
@@ -230,11 +312,30 @@ for l in io.popen('find -L ' .. Directory .. ' -name "*.HTM" -type f,l -printf "
 	    {"class='wikilink'"} -> blank /
 	    {'rel="nofollow"'} -> blank / {"rel='nofollow'"} -> blank /
 	    {"target='_blank'"} -> blank /
-	    {'style="' ([^"])+ '"'} -> '' / {"style='" ([^'])+ "'"} -> '' /
+--	    {'style="' ([^"])+ '"'} -> blank / {"style='" ([^'])+ "'"} -> blank /
 	    {" "
 ' .. xlnk .. ' ' .. string.gsub(ur, '.*%.', '', 1) .. '.HTML') end
-		    url = xlnk .. string.gsub(ur, '.*%.', '', 1) .. '.HTML'
+		    if 'DUNNO/' == xlnk then print('OOPS! page not found - @' .. Context.path .. ' / ' .. Context.bit .. '\t' .. url .. ' -> ' .. xlnk .. ' ' .. string.gsub(ur, '.*%.', '', 1) .. '.HTML') end
+		end
+--		if (nil ~= md) and (nil ~= md.realURL) then	url = md.realURL
+--		else
+								url = xlnk .. string.gsub(ur, '.*%.', '', 1)
+--		end
+		if 'PmWiki/uploads/' == p then
+		    url = '../' .. p .. string.gsub(ur, '%.', '.', 1)
+		elseif 'Foswiki/pub/' == p then
+		    url = '../' .. p .. ur
+		else
+		    url = url .. '.HTML'
 		end
 	    end
 	end
     end
-    return OgWriterLink(lab, url, tit)
+    return url
+end
+
+local OgWriterLink = Writer.link	-- So we can call the original from within mine, we are just changing the URL.
+function Writer.link(lab, url, tit)
+    return OgWriterLink(lab, lunaLinky(url), tit)
 end
+local OgWriterImage = Writer.image
+function Writer.image(lab, url, tit)
+    return OgWriterImage(lab, lunaLinky(url), tit)
+end
+
 local Parse = Lunamark.reader.markdown.new(Writer, LunamarkOpts)
 
 
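For anyone reading along who has not used lunamark before: the Writer.link / Writer.image overrides above are the usual wrap-the-original pattern, where the writer table's functions are replaced with versions that call the originals after munging the URL. A minimal standalone sketch of that pattern follows; the html writer, the smart option, and the gsub rewrite rule are illustrative assumptions, not the script's full lunaLinky logic.

    -- Minimal sketch of the writer-override pattern, outside the full script.
    -- Assumes the lunamark rock is installed; the URL rewrite rule is only illustrative.
    local Lunamark = require 'lunamark'

    local Writer = Lunamark.writer.html.new{}

    -- Keep the original renderer so the override can call it after rewriting the URL.
    local OgWriterLink = Writer.link
    function Writer.link(lab, url, tit)
        -- Illustrative rewrite: point old Foswiki URLs at the locally converted pages.
        url = string.gsub(url, '^https://fos%.wiki%.devuan%.org/', 'Foswiki/')
        return OgWriterLink(lab, url, tit)
    end

    local Parse = Lunamark.reader.markdown.new(Writer, {smart = true})
    print(Parse('[home](https://fos.wiki.devuan.org/Main/WebHome)'))

The same wrapping works for Writer.image, which is why the patch routes both through the one lunaLinky() helper.
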
diff --git a/testing/index.md b/testing/index.md
index ca62e7f..5129b43 100644
--- a/testing/index.md
+++ b/testing/index.md
@@ -34,6 +34,8 @@ Test ~~strike~~ --out--.  Needs the extension.
 
 [untalenz](https://untalenz.rocks/)
 
+[nope.example.com](http://nope.example.com/)
+
 https://sledjhamr.org/
 
 # Lists
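
One more note on the data flow the Lua changes rely on: SuckIt writes a small key=value sidecar next to every page it grabs (page.md.md with ogWiki, ogURL, realURL and friends), and readMdMd() parses those lines back with string.gmatch(l, "(%w+)%s*=%s*(.+)"). A rough standalone sketch of that round trip, with a made-up file name and values:

    -- Rough sketch of the *.md.md sidecar round trip; the file name and values are made up.
    local sidecar = 'SomePage.md.md'          -- hypothetical sidecar next to SomePage.md

    -- What SuckIt writes: one key=value pair per line.
    local f = assert(io.open(sidecar, 'w'))
    f:write('ogWiki=Foswiki\nogURL=https://fos.wiki.devuan.org\nrealURL=Foswiki/Main/SomePage\n')
    f:close()

    -- What readMdMd() does with it: build a metadata table from the key=value lines.
    local metadata = {}
    for l in io.lines(sidecar) do
        for k, v in string.gmatch(l, '(%w+)%s*=%s*(.+)') do
            metadata[k] = v
        end
    end
    print(metadata.realURL)                   --> Foswiki/Main/SomePage

That realURL key is what the new symlink fixup loop rewrites, and what commonLinky() reads back when it converts old wiki URLs into local ones.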
-- 