From bcf74340a276aee6aaf0d0b2d6b07b29e50f0b25 Mon Sep 17 00:00:00 2001
From: dvs1
Date: Wed, 26 Feb 2025 20:08:29 +1000
Subject: YAR - mostly theming and link conversion. Includes the new "hide all but content" icon.

---
 SuckIt                |  67 ++++++++++++--------
 TODO.md               |   4 +-
 default.template      |  94 ++++++++++++++++++---------
 notYetAnotherWiki.lua | 172 +++++++++++++++++++++++++++++++++++++++++---------
 testing/index.md      |   2 +
 5 files changed, 254 insertions(+), 85 deletions(-)

diff --git a/SuckIt b/SuckIt
index d67844b..8950850 100755
--- a/SuckIt
+++ b/SuckIt
@@ -13,6 +13,10 @@ mkdir -p unsorted
 rm -fr users
 mkdir -p users
+# Copy across things like images that were uploaded.
+mkdir -p /opt/nyaw/Foswiki/pub/
+# TODO - Should rsync this instead.
+cp -r /opt/Foswiki/pub/Main /opt/nyaw/Foswiki/pub/
 filter="
 -name _default -prune -o \
 -name _empty -prune -o \
@@ -20,40 +24,46 @@ filter="
 -name Trash -prune -o \
 -name TWiki -prune -o \
 "
-URL="https://fos.wiki.devuan.org"
+ogURL="https://fos.wiki.devuan.org"
+ogWiki="Foswiki"
 time find /opt/Foswiki/data ${filter} \
 -name "*.txt" -type f,l -printf "%P\n" | while read line
 do
 	base=`echo "${line}" | cut -d '/' -f 1`
 	file=`echo "${line}" | cut -d '/' -f 2- | rev | cut -b 5- | rev`
-	if [[ ! $file =~ (AdminGroup|AdminUser|AdminUserLeftBar|EditorGroup|GroupTemplate|GroupViewTemplate|NobodyGroup|PatternSkinUserViewTemplate|ProjectContributor|RegistrationAgent|SitePreferences|UnprocessedRegistrations|UnprocessedRegistrationsLog|UserHomepageHeader|UserList|UserListByDateJoined|UserListByLocation|UserList|UserListHeader|WebAtom|WebChanges|WebCreateNewTopic|WebHome|WebIndex|WebLeftBar|WebLeftBarExample|WebNotify|WebPreferences|WebRss|WebSearch|WebSearchAdvanced|WebTopicList|WikiGroups|WikiUsers)$ ]]; then
+	if [[ ! ${file} =~ (AdminGroup|AdminUser|AdminUserLeftBar|CommentPluginExamples|EditorGroup|GroupTemplate|GroupViewTemplate|NobodyGroup|PatternSkinUserViewTemplate|ProjectContributor|RegistrationAgent|SitePreferences|UnprocessedRegistrations|UnprocessedRegistrationsLog|UserHomepageHeader|UserList|UserListByDateJoined|UserListByLocation|UserList|UserListHeader|WebAtom|WebChanges|WebCreateNewTopic|WebHome|WebIndex|WebLeftBar|WebLeftBarExample|WebNotify|WebPreferences|WebRss|WebSearch|WebSearchAdvanced|WebTopicList|WikiGroups|WikiUsers)$ ]]; then
+		realURL=${ogWiki}/${base}/${file}
 		time=`date --rfc-3339=seconds -ur /opt/Foswiki/data/${base}/${file}.txt | cut -d '+' -f 1`
-		mkdir -p Foswiki/$base
-		mkdir -p Foswiki/${base}/`dirname ${file}`
-		echo -e "ogWiki=Foswiki\nogURL=${URL}\nogBase=${base}\nogFile=${file}\ntimestamp=${time}\n" > Foswiki/${base}/${file}.md.md
-		echo "downloading ${URL}/${base}/${file}?cover=print"
+		mkdir -p ${ogWiki}/${base}
+		mkdir -p ${ogWiki}/${base}/`dirname ${file}`
+		echo -e "ogWiki=${ogWiki}\nogURL=${ogURL}\nrealURL=${realURL}\nogBase=${base}\nogFile=${file}\ntimestamp=${time}\n" > ${ogWiki}/${base}/${file}.md.md
+		echo "downloading ${ogURL}/${base}/${file}?cover=print"
 # Doesn't help with redownloads, coz natch a dynamic site isn't cached. But I can at least comment out the curl command during testing to save time.
-		curl --silent --no-progress-meter ${URL}/${base}/${file}?cover=print -o Foswiki/${base}/${file}.HTM
+		curl --silent --no-progress-meter ${ogURL}/${base}/${file}?cover=print -o ${ogWiki}/${base}/${file}.HTM
 # Attempt to separate user profiles from user content. Doesn't work when people turn their profiles into content.
if [[ "${base}" == "Main" ]]; then dest="unsorted" mkdir -p `dirname users/${file}` - sed -i -E Foswiki/${base}/${file}.HTM -e "s%UserForm%%w users/${file}_fos.SED" + sed -i -E ${ogWiki}/${base}/${file}.HTM -e "s%UserForm%%w users/${file}_fos.SED" if [ -s users/${file}_fos.SED ]; then dest="users" fi rm users/${file}_fos.SED rm -d `dirname users/${file}` >/dev/null 2>&1 mkdir -p `dirname ${dest}/${file}` - touch Foswiki/${base}/${file}.md - ln -sfr Foswiki/${base}/${file}.md ${dest}/${file}_fos.md - ln -sfr Foswiki/${base}/${file}.md.md ${dest}/${file}_fos.md.md - rm Foswiki/${base}/${file}.md + realURL=${dest}/${file} + echo -e "ogWiki=${ogWiki}\nogURL=${ogURL}\nrealURL=${realURL}_fos\nogBase=${base}\nogFile=${file}\ntimestamp=${time}\n" > ${ogWiki}/${base}/${file}.md.md + touch ${ogWiki}/${base}/${file}.md + ln -sfr ${ogWiki}/${base}/${file}.md ${dest}/${file}_fos.md + ln -sfr ${ogWiki}/${base}/${file}.md.md ${dest}/${file}_fos.md.md + rm ${ogWiki}/${base}/${file}.md fi fi done +# Copy across things like images that where uploaded. +cp -r /opt/pmwiki/uploads /opt/nyaw/PmWiki/ filter=" -not -name "*~" -a \ -not -name ".flock" -a \ @@ -61,32 +71,37 @@ filter=" -not -name ".lastmod" -a \ -not -name ".pageindex" -a \ " -URL="https://wiki.devuan.org" +ogURL="https://wiki.devuan.org" +ogWiki="PmWiki" time find /opt/pmwiki/wiki.d ${filter} \ -name "*.*" -type f,l -printf "%P\n" | while read line do base=`echo "${line}" | cut -d '.' -f 1` + file=`echo "${line}" | cut -d '.' -f 2` if [[ "${base}" != "Site" ]]; then - file=`echo "${line}" | cut -d '.' -f 2` + realURL=${ogWiki}/${base}/${file} time=`date --rfc-3339=seconds -ur /opt/pmwiki/wiki.d/${base}.${file} | cut -d '+' -f 1` - mkdir -p PmWiki/$base - echo -e "ogWiki=PmWiki\nogURL=${URL}\nogBase=${base}\nogFile=${file}\ntimestamp=${time}\n" > PmWiki/${base}/${file}.md.md -# echo "downloading ${URL}/?n=${base}.${file}?action=markdown" -# curl --no-progress-meter ${URL}/?n=${base}.${file}?action=markdown -o PmWiki/${base}/${file}.MARKDOWN - echo "downloading ${URL}/?n=${base}.${file}?action=print" - curl --no-progress-meter ${URL}/?n=${base}.${file}?action=print -o PmWiki/${base}/${file}.HTM + mkdir -p ${ogWiki}/${base} + echo -e "ogWiki=${ogWiki}\nogURL=${ogURL}\nrealURL=${realURL}\nogBase=${base}\nogFile=${file}\ntimestamp=${time}\n" > ${ogWiki}/${base}/${file}.md.md +# echo "downloading ${ogURL}/?n=${base}.${file}?action=markdown" +# curl --no-progress-meter ${ogURL}/?n=${base}.${file}?action=markdown -o ${ogWiki}/${base}/${file}.MARKDOWN + echo "downloading ${ogURL}/?n=${base}.${file}?action=print" + curl --no-progress-meter ${ogURL}/?n=${base}.${file}?action=print -o ${ogWiki}/${base}/${file}.HTM # Seems there's no way to tell user profiles apart from user content. Unless I can find a list of users somewhere. Don't think there is one. 
if [[ "${base}" == "Profiles" ]]; then - touch PmWiki/${base}/${file}.md - ln -sfr PmWiki/${base}/${file}.md unsorted/${file}_pm.md - ln -sfr PmWiki/${base}/${file}.md.md unsorted/${file}_pm.md.md - rm PmWiki/${base}/${file}.md + dest="unsorted" + realURL=${dest}/${file} + echo -e "ogWiki=${ogWiki}\nogURL=${ogURL}\nrealURL=${realURL}_pm\nogBase=${base}\nogFile=${file}\ntimestamp=${time}\n" > ${ogWiki}/${base}/${file}.md.md + touch ${ogWiki}/${base}/${file}.md + ln -sfr ${ogWiki}/${base}/${file}.md ${dest}/${file}_pm.md + ln -sfr ${ogWiki}/${base}/${file}.md.md ${dest}/${file}_pm.md.md + rm ${ogWiki}/${base}/${file}.md fi # TODO - groups are PmWiki/Onefang and PmWiki/Tiki -# pandoc -f markdown -t commonmark_x --self-contained PmWiki//${base}/${file}.MD >PmWiki/${base}/${file}.md -# pandoc -f html -t commonmark_x --self-contained PmWiki//${base}/${file}.HTM >PmWiki/${base}/${file}.md +# pandoc -f markdown -t commonmark_x --self-contained ${ogWiki}//${base}/${file}.MD >${ogWiki}/${base}/${file}.md +# pandoc -f html -t commonmark_x --self-contained ${ogWiki}//${base}/${file}.HTM >${ogWiki}/${base}/${file}.md fi done diff --git a/TODO.md b/TODO.md index 722ce99..bfaf0fd 100644 --- a/TODO.md +++ b/TODO.md @@ -12,6 +12,8 @@ Syntax highlighting in code blocks. Other colour shenanigans. +- There's a couple more "make this red" scattered around. + Might be useful to automatically convert anything looking like a URL into a linky. Check the timestamps on the files, only update if source is newer than destination. Meh, it's already 600 times faster than the pandoc version. @@ -40,7 +42,7 @@ Bugs - - /users/dunno/Devuan Cluster.HTML A very lengthy and complex document, I'll likely miss something, but chip away at the obvious. Lots of colour shenanigans. - unsorted/JensKorte/DevuanReleases_fos.HTML the header sorting links and colours on the table look tricky to unravel. Especially the extra header row for Daedalus, just to change it's colour. -- unsorted/Debdog_pm.HTML several tables of links encoded as code blocks, but they linkyness gets dropped, likely by pandoc. +- unsorted/Debdog_pm.HTML several tables of links encoded as code blocks, but there linkyness gets dropped, likely by pandoc. ## Try out diff --git a/default.template b/default.template index 9c5765f..f9a2746 100644 --- a/default.template +++ b/default.template @@ -7,61 +7,97 @@ -
+No cookies or scripts were harmed in the making of this web site. May contain low fat CSS.
+"' .. xlnk .. ' ' .. string.gsub(ur, '.*%.', '', 1) .. '.HTML') end - url = xlnk .. string.gsub(ur, '.*%.', '', 1) .. '.HTML' + if 'DUNNO/' == xlnk then print('OOPS! page not found - @' .. Context.path .. ' / ' .. Context.bit .. '\t' .. url .. ' -> ' .. xlnk .. ' ' .. string.gsub(ur, '.*%.', '', 1) .. '.HTML') end + end +-- if (nil ~= md) and (nil ~= md.realURL) then url = md.realURL +-- else + url = xlnk .. string.gsub(ur, '.*%.', '', 1) +-- end + if 'PmWiki/uploads/' == p then + url = '../' .. p .. string.gsub(ur, '%.', '.', 1) + elseif 'Foswiki/pub/' == p then + url = '../' .. p .. ur + else + url = url .. '.HTML' end end end end - return OgWriterLink(lab, url, tit) + return url +end + +local OgWriterLink = Writer.link -- So we can call the original from within mine, we are just changing the URL. +function Writer.link(lab, url, tit) + return OgWriterLink(lab, lunaLinky(url), tit) end +local OgWriterImage = Writer.image +function Writer.image(lab, url, tit) + return OgWriterImage(lab, lunaLinky(url), tit) +end + local Parse = Lunamark.reader.markdown.new(Writer, LunamarkOpts) diff --git a/testing/index.md b/testing/index.md index ca62e7f..5129b43 100644 --- a/testing/index.md +++ b/testing/index.md @@ -34,6 +34,8 @@ Test ~~strike~~ --out--. Needs the extension. [untalenz](https://untalenz.rocks/) +[nope.example.com](http://nope.example.com/) + https://sledjhamr.org/ # Lists -- cgit v1.1