From 5a671268340c107ee94f8244f01b6cb8ee9decd0 Mon Sep 17 00:00:00 2001
From: dvs1
Date: Sat, 25 Jan 2025 03:14:10 +1000
Subject: YAR see description for details.

New everything page, lists all the pages and where they came from, with
links. Now we know where the page came from, type and URL.

Rejigged how metadata works. Do our own metadata substitution.

Fixed up a bunch of bugs.

General cleanups.
---
 SuckItPm | 12 +++++-------
 1 file changed, 5 insertions(+), 7 deletions(-)

(limited to 'SuckItPm')

diff --git a/SuckItPm b/SuckItPm
index fe7efb3..fd0b048 100755
--- a/SuckItPm
+++ b/SuckItPm
@@ -20,6 +20,7 @@ do
 	mkdir -p PmWiki/$base
 	mkdir -p combined/$base
 	echo "Converting ${URL}/?n=${base}.${file}?action=print -> PmWiki/${base}/${file}.md"
+	echo -e "ogWiki=PmWiki\nogURL=${URL}/?n=${base}.${file}\n" > PmWiki/${base}/${file}.md.md
 	# Doesn't help with redownloads, coz natch a dynamic site isn't cached. But I can at least comment out the curl command during testing to save time.
 #	curl --no-progress-meter ${URL}/?n=${base}.${file}?action=markdown -o PmWiki/${base}/${file}.MD
 	curl --no-progress-meter ${URL}/?n=${base}.${file}?action=print -o PmWiki/${base}/${file}.HTM
@@ -29,6 +30,9 @@ do
 		rm PmWiki/${base}/${file}.HTM
 		mv xx00 PmWiki/${base}/${file}.HTM
 	fi
+	if [ -f xx01 ]; then
+		rm xx01
+	fi
 	sed -i -E PmWiki/${base}/${file}.HTM \
 	-e "s/rel='nofollow'//g" \
 	-e "s/target='_blank'//g" \
@@ -53,7 +57,6 @@ do
 	-e 's/\{#.*\}//g' \
 	-e '/^:::/d' \
 #	-e '/\[Site$/d' \
-#	-e '/^Page last modified on /d' \
 #	-e '/^\[\]/d' \
 #	-e "s/\`\`\{=html\}\`<\/a>\`\{=html\}//g" \
 #	-e "s/^\`[…]
[…]> PmWiki/${base}/${file}.md
 	ln -frs PmWiki/${base}/${file}.md combined/${base}/${file}.md
-
-	if [ -f xx01 ]; then
-		rm xx01
-	fi
+	ln -frs PmWiki/${base}/${file}.md.md combined/${base}/${file}.md.md
 done
 popd
--
cgit v1.1