path: root/SuckIt
author     dvs1    2025-01-25 03:14:10 +1000
committer  dvs1    2025-01-25 03:14:10 +1000
commit     5a671268340c107ee94f8244f01b6cb8ee9decd0 (patch)
tree       6d9dc191d25efb486c46a7b1865ef19cb70df8ca /SuckIt
parent     Replace cmark-gfm and lcmark with luamark. (diff)
YAR: see description for details.
New "everything" page: lists all the pages and where they came from, with links. Now we know where each page came from, its wiki type and URL. Rejigged how metadata works; we do our own metadata substitution now. Fixed a bunch of bugs. General cleanups.
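The metadata rework shows up in both scripts below: every scraped page ${file}.md now gets a ${file}.md.md sidecar of key=value pairs (ogWiki, ogURL) recording the source wiki and the page's original URL. The substitution side is outside this diff, so the following is only a sketch, assuming a ${key} token convention and a hypothetical page path:

    #!/bin/sh
    # Sketch only: expand ${key} tokens in a page from its .md.md sidecar.
    # The token syntax is an assumption; this diff only writes sidecars,
    # it never reads one.  Naive: assumes values contain no '|' or '&'.
    page="combined/SomeBase/SomePage.md"    # hypothetical path
    meta="${page}.md"                       # sidecar is page.md.md
    while IFS='=' read -r key value; do
        [ -n "$key" ] || continue           # skip the trailing blank line
        sed -i "s|\${${key}}|${value}|g" "$page"
    done < "$meta"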
Diffstat
-rwxr-xr-x  SuckItFos   9
-rwxr-xr-x  SuckItPm   12
2 files changed, 9 insertions, 12 deletions
diff --git a/SuckItFos b/SuckItFos
index 1e6e48d..0f6f8a8 100755
--- a/SuckItFos
+++ b/SuckItFos
@@ -22,6 +22,7 @@ do
 	mkdir -p combined/$base
 	mkdir -p combined/${base}/`dirname ${file}`
 	echo "Converting ${URL}/${base}/${file}?cover=print -> Foswiki/${base}/${file}.md"
+	echo -e "ogWiki=Foswiki\nogURL=${URL}/${base}/${file}\n" > Foswiki/${base}/${file}.md.md
 	# Doesn't help with redownloads, coz natch a dynamic site isn't cached.  But I can at least comment out the curl command during testing to save time.
 	curl --silent --no-progress-meter ${URL}/${base}/${file}?cover=print -o Foswiki/${base}/${file}.HTM
 	cp Foswiki/${base}/${file}.HTM Foswiki/${base}/${file}.HTM_ORIGINAL
@@ -47,6 +48,9 @@ do
 	    rm Foswiki/${base}/${file}.md
 	    mv xx00 Foswiki/${base}/${file}.md
 	fi
+	if [ -f xx01 ]; then
+	    rm xx01
+	fi
 
 	# Attempt to clean things up, badly.
 	sed -i -E Foswiki/${base}/${file}.md \
@@ -57,12 +61,7 @@ do
 #	-e 's/\{\.pattern.*\}//g' \
 #	-e 's/\{\.pattern.*//g' \
 
-	echo -e "****\n[Original page](${URL}/${base}/${file}) where maybe you can edit it." >> Foswiki/${base}/${file}.md
 	ln -frs Foswiki/${base}/${file}.md combined/${base}/${file}.md
-
-	if [ -f xx01 ]; then
-	    rm xx01
-	fi
 done
 
 popd
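Both scripts test for and remove a stray xx01; xx00 and xx01 match the default output names of csplit, so each loop presumably splits the converted page at a marker and keeps the part before it (the split itself is outside these hunks). This commit just moves that cleanup up next to the split instead of leaving it at the bottom of the loop. The idiom, sketched with a hypothetical marker pattern:

    # csplit names its pieces xx00, xx01, ... by default.
    csplit -s page.md '/SOME-MARKER/'    # hypothetical marker
    mv xx00 page.md                      # keep everything before the marker
    if [ -f xx01 ]; then                 # the tail exists only if it matched
        rm xx01
    fi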
diff --git a/SuckItPm b/SuckItPm
index fe7efb3..fd0b048 100755
--- a/SuckItPm
+++ b/SuckItPm
@@ -20,6 +20,7 @@ do
 	mkdir -p PmWiki/$base
 	mkdir -p combined/$base
 	echo "Converting ${URL}/?n=${base}.${file}?action=print -> PmWiki/${base}/${file}.md"
+	echo -e "ogWiki=PmWiki\nogURL=${URL}/?n=${base}.${file}\n" > PmWiki/${base}/${file}.md.md
 	# Doesn't help with redownloads, coz natch a dynamic site isn't cached.  But I can at least comment out the curl command during testing to save time.
 #	curl --no-progress-meter ${URL}/?n=${base}.${file}?action=markdown -o PmWiki/${base}/${file}.MD
 	curl --no-progress-meter ${URL}/?n=${base}.${file}?action=print -o PmWiki/${base}/${file}.HTM
@@ -29,6 +30,9 @@ do
 	    rm PmWiki/${base}/${file}.HTM
 	    mv xx00 PmWiki/${base}/${file}.HTM
 	fi
+	if [ -f xx01 ]; then
+	    rm xx01
+	fi
 	sed -i -E PmWiki/${base}/${file}.HTM \
 	    -e "s/rel='nofollow'//g" \
 	    -e "s/target='_blank'//g" \
@@ -53,7 +57,6 @@ do
 	    -e 's/\{#.*\}//g' \
 	    -e '/^:::/d' \
 #	    -e '/\[Site$/d' \
-#	    -e '/^Page last modified on /d' \
 #	    -e '/^\[\]/d' \
 #	    -e "s/\`<a id='trailstart'>\`\{=html\}\`<\/a>\`\{=html\}//g" \
 #	    -e "s/^\`<img /<img /g" \
@@ -61,13 +64,8 @@ do
 #	-e "s/^\`\`\`//g" \
 #	-e "s/\`\{=html\}//g"
 
-	# Don't need this, the parts we are grabbing already include that link at the bottom.
-#	echo -e "****\n[Original page](${URL}/${base}/${page}) where maybe you can edit it." >> PmWiki/${base}/${file}.md
 	ln -frs PmWiki/${base}/${file}.md combined/${base}/${file}.md
-
-	if [ -f xx01 ]; then
-	    rm xx01
-	fi
+	ln -frs PmWiki/${base}/${file}.md.md combined/${base}/${file}.md.md
 done
 
 popd
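One asymmetry worth noting: SuckItPm now links both the page and its .md.md sidecar into combined/, while the SuckItFos hunk above links only the page. A purely illustrative check for pages whose provenance sidecar didn't make it across:

    # Every page in combined/ should have a matching .md.md sidecar.
    find combined -name '*.md' ! -name '*.md.md' | while read -r page; do
        [ -e "${page}.md" ] || echo "missing sidecar: ${page}.md"
    done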