path: root/SuckIt
author     dvs1    2025-01-22 21:01:59 +1000
committer  dvs1    2025-01-22 21:01:59 +1000
commit     4a435bf8ae0e9f009346d89ebc424d8884f33cea (patch)
tree       5c0852c7a0bdb926db42dd5f17b168461503433a /SuckIt
parent     Typo-- (diff)
Replace cmark-gfm and lcmark with luamark.
Plus YAR, yet another rewrite.
Diffstat (limited to '')
-rwxr-xr-x  SuckItFos   9
-rwxr-xr-x  SuckItPm   11
2 files changed, 1 insertion(+), 19 deletions(-)
diff --git a/SuckItFos b/SuckItFos
index 11adaf9..1e6e48d 100755
--- a/SuckItFos
+++ b/SuckItFos
@@ -22,8 +22,6 @@ do
   mkdir -p combined/$base
   mkdir -p combined/${base}/`dirname ${file}`
   echo "Converting ${URL}/${base}/${file}?cover=print -> Foswiki/${base}/${file}.md"
-#  pandoc -f html -t markdown --self-contained ${URL}/${base}/${file} >Foswiki/${base}/${file}.md
-  # TODO - try curl, to see what is actually downloaded, and maybe not download unchanged pages. curl to .HTM
   # Doesn't help with redownloads, coz natch a dynamic site isn't cached. But I can at least comment out the curl command during testing to save time.
   curl --silent --no-progress-meter ${URL}/${base}/${file}?cover=print -o Foswiki/${base}/${file}.HTM
   cp Foswiki/${base}/${file}.HTM Foswiki/${base}/${file}.HTM_ORIGINAL
@@ -40,13 +38,10 @@ do
   -e 's/class="foswikiTopic"/class="FoswikiTopic"/g' \
   -e 's/class="foswiki[[:alpha:]]*"//g' \
   -e "s/style='.*;'//g"
-#  -e "s/style='background-color: #.*;'//g" \
-#  -e "s/style='font-size: .*;'//g"
 
   pandoc -f html -t commonmark_x --self-contained Foswiki//${base}/${file}.HTM >Foswiki/${base}/${file}.md
   cp Foswiki/${base}/${file}.md Foswiki/${base}/${file}.md_ORIGINAL
 
-#  csplit -ks Foswiki/${base}/${file}.md '%::: {.foswikiTopic}%' '/::: {.foswikiContentFooter}/'
   csplit -ks Foswiki/${base}/${file}.md '%::: {.FoswikiTopic}%' '/::: {.patternInfo}/'
   if [ -f xx00 ]; then
     rm Foswiki/${base}/${file}.md
@@ -63,10 +58,6 @@ do
 #  -e 's/\{\.pattern.*//g' \
 
   echo -e "****\n[Original page](${URL}/${base}/${file}) where maybe you can edit it." >> Foswiki/${base}/${file}.md
-
-#  pandoc -t html -f commonmark_x --self-contained Foswiki/${base}/${file}.md > Foswiki/${base}/${file}.htm
-#  cmark-gfm -t html -e footnotes -e table -e strikethrough Foswiki/${base}/${file}.md > Foswiki/${base}/${file}.body
-#  ln -frs Foswiki/${base}/${file}.body combined/${base}/${file}.body
   ln -frs Foswiki/${base}/${file}.md combined/${base}/${file}.md
 
   if [ -f xx01 ]; then
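
With those commented-out pandoc/cmark-gfm leftovers gone, the per-page work in SuckItFos is just the steps still visible in the context lines above. A condensed sketch of that surviving pipeline, for orientation only: variable names are the script's own, the long sed expression list is collapsed to a single example, and the sed invocation itself sits above the second hunk, so the -i flag here is an assumption.

  # Fetch the printable rendering of one Foswiki topic.
  curl --silent --no-progress-meter ${URL}/${base}/${file}?cover=print -o Foswiki/${base}/${file}.HTM
  # Strip Foswiki classes and inline styles so pandoc emits cleaner markdown (one of several -e expressions; -i assumed).
  sed -i -e "s/style='.*;'//g" Foswiki/${base}/${file}.HTM
  # Convert the cleaned HTML to CommonMark with pandoc extensions.
  pandoc -f html -t commonmark_x --self-contained Foswiki/${base}/${file}.HTM >Foswiki/${base}/${file}.md
  # Cut out just the topic body, between the FoswikiTopic marker and the patternInfo footer.
  csplit -ks Foswiki/${base}/${file}.md '%::: {.FoswikiTopic}%' '/::: {.patternInfo}/'
  # Publish the result into the combined tree as a relative symlink.
  ln -frs Foswiki/${base}/${file}.md combined/${base}/${file}.md
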
diff --git a/SuckItPm b/SuckItPm
index a7c1321..fe7efb3 100755
--- a/SuckItPm
+++ b/SuckItPm
@@ -17,12 +17,9 @@ find /opt/pmwiki/wiki.d ${filter} \
 do
   base=`echo "${line}" | cut -d '.' -f 1`
   file=`echo "${line}" | cut -d '.' -f 2`
-#  page="?n=${line}"
   mkdir -p PmWiki/$base
   mkdir -p combined/$base
   echo "Converting ${URL}/?n=${base}.${file}?action=print -> PmWiki/${base}/${file}.md"
-#  pandoc -f html -t markdown --self-contained ${URL}/?n=${base}.${file} >PmWiki/${base}/${file}.md
-  # TODO - try curl, to see what is actually downloaded, and maybe not download unchanged pages. curl to .HTM
   # Doesn't help with redownloads, coz natch a dynamic site isn't cached. But I can at least comment out the curl command during testing to save time.
 #  curl --no-progress-meter ${URL}/?n=${base}.${file}?action=markdown -o PmWiki/${base}/${file}.MD
   curl --no-progress-meter ${URL}/?n=${base}.${file}?action=print -o PmWiki/${base}/${file}.HTM
@@ -45,11 +42,9 @@ do
45 -e "s/class='vspace'//g" \ 42 -e "s/class='vspace'//g" \
46 -e "s/class='wikilink'//g" \ 43 -e "s/class='wikilink'//g" \
47 -e "s/style='.*;'//g" 44 -e "s/style='.*;'//g"
48# -e "s/style='background-color: #.*;'//g" \
49# -e "s/style='font-size: .*;'//g"
50 45
51 pandoc -f html -t commonmark_x --self-contained PmWiki//${base}/${file}.HTM >PmWiki/${base}/${file}.md
52# pandoc -f markdown -t commonmark_x --self-contained PmWiki//${base}/${file}.MD >PmWiki/${base}/${file}.md 46# pandoc -f markdown -t commonmark_x --self-contained PmWiki//${base}/${file}.MD >PmWiki/${base}/${file}.md
47 pandoc -f html -t commonmark_x --self-contained PmWiki//${base}/${file}.HTM >PmWiki/${base}/${file}.md
53 cp PmWiki/${base}/${file}.md PmWiki/${base}/${file}.md_ORIGINAL 48 cp PmWiki/${base}/${file}.md PmWiki/${base}/${file}.md_ORIGINAL
54 49
55 # Attempt to clean things up, badly. 50 # Attempt to clean things up, badly.
@@ -68,10 +63,6 @@ do
 
   # Don't need this, the parts we are grabbing already include that link at the bottom.
 #  echo -e "****\n[Original page](${URL}/${base}/${page}) where maybe you can edit it." >> PmWiki/${base}/${file}.md
-
-#  pandoc -t html -f commonmark_x --self-contained PmWiki/${base}/${file}.md > PmWiki/${base}/${file}.htm
-#  cmark-gfm -t html -e footnotes -e table -e strikethrough PmWiki/${base}/${file}.md > PmWiki/${base}/${file}.body
-#  ln -frs PmWiki/${base}/${file}.body combined/${base}/${file}.body
   ln -frs PmWiki/${base}/${file}.md combined/${base}/${file}.md
 
   if [ -f xx01 ]; then
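
SuckItPm gets the same clean-up, plus the diff's single insertion: the html -> commonmark_x pandoc call now sits below the commented-out action=markdown variant, so the ordering changes but the generated markdown should not. A hypothetical way to confirm that, not part of the repo, relying only on the directories these scripts already create (and on the scripts' own hint that curl can be commented out while testing):

  # Snapshot the generated markdown; -L dereferences the symlinks in combined/
  # so the copy holds real files rather than links that would track the rerun.
  cp -rL combined combined.before
  ./SuckItFos
  ./SuckItPm
  # Any difference means the rewrite changed the output.
  diff -r combined.before combined
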