Diffstat (limited to '')
-rwxr-xr-x  SuckItPm  11
1 file changed, 1 insertion(+), 10 deletions(-)
diff --git a/SuckItPm b/SuckItPm
index a7c1321..fe7efb3 100755
--- a/SuckItPm
+++ b/SuckItPm
@@ -17,12 +17,9 @@ find /opt/pmwiki/wiki.d ${filter} \
 do
     base=`echo "${line}" | cut -d '.' -f 1`
     file=`echo "${line}" | cut -d '.' -f 2`
-#   page="?n=${line}"
     mkdir -p PmWiki/$base
     mkdir -p combined/$base
     echo "Converting ${URL}/?n=${base}.${file}?action=print -> PmWiki/${base}/${file}.md"
-#   pandoc -f html -t markdown --self-contained ${URL}/?n=${base}.${file} >PmWiki/${base}/${file}.md
-    # TODO - try curl, to see what is actually downloaded, and maybe not download unchanged pages.  curl to .HTM
     # Doesn't help with redownloads, coz natch a dynamic site isn't cached.  But I can at least comment out the curl command during testing to save time.
 #   curl --no-progress-meter ${URL}/?n=${base}.${file}?action=markdown -o PmWiki/${base}/${file}.MD
     curl --no-progress-meter ${URL}/?n=${base}.${file}?action=print -o PmWiki/${base}/${file}.HTM
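
In effect, the fetch step that survives this hunk is the curl download of PmWiki's printable rendering to a local .HTM, with the direct pandoc-from-URL conversion dropped. A minimal stand-alone sketch of that step (assuming URL, base and file are set as in the surrounding loop; the quoting is added here and is not in the script):

    # Fetch the printable rendering of the wiki page to disk so the
    # conversion can work from a local file instead of the live site.
    mkdir -p "PmWiki/${base}"
    curl --no-progress-meter "${URL}/?n=${base}.${file}?action=print" \
         -o "PmWiki/${base}/${file}.HTM"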
@@ -45,11 +42,9 @@ do
         -e "s/class='vspace'//g" \
         -e "s/class='wikilink'//g" \
         -e "s/style='.*;'//g"
-#        -e "s/style='background-color: #.*;'//g" \
-#        -e "s/style='font-size: .*;'//g"
 
-    pandoc -f html -t commonmark_x --self-contained PmWiki//${base}/${file}.HTM >PmWiki/${base}/${file}.md
 #   pandoc -f markdown -t commonmark_x --self-contained PmWiki//${base}/${file}.MD >PmWiki/${base}/${file}.md
+    pandoc -f html -t commonmark_x --self-contained PmWiki//${base}/${file}.HTM >PmWiki/${base}/${file}.md
     cp PmWiki/${base}/${file}.md PmWiki/${base}/${file}.md_ORIGINAL
 
     # Attempt to clean things up, badly.
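
The conversion this hunk settles on reads the cached .HTM and emits pandoc's commonmark_x flavour, keeping an untouched copy before the cleanup passes that follow. A sketch of just that step, assuming the .HTM from the previous hunk exists and the file layout matches the script:

    # Convert the cached HTML to CommonMark and keep a pristine copy for
    # comparing against the later cleanup edits.
    pandoc -f html -t commonmark_x --self-contained \
        "PmWiki/${base}/${file}.HTM" > "PmWiki/${base}/${file}.md"
    cp "PmWiki/${base}/${file}.md" "PmWiki/${base}/${file}.md_ORIGINAL"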
@@ -68,10 +63,6 @@ do
 
     # Don't need this, the parts we are grabbing already include that link at the bottom.
 #   echo -e "****\n[Original page](${URL}/${base}/${page}) where maybe you can edit it." >> PmWiki/${base}/${file}.md
-
-#   pandoc -t html -f commonmark_x --self-contained PmWiki/${base}/${file}.md > PmWiki/${base}/${file}.htm
-#   cmark-gfm -t html -e footnotes -e table -e strikethrough PmWiki/${base}/${file}.md > PmWiki/${base}/${file}.body
-#   ln -frs PmWiki/${base}/${file}.body combined/${base}/${file}.body
     ln -frs PmWiki/${base}/${file}.md combined/${base}/${file}.md
 
     if [ -f xx01 ]; then
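
After this hunk the script no longer re-renders HTML or .body fragments; it only links the converted Markdown into the combined tree. A sketch of the remaining publish step, assuming GNU ln (for -r/--relative) and that both directories were created earlier in the loop:

    # Replace any stale link (-f) with a relative (-r) symlink (-s) so the
    # combined tree always points at the freshly converted page.
    ln -frs "PmWiki/${base}/${file}.md" "combined/${base}/${file}.md"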