#!/bin/bash

URL="https://fos.wiki.devuan.org"

# Webs we don't want; each entry ends in -prune -o so the whole string splices
# straight into the find command below (hence ${filter} stays unquoted there).
filter=" -name _default -prune -o \
 -name _empty -prune -o \
 -name System -prune -o \
 -name Trash -prune -o \
 -name TWiki -prune -o \
"

pushd /opt/merged

# %P prints paths relative to /opt/Foswiki/data, i.e. web/topic.txt.
find /opt/Foswiki/data ${filter} \
 -name "*.txt" -type f,l -printf "%P\n" | while IFS= read -r line
do
  # First path component is the web; the rest, minus the trailing ".txt",
  # is the topic.
  base=$(echo "${line}" | cut -d '/' -f 1)
  file=$(echo "${line}" | cut -d '/' -f 2- | rev | cut -b 5- | rev)
  mkdir -p "Foswiki/${base}/$(dirname "${file}")"
  mkdir -p "combined/${base}/$(dirname "${file}")"

  echo "Converting ${URL}/${base}/${file} -> Foswiki/${base}/${file}.md"
  # pandoc -f html -t markdown --self-contained ${URL}/${base}/${file} >Foswiki/${base}/${file}.md
  # TODO - use curl to see what is actually downloaded, and maybe avoid
  # re-downloading unchanged pages. Saving to .HTM doesn't help with
  # redownloads, since naturally a dynamic site isn't cached, but the curl
  # command can at least be commented out during testing to save time.
  curl --silent --show-error "${URL}/${base}/${file}" -o "Foswiki/${base}/${file}.HTM"
  pandoc -f html -t commonmark_x "Foswiki/${base}/${file}.HTM" >"Foswiki/${base}/${file}.md"
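  # Hedged sketch for the TODO above, not part of the original flow: curl's
  # --time-cond (-z) with a filename sends If-Modified-Since based on that
  # file's mtime, and -R stamps the download with the server's time, so a
  # server that honours conditional GETs would skip unchanged pages. A dynamic
  # wiki likely answers 200 every time, making this a no-op, hence it stays
  # commented out.
  # curl --silent --show-error -R -z "Foswiki/${base}/${file}.HTM" \
  #   "${URL}/${base}/${file}" -o "Foswiki/${base}/${file}.HTM"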

  # Mirror the tree into combined/ with relative symlinks back into Foswiki/,
  # and keep the raw pandoc output around for comparison.
  ln -frs "Foswiki/${base}/${file}.md" "combined/${base}/${file}.md"
  cp "Foswiki/${base}/${file}.md" "Foswiki/${base}/${file}.md_ORIGINAL"

  # Keep only the part between the topic div and the content footer: the
  # %...% pattern skips the preamble, the /.../ pattern splits at the footer,
  # leaving the wanted part in xx00. -k keeps pieces on errors, -s is quiet.
  csplit -ks "Foswiki/${base}/${file}.md" '%::: {.foswikiTopic}%' '/::: {.foswikiContentFooter}/'
  if [ -f xx00 ]; then
    mv xx00 "Foswiki/${base}/${file}.md"
  fi
  rm -f xx*    # drop the footer piece (xx01) and any other csplit leftovers

  # Attempt to clean things up, badly: escape $ as $dlr$, and strip the
  # attribute blocks and fenced divs that commonmark_x emits for Foswiki's
  # CSS classes. Both straight and smart quotes show up in practice, hence
  # the duplicated rel="nofollow" rules.
  sed -i -E \
   -e 's/\$/\$dlr\$/g' \
   -e 's/\{#.*\}//g' \
   -e 's/\{\.foswiki.*\}//g' \
   -e 's/\{\.foswiki.*//g' \
   -e 's/\{\.foswikiNewLink rel=“nofollow”\}//g' \
   -e 's/\{\.foswikiNewLink$//g' \
   -e 's/^\.foswiki.*\}//g' \
   -e 's/\{\.pattern.*\}//g' \
   -e 's/\{\.pattern.*//g' \
   -e 's/\{rel="nofollow"\}//g' \
   -e 's/^rel="nofollow"\}//g' \
   -e 's/rel=“nofollow”\}$//g' \
   -e '/^:::/d' \
   "Foswiki/${base}/${file}.md"

  # Append a footer linking back to the editable page on the live wiki.
  echo "

[Original page](${URL}/${base}/${file}) where you can edit it.
" >> "Foswiki/${base}/${file}.md"
done

# Now rebuild the site from the converted pages.
notYetAnotherWiki.lua

popd