From d1e2966ab8d3fd38edfa3e50fae8e26601ef1902 Mon Sep 17 00:00:00 2001
From: dvs1
Date: Mon, 20 Jan 2025 14:50:57 +1000
Subject: Clean up the cleaning up

---
 SuckItFos | 16 +++++++++-------
 SuckItPm  | 30 ++++++++++++++++--------------
 TODO.md   | 10 +++++-----
 3 files changed, 30 insertions(+), 26 deletions(-)

diff --git a/SuckItFos b/SuckItFos
index b59f6b0..11adaf9 100755
--- a/SuckItFos
+++ b/SuckItFos
@@ -10,7 +10,7 @@ filter="
 -name TWiki -prune -o \
 "
 
-pushd /opt/merged
+pushd /opt/mergedWork
 
 find /opt/Foswiki/data ${filter} \
 -name "*.txt" -type f,l -printf "%P\n" | while read line
@@ -26,7 +26,6 @@ do
 	# TODO - try curl, to see what is actually downloaded, and maybe not download unchanged pages.  curl to .HTM
 	# Doesn't help with redownloads, coz natch a dynamic site isn't cached.  But I can at least comment out the curl command during testing to save time.
 	curl --silent --no-progress-meter ${URL}/${base}/${file}?cover=print -o Foswiki/${base}/${file}.HTM
-	cp Foswiki/${base}/${file}.HTM Foswiki/${base}/${file}.HTM_ORIGINAL
 	csplit -ks Foswiki/${base}/${file}.HTM '%