From 23854d8283e200edebc8d9105e2bc4822daaff22 Mon Sep 17 00:00:00 2001
From: dvs1
Date: Fri, 14 Mar 2025 12:06:59 +1000
Subject: Include Foswiki/Sandbox in unsorted.
---
SuckIt | 18 +++++++++++++++++-
1 file changed, 17 insertions(+), 1 deletion(-)
diff --git a/SuckIt b/SuckIt
index 8950850..a544f48 100755
--- a/SuckIt
+++ b/SuckIt
@@ -41,6 +41,7 @@ do
# Doesn't help with redownloads, since naturally a dynamic site isn't cached.  But I can at least comment out the curl command during testing to save time.
curl --silent --no-progress-meter ${ogURL}/${base}/${file}?cover=print -o ${ogWiki}/${base}/${file}.HTM
# Attempt to separate user profiles from user content. Doesn't work when people turn their profiles into content.
+ dest=""
if [[ "${base}" == "Main" ]]; then
dest="unsorted"
mkdir -p `dirname users/${file}`
@@ -48,8 +49,23 @@ do
if [ -s users/${file}_fos.SED ]; then
dest="users"
fi
- rm users/${file}_fos.SED
+ rm users/${file}_fos.SED >/dev/null 2>&1
rm -d `dirname users/${file}` >/dev/null 2>&1
+ fi
+ # "Devuan" is only two pages that get sorted. "Sandbox" is a mixture of standard examples, stuff that was copied to PmWiki, and other things that should get unsorted.
+ # Skipping anything with "UnknownUser".
+ if [[ "${base}" == "Sandbox" ]]; then
+ dest="unsorted"
+ mkdir -p `dirname users/${file}`
+ sed -i -E ${ogWiki}/${base}/${file}.HTM -e "s%UnknownUser%%w users/${file}_fos.SED"
+ if [ -s users/${file}_fos.SED ]; then
+ dest=""
+ fi
+ rm users/${file}_fos.SED >/dev/null 2>&1
+ rm -d `dirname users/${file}` >/dev/null 2>&1
+ fi
+
+ if [[ "${dest}" != "" ]]; then
mkdir -p `dirname ${dest}/${file}`
realURL=${dest}/${file}
echo -e "ogWiki=${ogWiki}\nogURL=${ogURL}\nrealURL=${realURL}_fos\nogBase=${base}\nogFile=${file}\ntimestamp=${time}\n" > ${ogWiki}/${base}/${file}.md.md