diff options
Diffstat (limited to 'SuckIt')
-rwxr-xr-x | SuckIt | 69 |
1 files changed, 69 insertions, 0 deletions
@@ -0,0 +1,69 @@ | |||
#!/bin/bash

# SuckIt — scrape the Foswiki and PmWiki sites into /opt/merged.
# For every page it writes a metadata stub (<page>.md.md) and the
# server-rendered HTML (<page>.HTM), then hands the tree to
# notYetAnotherWiki.lua.  Requires GNU find/date (-printf, --rfc-3339).

TIMEFORMAT=" took %lR using %P%% CPU"
time {
  # Everything below assumes we are in /opt/merged; bail out rather than
  # run the rm -fr below in the wrong directory.
  pushd /opt/merged || exit 1

  # Start each wiki's output tree from the pristine _EMPTY skeleton.
  rm -fr Foswiki/*
  cp -r /opt/merged_EMPTY/Foswiki .
  rm -fr PmWiki/*
  cp -r /opt/merged_EMPTY/PmWiki .


  # Webs to skip entirely.  Held in an array so find receives each word
  # intact; the old whitespace-split string was exposed to accidental
  # pathname expansion when used unquoted.
  filter=(
    -name _default -prune -o
    -name _empty -prune -o
    -name System -prune -o
    -name Trash -prune -o
    -name TWiki -prune -o
  )
  # Foswiki boilerplate/admin topics that should not be scraped.
  adminTopics='(AdminGroup|AdminUser|AdminUserLeftBar|EditorGroup|GroupTemplate|GroupViewTemplate|NobodyGroup|PatternSkinUserViewTemplate|ProjectContributor|RegistrationAgent|SitePreferences|UnprocessedRegistrations|UnprocessedRegistrationsLog|UserHomepageHeader|UserList|UserListByDateJoined|UserListByLocation|UserList|UserListHeader|WebAtom|WebChanges|WebCreateNewTopic|WebHome|WebIndex|WebLeftBar|WebLeftBarExample|WebNotify|WebPreferences|WebRss|WebSearch|WebSearchAdvanced|WebTopicList|WikiGroups|WikiUsers)$'
  URL="https://fos.wiki.devuan.org"
  time find /opt/Foswiki/data "${filter[@]}" \
    -name "*.txt" -type f,l -printf "%P\n" | while IFS= read -r line
  do
    base="${line%%/*}"      # top-level web directory
    file="${line#*/}"       # page path within the web ...
    file="${file%.txt}"     # ... minus the .txt suffix
    if [[ ! $file =~ $adminTopics ]]; then
      # Page modification time, UTC, with the "+00:00" zone suffix cut off.
      stamp=$(date --rfc-3339=seconds -ur "/opt/Foswiki/data/${base}/${file}.txt" | cut -d '+' -f 1)
      # dirname yields "." for top-level pages, so one mkdir covers both cases.
      mkdir -p "Foswiki/${base}/$(dirname "${file}")"
      printf 'ogWiki=Foswiki\nogURL=%s\nogBase=%s\nogFile=%s\ntimestamp=%s\n\n' \
        "${URL}" "${base}" "${file}" "${stamp}" > "Foswiki/${base}/${file}.md.md"
      echo "downloading ${URL}/${base}/${file}?cover=print"
      # Doesn't help with redownloads, coz natch a dynamic site isn't cached.
      # But I can at least comment out the curl command during testing to save time.
      curl --silent --no-progress-meter "${URL}/${base}/${file}?cover=print" -o "Foswiki/${base}/${file}.HTM"
    fi
  done


  # Housekeeping files PmWiki keeps next to its page files.  As an array the
  # "*~" pattern is passed to find literally instead of being glob-expanded;
  # the old quoted-inside-quotes string version was broken.
  filter=(
    -not -name "*~"
    -not -name ".flock"
    -not -name ".htaccess"
    -not -name ".lastmod"
    -not -name ".pageindex"
  )
  URL="https://wiki.devuan.org"
  time find /opt/pmwiki/wiki.d "${filter[@]}" \
    -name "*.*" -type f,l -printf "%P\n" | while IFS= read -r line
  do
    base="${line%%.*}"      # PmWiki group: text before the first dot
    if [[ "${base}" != "Site" ]]; then
      file="${line#*.}"     # page name: the second dot-separated field
      file="${file%%.*}"
      stamp=$(date --rfc-3339=seconds -ur "/opt/pmwiki/wiki.d/${base}.${file}" | cut -d '+' -f 1)
      mkdir -p "PmWiki/${base}"
      printf 'ogWiki=PmWiki\nogURL=%s\nogBase=%s\nogFile=%s\ntimestamp=%s\n\n' \
        "${URL}" "${base}" "${file}" "${stamp}" > "PmWiki/${base}/${file}.md.md"
      # echo "downloading ${URL}/?n=${base}.${file}?action=markdown"
      # curl --no-progress-meter "${URL}/?n=${base}.${file}?action=markdown" -o "PmWiki/${base}/${file}.MARKDOWN"
      echo "downloading ${URL}/?n=${base}.${file}?action=print"
      # NOTE(review): the second '?' looks like it should be '&' in a query
      # string, but the server has been accepting it — kept as-is.
      curl --no-progress-meter "${URL}/?n=${base}.${file}?action=print" -o "PmWiki/${base}/${file}.HTM"

      # pandoc -f markdown -t commonmark_x --self-contained PmWiki//${base}/${file}.MD >PmWiki/${base}/${file}.md
      # pandoc -f html -t commonmark_x --self-contained PmWiki//${base}/${file}.HTM >PmWiki/${base}/${file}.md
    fi
  done


  # Build the merged static site from everything scraped above.
  time notYetAnotherWiki.lua

  popd
}