-rw-r--r--  .md.md                |   2
-rwxr-xr-x  SuckItFos             |   9
-rwxr-xr-x  SuckItPm              |  12
-rw-r--r--  TODO.md               |   8
-rw-r--r--  default.template      |   1
-rwxr-xr-x  notYetAnotherWiki.lua | 206
6 files changed, 146 insertions(+), 92 deletions(-)
diff --git a/.md.md b/.md.md
--- a/.md.md
+++ b/.md.md
@@ -1,3 +1,5 @@
+favicon=nYAW_icon.png
+logo=nYAW.png
 sourcecode=https://sledjhamr.org/cgit/notYetAnotherWiki/
 pagehistory=https://sledjhamr.org/cgit/notYetAnotherWiki/log
 feedatom=https://sledjhamr.org/cgit/notYetAnotherWiki/atom
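The .md.md file above is a metadata sidecar: plain key=value lines that notYetAnotherWiki.lua reads with the Lua pattern (%w+)%s*=%s*(.+), as seen in the hunks further down. A minimal sketch of that parse, against a hypothetical testing/.md sidecar, not the script's actual loop:

    -- Sketch only: read key=value metadata lines with the same pattern the script uses.
    -- The file name is a made-up example; any .md or .md.md sidecar has the same shape.
    local metadata = {}
    for l in io.lines('testing/.md') do
      for k, v in string.gmatch(l, "(%w+)%s*=%s*(.+)") do
        metadata[k] = v
      end
    end
    print(metadata.favicon, metadata.logo)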
diff --git a/SuckItFos b/SuckItFos
--- a/SuckItFos
+++ b/SuckItFos
@@ -22,6 +22,7 @@ do
 mkdir -p combined/$base
 mkdir -p combined/${base}/`dirname ${file}`
 echo "Converting ${URL}/${base}/${file}?cover=print -> Foswiki/${base}/${file}.md"
+echo -e "ogWiki=Foswiki\nogURL=${URL}/${base}/${file}\n" > Foswiki/${base}/${file}.md.md
 # Doesn't help with redownloads, coz natch a dynamic site isn't cached. But I can at least comment out the curl command during testing to save time.
 curl --silent --no-progress-meter ${URL}/${base}/${file}?cover=print -o Foswiki/${base}/${file}.HTM
 cp Foswiki/${base}/${file}.HTM Foswiki/${base}/${file}.HTM_ORIGINAL
@@ -47,6 +48,9 @@ do
 rm Foswiki/${base}/${file}.md
 mv xx00 Foswiki/${base}/${file}.md
 fi
+if [ -f xx01 ]; then
+rm xx01
+fi
 
 # Attempt to clean things up, badly.
 sed -i -E Foswiki/${base}/${file}.md \
@@ -57,12 +61,7 @@ do
 # -e 's/\{\.pattern.*\}//g' \
 # -e 's/\{\.pattern.*//g' \
 
-echo -e "****\n[Original page](${URL}/${base}/${file}) where maybe you can edit it." >> Foswiki/${base}/${file}.md
 ln -frs Foswiki/${base}/${file}.md combined/${base}/${file}.md
-
-if [ -f xx01 ]; then
-rm xx01
-fi
 done
 
 popd
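The new echo -e line writes a tiny sidecar next to each converted page, recording which wiki it came from and where the original lives; the Lua script later picks these up as ordinary metadata. For one Foswiki page the generated Foswiki/${base}/${file}.md.md would look roughly like this (URL invented for illustration):

    ogWiki=Foswiki
    ogURL=https://example.org/foswiki/Sandbox/SomeTopic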
diff --git a/SuckItPm b/SuckItPm
--- a/SuckItPm
+++ b/SuckItPm
@@ -20,6 +20,7 @@ do
 mkdir -p PmWiki/$base
 mkdir -p combined/$base
 echo "Converting ${URL}/?n=${base}.${file}?action=print -> PmWiki/${base}/${file}.md"
+echo -e "ogWiki=PmWiki\nogURL=${URL}/?n=${base}.${file}\n" > PmWiki/${base}/${file}.md.md
 # Doesn't help with redownloads, coz natch a dynamic site isn't cached. But I can at least comment out the curl command during testing to save time.
 # curl --no-progress-meter ${URL}/?n=${base}.${file}?action=markdown -o PmWiki/${base}/${file}.MD
 curl --no-progress-meter ${URL}/?n=${base}.${file}?action=print -o PmWiki/${base}/${file}.HTM
@@ -29,6 +30,9 @@ do
 rm PmWiki/${base}/${file}.HTM
 mv xx00 PmWiki/${base}/${file}.HTM
 fi
+if [ -f xx01 ]; then
+rm xx01
+fi
 sed -i -E PmWiki/${base}/${file}.HTM \
 -e "s/rel='nofollow'//g" \
 -e "s/target='_blank'//g" \
@@ -53,7 +57,6 @@ do
 -e 's/\{#.*\}//g' \
 -e '/^:::/d' \
 # -e '/\[Site$/d' \
-# -e '/^Page last modified on /d' \
 # -e '/^\[\]/d' \
 # -e "s/\`<a id='trailstart'>\`\{=html\}\`<\/a>\`\{=html\}//g" \
 # -e "s/^\`<img /<img /g" \
@@ -61,13 +64,8 @@ do
 # -e "s/^\`\`\`//g" \
 # -e "s/\`\{=html\}//g"
 
-# Don't need this, the parts we are grabbing already include that link at the bottom.
-# echo -e "****\n[Original page](${URL}/${base}/${page}) where maybe you can edit it." >> PmWiki/${base}/${file}.md
 ln -frs PmWiki/${base}/${file}.md combined/${base}/${file}.md
-
-if [ -f xx01 ]; then
-rm xx01
-fi
+ln -frs PmWiki/${base}/${file}.md.md combined/${base}/${file}.md.md
 done
 
 popd
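Both scrapers also gained the xx01 cleanup inside the loop and dropped it from the end. The xx00/xx01 names look like the default output of csplit, presumably used to chop the downloaded HTML at some marker so only the wanted part (xx00) survives; that split step isn't in this diff, so treat the shape below as an assumption:

    # Assumed shape of the split step (not shown in this diff): csplit names its pieces xx00, xx01, ...
    csplit --quiet page.HTM '/<!-- some end marker -->/'
    mv xx00 page.HTM            # keep everything before the marker
    if [ -f xx01 ]; then        # remove the leftover tail, if the marker matched
        rm xx01
    fi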
diff --git a/TODO.md b/TODO.md
--- a/TODO.md
+++ b/TODO.md
@@ -6,26 +6,27 @@ Bugs -
 - /users/Plentyn/WebHome.HTML has that twisty thing which looks not simple to remove.
 - /users/dunno/Devuan Cluster.HTML A very lengthy and complex document, I'll likely miss something, but chip away at the obvious.
 - /users/Debdog.HTML pandoc can't handle the background table cell colours in the "Background colours" table, which is kinda the point of it.
-- PmWiki in it's current config needs that ?n=foo.bar nonsense for the Original page link. Which I'm currently neatly sidestepping, the scraped page has a similar thing.
 
 Check the timestamps on the files, only update if source is newer than destination. Meh, it's already 600 times faster than the pandoc version.
+
 - One quirk to watch for is if a URL path changes, the docs that have that URL need to be redone.
 - pandoc is a lot slower though, so do this for sure when dealing with that.
 - When scraping the web sites, they tend to be dynamically generated with no useful timestamp on them.
 - The web site scrape happens locally anyway, I can compare source file timestamps.
 
 Add atom feed for single page. Alas cgit only seems to have ATOM feed on the whole repo, not individual files.
+
 - git.devuan.org might have usable per page history.
 - However, once timestamps are sorted, I can use that code to generate RSS and ATOM feeds, and create page histories using diffs.
 
 Deal with complex directory trees.
+
 - /testing/even should display as even/deeper on the testing page, coz even/ has no files, but even/deeper does.
 - On the other hand, I could just create an empty page with the sub directories shown as usual.
 - Scanning /usr/share/doc on my super desktop with looots of software installed will be fun.
 - On the gripping hand, this is where we want empty directories to vanish.
 
 
-
 ## Maybe not
 
 Extract title from Fos and Pm, maybe even pagetitle if possible. Title is pointless, both of them just use the file name.
@@ -34,12 +35,13 @@ Extract title from Fos and Pm, maybe even pagetitle if possible. Title is point
 
 ## Try out
 
+Lua pattern matching sucks in general. Might be why people use lpeg instead. Try it.
+
 htmx
 
 cgit has Lua
 
 
-
 ## User system
 
 Reuse the user system from SledjChisl.
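On the TODO item about only updating when the source is newer than the destination: since the conversion runs locally, a plain mtime comparison would do. A minimal Lua sketch, assuming GNU stat is on hand and using placeholder file names (the project may end up doing this differently):

    -- Sketch only: skip regeneration when the destination is already newer than the source.
    local function mtime(path)
      local p = io.popen('stat -c %Y "' .. path .. '" 2>/dev/null')
      local t = tonumber(p:read('*a'))
      p:close()
      return t or 0
    end

    if mtime('page.md') > mtime('page.HTML') then
      print('source is newer, regenerate page.HTML')
    end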
diff --git a/default.template b/default.template
index 9f2490b..bedfdcf 100644
--- a/default.template
+++ b/default.template
@@ -2,7 +2,6 @@
 <html>
 <head>
 <meta charset="utf-8">
-<meta name="generator" contents="lcmark">
 <meta name="viewport" content="width=device-width, initial-scale=1.0, user-scalable=yes">
 <title>$title$</title>
 <link rel="icon" type="image/png" href="$favicon$" />
diff --git a/notYetAnotherWiki.lua b/notYetAnotherWiki.lua
index 8c9a5c9..dc64fa2 100755
--- a/notYetAnotherWiki.lua
+++ b/notYetAnotherWiki.lua
@@ -64,10 +64,11 @@ local globalData = {
 ['devuanDeepSeaDark'] = '#132f40', ['devuanDeepSeaLight'] = '#1a4562',
 ['devuanSaphireDark'] = '#004489', ['devuanSaphireLight'] = '#00509f',
 ['karenPurple'] = '#8800ff', ['onefangPurple'] = '#cc00ff',
-favicon = 'nYAW_icon.png', logo = 'nYAW.png', header = '', home = '', --menu = '',
-history = '', footer = 'Powered by <a href="https://sledjhamr.org/cgit/notYetAnotherWiki/about/">notYetAnotherWiki</a> version 0.0. ',
+favicon = 'nYAW_icon.png', logo = 'nYAW.png',
+header = '', home = '', history = '', footer = 'Powered by <a href="https://sledjhamr.org/cgit/notYetAnotherWiki/about/">notYetAnotherWiki</a> version 0.0. ',
+--menu = '',
 }
-local Sites, Files, Subs = {}, {}, {}
+local Files, Subs = {}, {}
 
 
 
@@ -86,7 +87,7 @@ local derefTable = function(t, strip)
 end
 
 
--- String together the bits array into a path string.
+-- String together the bits array into a path string. Or the other way around. lol
 local stringBits = function(l)
 local bits = {}
 local last = 1
@@ -102,7 +103,19 @@
 
 -- Put a value into the Files or Subs table, creating things if needed.
 local toFile = function(name, key, value)
-if nil == Files[name] then Files[name] = {} end
+if nil == Files[name] then
+local bits, bit = stringBits(name)
+local path = ''
+Files[name] = {}
+Files[name].bits = bits
+Files[name].bit = bit
+for i, d in ipairs(bits) do
+if '' ~= path then path = path .. '/' end
+path = path .. d
+end
+Files[name].path = path
+-- Files[name].body = ''
+end
 if nil ~= key then Files[name][key] = value end
 for i, v in ipairs{'metadata', 'bits', } do
 if nil == Files[name][v] then Files[name][v] = {} end
@@ -120,9 +133,15 @@ end
 
 -- Actually start doing things.
 
+-- Create the base of everything.md here, so it gets picked up as usual in the file scan.
+local body, h = '', io.open('everything.md', 'w')
+h:write('# All the pages\n')
+h:close()
+
+
 -- Scan the subdirectories looking for .md files.
 local directory = arg[1]
-toSub('') --Subs[''] = {files = {}, subs = {}, bits = {}}
+toSub('')
 if nil == directory then directory = '.' end
 if '.' ~= directory then
 for l in io.popen('find . -name "*.md" -type f,l -printf "%P\n"'):lines() do
@@ -135,39 +154,60 @@ for l in io.popen('find ' .. directory .. ' -name "*.md" -type f,l -printf "%P\n
 if nil == Files[n] then toFile(n) end
 end
 
--- Gotta figure out all the files and subs first. File and sub metadata comes along for the ride.
+
+-- Gotta figure out all the files and subs first. File and sub metadata comes along for the ride, coz we need them later.
+local newMeta = {}
 for name, file in pairs(Files) do
 local bitter, path = '', ''
-local bits, bit = stringBits(name)
+local bits, bit = file.bits, file.bit
 local ln = #bits
 local body, metadata = '', {}
 
 -- Go through our bits, construct Subs with bits.
-Files[name].bits = bits
-Files[name].bit = bit
 if ln > 0 then bitter = bits[1] end
 if '' ~= bitter then Subs[''].subs[bitter] = bitter end -- "bitter end" was entirely by accident, I'm keeping it. B-)
 for i, d in ipairs(bits) do
 if '' ~= path then path = path .. '/' end
 path = path .. d
 toSub(path, 'bits', derefiTable(bits, true))
-if i < ln then Subs[path].subs[bits[i + 1]] = bits[i + 1] end
-if i < ln then table.remove(Subs[path].bits, #bits) end
+if i < ln then
+Subs[path].subs[bits[i + 1]] = bits[i + 1]
+table.remove(Subs[path].bits, #bits)
+end
 end
 
 if '.md' == string.sub(name, -3, -1) then
 -- This is a metadata only file, no content, stash the matadata.
 for l in io.open(name .. '.md'):lines() do
-for k, v in string.gmatch(l, "(%w+)%s*=%s*(.+)") do metadata[k] = v end
+for k, v in string.gmatch(l, "(%w+)%s*=%s*(.+)") do
+if nil == v then
+print(name .. ' ' .. k)
+else
+metadata[k] = v
+end
+end
 end
 if '.md' == name then toSub(path, 'metadata', metadata)
 elseif '/.md' == string.sub(name, -4, -1) then toSub(path, 'metadata', metadata)
-else toFile(string.sub(name, 1, -4), 'metadata', metadata)
+-- else toFile(string.sub(name, 1, -4), 'metadata', metadata)
+else newMeta[string.sub(name, 1, -4)] = metadata
 end
 Files[name] = nil
-else
+end
+end
 
--- Start the file parsing here, coz we need it's metadata.
+-- FIXTHEM - Lua doesn't like modifying the thing you are pair()ing, like we want to do in the last loop.
+for name, file in pairs(newMeta) do
+if nil == Files[name] then toFile(name) end
+if nil == Files[name].metadata then Files[name].metadata = {} end
+for k, v in pairs(file) do
+if nil == Files[name].metadata[k] then Files[name].metadata[k] = v end
+end
+end
+
+-- Open the files and do the iniital cleanups.
+for name, file in pairs(Files) do
+if '.md' ~= string.sub(name, -3, -1) then
 -- print('Parsing ' .. name .. '.md')
 h = io.open(name .. '.md', 'r')
 if nil ~= h then
@@ -201,13 +241,31 @@ for name, file in pairs(Files) do
 end
 until fail == f0
 result = result .. string.sub(body, start)
+body = result
+end
+Files[name].body = body
+table.insert(Subs[Files[name].path].files, Files[name].bit)
+end
+end
 
-body = parse(result)
+-- Create an "everything" page, for URL links to every file.HTML.
+-- TODO - sort the list of pages.
+local bdy, h = Files['everything'].body, io.open('everything.md', 'a+')
+bdy = bdy .. '\n\n| page | converted | Foswiki | PmWiki | \n| ---- | --------- | ------- | ------ | '
+for name, file in pairs(Files) do
+local metadata = derefTable(Files[name].metadata, true)
+if 'everything' ~= name then
+local ln, fw, pw = 'DUNNO', '', ''
+if 'PmWiki' == metadata.ogWiki then pw = '[' .. metadata.ogURL .. '](' .. metadata.ogURL .. ')' end
+if 'Foswiki' == metadata.ogWiki then fw = '[' .. metadata.ogURL .. '](' .. metadata.ogURL .. ')' end
+if nil ~= file.bit then ln = file.bit
 end
-toFile(name, 'body', body)
-table.insert(Subs[path].files, bit)
+bdy = bdy .. '\n| ' .. name .. ' | [' .. ln .. '](<' .. name .. '.HTML>) | ' .. fw .. ' | ' .. pw .. ' |'
 end
 end
+h:write(bdy)
+h:close()
+toFile('everything', 'body', parse(bdy))
 
 
 
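The new code above appends a Markdown table to the everything.md page (seeded earlier in the run with "# All the pages"), one row per converted page, linking the local HTML beside the original Foswiki or PmWiki URL taken from the ogWiki/ogURL sidecar metadata. With the format string used here the generated file would come out roughly like this (names and URLs invented for illustration):

    # All the pages

    | page | converted | Foswiki | PmWiki | 
    | ---- | --------- | ------- | ------ | 
    | about/ReadMe | [ReadMe](<about/ReadMe.HTML>) | [https://example.org/foswiki/about/ReadMe](https://example.org/foswiki/about/ReadMe) |  |
    | main/HomePage | [HomePage](<main/HomePage.HTML>) |  | [https://example.org/pmwiki/?n=main.HomePage](https://example.org/pmwiki/?n=main.HomePage) |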
@@ -252,16 +310,20 @@ local linkFrom = function(source, dest)
 break
 end
 end
-depth = #(Subs[source].bits) - depth
-depth = depth + 1
-link = string.rep('../', depth)
-if (0 == depth) or (depth > #(Subs[dest].bits)) then
+
+if #(Subs[dest].bits) >= #(Subs[source].bits) then
+depth = #(Subs[source].bits)
 for i, v in ipairs(Subs[dest].bits) do
-if i >= depth then
+if i > depth then
 if '' ~= link then link = link .. '/' end
 link = link .. Subs[dest].bits[i]
 end
 end
+if '' ~= link then link = link .. '/' end
+else
+depth = #(Subs[source].bits) - depth
+depth = depth + 1
+link = string.rep('../', depth)
 end
 end
 return link
@@ -269,12 +331,28 @@ end
 
 
 
--- TODO - loop through Subs, doing whichPage and inheritance.
-
-
-
 -- More of this actually doing things nonsense.
 
+-- Loop through Subs, doing whichPage and inheritance.
+for name, sub in pairs(Subs) do
+sub.whichPage = whichPage(name)
+local metadata = sub.metadata
+for i, s in pairs(sub.subs) do
+local nm = i
+if '' ~= name then nm = name .. '/' .. i end
+ss = Subs[nm]
+for k, v in pairs(metadata) do
+if nil == ss.metadata[k] then
+if ('favicon' == k) or ('logo' == k) then
+ss.metadata[k] = linkFrom(nm, name) .. v
+else
+ss.metadata[k] = v
+end
+end
+end
+end
+end
+
 -- Loop through the files we found and actually create their HTML files.
 for name, file in pairs(Files) do
 local path, result = '', ''
@@ -285,42 +363,31 @@ for name, file in pairs(Files) do
 path = table.concat(bits, '/', 1, ln)
 
 if '' ~= body then
--- Copy any metadata found in parent directories.
-local pth = ''
-for i, d in ipairs(bits) do
-if '' ~= pth then pth = pth .. '/' end
-pth = pth .. d
-if nil ~= Subs[pth] then
-if nil ~= Subs[pth].metadata then
-for m, x in pairs(Subs[pth].metadata) do
-if nil == metadata[m] then
-metadata[m] = x
-end
-end
-end
+-- Inherit stuff from sub and global.
+local mdata = Subs[path].metadata
+for k, v in pairs(mdata) do
+if nil == file.metadata[k] then
+file.metadata[k] = v
 end
 end
--- Root directory needs to be handled separately, for now.
-if nil ~= Subs[''].metadata then
-for m, x in pairs(Subs[''].metadata) do if nil == metadata[m] then metadata[m] = x end end
-end
-
+Files[name].metadata = file.metadata
+metadata = derefTable(Files[name].metadata, true)
 for m, x in pairs(globalData) do if nil == metadata[m] then metadata[m] = x end end
 
--- Inherit these images from most recent parent directory that defines them.
-for n, y in ipairs{'favicon', 'logo'} do
-local pith = ''
-if nil ~= metadata[y] then
-local pth, found = '', false
-if (nil ~= Subs[''].metadata) and (nil ~= Subs[''].metadata[y]) then pith = '' ; found = true end
-for m, x in ipairs(bits) do
-if '' ~= pth then pth = pth .. '/' end
-pth = pth .. x
-if (nil ~= Subs[pth].metadata) and (nil ~= Subs[pth].metadata[y]) then pith = pth ; found = true end
-end
-if found then metadata[y] = linkFrom(path, pith) .. metadata[y]
-else metadata[y] = linkFrom(path, pith) .. globalData[y]
-end
-end
-end
+if nil ~= metadata.ogURL then
+body = body .. '\n\n---\n\n[Original page](' .. metadata.ogURL .. '), maybe you can edit it.\n'
+end
+
+-- Figure out this pages trail links.
+metadata.home = linkFrom(path, '') .. Subs[''].whichPage
+metadata.trail = ''
+for i, b in ipairs(bits) do
+local p = table.concat(bits, '/', 1, i)
+if i < #bits then
+metadata.trail = metadata.trail .. '<a href="' .. linkFrom(path, p) .. Subs[p].whichPage .. '">' .. b .. '</a> 👣 '
+linkFrom(path, table.concat(bits, '/', 1, i))
+else
+metadata.trail = metadata.trail .. b .. ' '
+end
+end
 
@@ -334,8 +401,7 @@ for name, file in pairs(Files) do
 for i, f in ipairs(subs) do
 local pth = path
 if '' ~= path then pth = path .. '/' end
-local fl = whichPage(pth .. f)
-metadata.header = metadata.header .. '<a href="' .. f .. '/' .. fl .. '">' .. f .. '</a> '
+metadata.header = metadata.header .. '<a href="' .. f .. '/' .. whichPage(pth .. f) .. '">' .. f .. '</a> '
 end
 
 -- Figure out this pages menu links.
@@ -360,18 +426,6 @@ for name, file in pairs(Files) do
 end
 end
 
--- Figure out this pages trail links.
-metadata.home = linkFrom(path, '') .. whichPage('')
-metadata.trail = ''
-for i, b in ipairs(bits) do
-if i < #bits then
-metadata.trail = metadata.trail .. '<a href="' .. linkFrom(path, table.concat(bits, '/', 1, i)) .. whichPage(b) .. '">' .. b .. '</a> 👣 '
-linkFrom(path, table.concat(bits, '/', 1, i))
-else
-metadata.trail = metadata.trail .. b .. ' '
-end
-end
-
 -- Figure out this pages footer links.
 if nil ~= metadata.pagehistory then metadata.history = '<p>Page <a href="' .. metadata.pagehistory .. '/' .. name .. '.md">history</a></p>' end
 if nil ~= metadata.sourcecode then metadata.footer = '<a href="' .. metadata.sourcecode .. '">source code</a> ' .. metadata.footer end
@@ -382,12 +436,12 @@ for name, file in pairs(Files) do
 -- Do our own metadata replacement, it's simple and works better.
 local temp = template
 local start = 1
-metadata.body = nil
 local f0, f1, token
 -- Toss the body in first, so the scan can deal with it to.
 f0, f1, token = string.find(temp, '%$(body)%$')
 if fail ~= f0 then
-temp = string.sub(temp, 1, f0 - 1) .. body .. string.sub(temp, f1 + 1)
+-- NOTE - this is where we actually parse the markup into HTML.
+temp = string.sub(temp, 1, f0 - 1) .. parse(body) .. string.sub(temp, f1 + 1)
 end
 -- The actual metadata replacement scan.
 result = ''
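The $body$ handling above is part of the template's simple $token$ placeholder scheme: default.template carries $title$, $favicon$ and friends, and the script substitutes them from the metadata table itself ("Do our own metadata replacement, it's simple and works better"), now parsing the Markdown body into HTML at that point too. A stripped-down sketch of that kind of token scan, not the script's exact code:

    -- Sketch only: substitute $token$ placeholders from a metadata table.
    local function fillTemplate(temp, metadata)
      local result, start = '', 1
      while true do
        local f0, f1, token = string.find(temp, '%$(%w+)%$', start)
        if nil == f0 then break end
        result = result .. string.sub(temp, start, f0 - 1) .. (metadata[token] or '')
        start = f1 + 1
      end
      return result .. string.sub(temp, start)
    end

    print(fillTemplate('<title>$title$</title>', {title = 'notYetAnotherWiki'}))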