-
Notifications
You must be signed in to change notification settings - Fork 2
/
dig.ts
704 lines (622 loc) · 21.8 KB
/
dig.ts
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
// Batch build and rebuild graphviz algorithmic diagrams as png for publication
// usage: sh seran-wiki.sh --allow-disclosure ../seran-dig/dig.ts
const { stat } = Deno;
import { readFileStr, writeFileStrSync, exists, existsSync } from "std/fs/mod.ts";
import * as wiki from "seran/wiki.ts";
import { asSlug } from "seran/wiki.ts";
import { ProcessStep } from "./step.ts";
import {
isAbsolute,
join,
basename,
normalize
} from "std/path/posix.ts";
// Client-side plugin scripts this server asks wiki clients to load.
export let plugins = [ "/client/process-step.mjs" ]
// Central request handler; routes and page items are registered on it below.
export let handler = new wiki.Handler()
// Serve generated png diagrams from the data directory.
// join() collapses any ".." segments in req.url, so reject any result that
// escapes the png root — otherwise a crafted url could read arbitrary files.
handler.route("^/png/", (req) => {
  let path = join('../seran-dig/data',req.url)
  if (!path.startsWith('../seran-dig/data/png/')) return false
  if (existsSync(path)) {
    wiki.serveFile(req,'image/png',path)
    return true
  }
  return false
})
// Serve static assets (the print-assembly html pages).
// join() collapses ".." segments, so confine the resolved path to the
// project directory before touching the filesystem.
handler.route("^/assets/", (req) => {
  let path = join('../seran-dig',req.url)
  if (!path.startsWith('../seran-dig/assets/')) return false
  if (existsSync(path)) {
    wiki.serveFile(req,'text/html',path)
    return true
  }
  return false
})
// Synthesize a simnet routing page on demand from the request url.
handler.route("^/route-", (req) => {
  const page = simnet(req.url)
  wiki.serveJson(req, page)
  return true
})
// Static landing page served by this outpost.
handler.items("Welcome Visitors", [
"Welcome to this [[Seran Wiki]] Federation outpost.\
From this page you can find who we are and what we do.",
"Pages about us.",
"[[Ward Cunningham]]",
"Pages where we do and share.",
"[[DIG Handbook]]"
])
// Table of contents for the batch-build workflow, in the order the
// operator is expected to follow it.
handler.items("DIG Handbook", [
"We consistency check the source possible translation problems.\
Then we bulk generate graphviz images for print publication as pdf.",
wiki.roster("Related Sites\n\ndig.wiki.innovateoregon.org\ndayton.fed.wiki\ndig.seran.c2.com\ndev2.wiki.innovateoregon.org\nthompson.wiki.innovateoregon.org\npath.ward.asia.wiki.org\nwellspring.fed.wiki\ncode.fed.wiki"),
"[[Handbook Source]] to be built",
"[[Run Batch Process]] when source changes",
"[[Diagrams Processed]] with lasting png files",
"[[Troubled Links]] that will be omitted",
"[[Troubled Pages]] to be corrected",
"[[Print to PDF]] when satisfied",
])
// Page exposing the process-step plugin button that triggers build()
// via the /rebuild route registered by ProcessStep below.
handler.items("Run Batch Process", [
"Press Start to rerun diagrams before pringing a new version.\
Return in a few minutes to confirm completion.",
wiki.item("process-step", { text: "Fetch and process all pages.", href: "/rebuild" }),
"Plugin needs help? Try this new client: \
[http://dig.seran.c2.com/index.html?page=welcome-visitors&page=dig-handbook site]",
])
// NOTE(review): this registration is repeated just below with a longer
// item list for the same "Print to PDF" page — presumably the later call
// wins. Confirm, then consider deleting this earlier one.
handler.items("Print to PDF", [
"We use an html script to complete the assembly of text and images for printing.\
See console log for source of missing page references.",
"dig.seran.c2.com: [http://dig.seran.c2.com/assets/dig.html dig.html]",
"dig.localhost:8000: [http://dig.localhost:8000/assets/dig.html dig.html]"
])
// Instructions for the final pdf assembly step, linking both the original
// and the slimmer debug-friendly html assembler served from /assets/.
handler.items("Print to PDF", [
"We use an html script to complete the assembly of text and images for printing.\
See console log for source of missing page references.",
"dig.seran.c2.com: [http://dig.seran.c2.com/assets/dig.html dig.html]",
"dig.localhost:8000: [http://dig.localhost:8000/assets/dig.html dig.html]",
"Here we have rewritten the pdf print to be much slimmer and easy to debug.",
"dig.seran.c2.com: [http://dig.seran.c2.com/assets/slim-dig.html slim-dig.html]",
"dig.localhost:8000: [http://dig.localhost:8000/assets/slim-dig.html slim-dig.html]",
"See also [[Links in JSON]], [[Pages in JSON]]"
])
// Index an array of page-like objects by the slug of their title.
function asmap (array) {
  const map = {}
  for (const each of array) {
    map[asSlug(each.title)] = each
  }
  return map
}
// Fetch a url and parse the body as JSON.
// Fails fast on HTTP errors so callers see the status code rather than a
// cryptic JSON parse error from an html error page.
function json (url) {
  return fetch(url)
    .then(res => {
      if (!res.ok) throw new Error(`fetch ${url} failed: ${res.status}`)
      return res.json()
    })
}
// Source federation site whose pages we crawl and render.
let site = 'https://dig.wiki.innovateoregon.org'
let sitemap = []        // sitemap.json entries from the source site
let pageinfo = {}       // sitemap entries indexed by slug
let pages = {}          // fetched pages ({title, story}) indexed by slug
let links = {}          // audited map: slug -> array of linked slugs
let wrote = []          // titles of pages whose diagram was rendered
let skipped = []        // titles of pages with no graphviz item
let missing = []        // linked titles absent from the source site
let unreachable = []    // sitemap titles not reachable from Welcome Visitors
let extras = []         // slugs listed on dig-extras, omitted from the book
let trouble = {}        // trouble category -> array of offending titles
// Completion time of the last build, recovered from a marker file below.
let lastrun = new Date(0)
// Create a directory if it does not already exist, logging the creation.
// The exists()/mkdir() pair is not atomic, so a concurrent creator could
// win the race; treat AlreadyExists as success instead of crashing.
async function mkdir (path) {
  if (!await exists(path)) {
    console.log(`Creating: ${path}`)
    try {
      await Deno.mkdir(path)
    } catch (e) {
      if (!(e instanceof Deno.errors.AlreadyExists)) throw e
    }
  }
}
// Working directories for generated dot source and png output.
let dataDir = "../seran-dig/data"
await mkdir(`${dataDir}`)
await mkdir(`${dataDir}/dot`)
await mkdir(`${dataDir}/png`)
// Recover the time of the last completed build from the marker file.
if (await exists(`${dataDir}/lastrun`)) {
  let info = await Deno.stat(`${dataDir}/lastrun`)
  // mtime is already a Date with millisecond precision; the previous code
  // multiplied getTime() by 1000, pushing lastrun centuries into the future.
  // (Also avoid shadowing the `stat` binding destructured from Deno above.)
  if (info.mtime) lastrun = new Date(info.mtime.getTime())
}
// L I V E   R E P O R T S
// Each handler.items() below computes its report lazily from the module
// state left by the most recent build() run.
// Compare the source site's last update against our last build time.
handler.items('Handbook Source', async () => {
let sitemap = await json(`${site}/system/sitemap.json`)
return [
"Before we start we check to see if our source has been updated since our last build.",
"This Report As Of",
`${new Date()}`,
"Sitemap information.",
`[${site} ${site}]`,
`${sitemap.length} pages`,
"Last Site Update:",
`${new Date(sitemap.reduce((m,i) => Math.max(m,i.date),0))}`,
"Last Build Finished",
`${lastrun}`
]})
// Report link-graph problems collected by the reachability walk in build().
handler.items('Troubled Links', () => [
"Having fetched every page we then check that each is accessible by links from Welcome Visitors and that all links can be resolved by these pages.",
"Pages missing from source site",
missing.map(t=>`[[${t}]]`).join(', '),
"Pages unreachable from welcome page",
unreachable.map(t=>`[[${t}]]`).join(', '),
"Pages to be omitted as extras",
extras.map(t=>`[[${pages[t].title}]]`).join(', ')
])
// Report which pages produced a png during the last build and which had
// no graphviz item to render.
handler.items('Diagrams Processed', () => [
"We run one of two versions of preview-next-diagram choosing left-right or top-bottom and save the resulting images as png.",
"Pages with diagrams",
wrote.map(t=>`[[${t}]]`).join(', '),
"Pages without diagrams",
skipped.map(t=>`[[${t}]]`).join(', ')
])
// Render one trouble category as a heading plus links to the offending
// pages, leaving out any page designated as an extra.
function troubled_pages (key) {
  const mentions = trouble[key]
    .filter(title => !extras.includes(asSlug(title)))
    .map(title => `[[${title}]]`)
  return [key, mentions.join(', ')]
}
// Report markup problems tallied per category during build()'s page walk.
handler.items('Troubled Pages', () => {
return [
"We examine the markup of every referenced page and report things that look like trouble here.",
...Object.keys(trouble).flatMap(troubled_pages)
]
})
// Metamodel description consumed by the Metamodel plugin for both JSON
// export pages below. (Template body is data — keep byte-identical.)
let linkmodel =
`[] MAX 1000
{} NODE Slug
[] MAX 20
link
`
// Export the audited link graph via the Json plugin.
handler.items('Links in JSON', () => [
"We export this data using the Json and Metamodel plugins.",
wiki.item('json', {text:'audited map of slugs to array of linked slugs', resource:links}),
wiki.item('metamodel', {text: linkmodel})
])
// Export partial pages in print order: cover, front matter (dig-index and
// its links), a generated Garden index page, then the remaining pages.
// NOTE: mutates the shared `pages` map, inserting synthetic 'cover-png'
// and 'garden' entries each time the report is rendered.
handler.items('Pages in JSON', () => {
let resource = []
pages['cover-png'] = {title:'Cover.png', insert:true, png:'http://path.ward.asia.wiki.org/assets/page/production-tools/Cover.png'}
if (Object.keys(links).length) {
let language = ['cover-png','welcome-visitors','dig-index',...links['dig-index']]
let garden = Object.keys(links).filter(link => !(language.includes(link)||extras.includes(link))).sort()
let text = garden.map(slug=>`[[${pages[slug].title}]]`).join("<br>\n")
pages['garden'] = {title:'Garden',story:[{type:'html',text}]}
let book = [language,'garden',garden].flat().filter(slug => !extras.includes(slug))
resource = book.map(slug => pages[slug])
}
return [
"We export partial pages in print order.",
wiki.item('json', {text:'ordered array of page titles and stories.', resource}),
wiki.item('metamodel', {text: linkmodel})
]}
)
// http://path.ward.asia.wiki.org/assets/page/production-tools/images/designed-ingenuity-dig.png
// DOT-dialect template expanded by makedot() for every diagram page.
// build() rewrites 'TB' to 'LR' when the page's graphviz item mentions
// "tall". (Template body is markup data — keep byte-identical.)
let text =
`DOT strict digraph
rankdir=TB
node [style=filled fillcolor=white penwidth=5 color=black]
HERE NODE
node [style=filled fillcolor=white]
WHERE /^Next/
LINKS HERE -> NODE
node [style=filled fillcolor=white]
HERE NODE
WHERE /^Next/
LINKS HERE -> NODE
node [style=filled fillcolor=white penwidth=3 color=black]
LINKS HERE -> NODE
node [style=filled fillcolor=white penwidth=1 color=black]
HERE NODE
LINKS HERE -> NODE`
// Shell equivalent of the render/upload steps, kept for reference:
// (cd data
// for i in *
// do echo $i
// cat $i | \
// dot -Tpng | \
// ssh asia 'cat > .wiki/path.ward.asia.wiki.org/assets/page/production-tools/images/'$i'.png'
// done)
// Register build() behind the /rebuild route used by the process-step plugin.
let rebuild = new ProcessStep('rebuild', false, build).register(handler)
// Full rebuild: fetch every page from the source site, render graphviz
// diagrams to png, upload them, then audit the link graph for missing and
// unreachable pages. Progress is reported via the ProcessStep plugin and
// results are left in the module-level report state.
async function build () {
// reset report state from any previous run
links = {}
wrote = []
skipped = []
missing = []
unreachable = []
extras = []
trouble = {}
lastrun = new Date()
sitemap = await json(`${site}/system/sitemap.json`)
try {
await rebuild.step(`fetch ${sitemap.length} pages`)
pageinfo = asmap(sitemap)
// fetch all pages in parallel, keeping only title and story
pages = await Promise.all(sitemap.map(each => json(`${site}/${each.slug}.json`)))
.then(all => {return asmap(all.map(p => ({title:p.title, story:p.story})))})
await rebuild.step(`render diagrams as png`)
for (let slug in pages) {
let page = pages[slug]
let graphviz = page.story.find(i => i.type == 'graphviz')
if (graphviz) {
// "tall" diagrams render left-right instead of the default top-bottom
let markup = graphviz.text.match(/tall/) ? text.replace('TB','LR') : text
let dot = await makedot(page, {type:'graphviz', text:markup})
writeFileStrSync(`${dataDir}/dot/${slug}.dot`, dot)
// shell out to graphviz to turn the dot source into a png
let proc = Deno.run({cmd:["dot","-Tpng", `${dataDir}/dot/${slug}.dot`,`-o${dataDir}/png/${slug}.png`]})
let status = await proc.status()
if (!status.success) console.log('dot',slug,status)
wrote.push(page.title)
} else {
skipped.push(page.title)
}
}
await rebuild.step('upload png files')
try {
// publish rendered images to the public site via rsync over ssh
let pubsite='path.ward.asia.wiki.org'
let assets='page/production-tools/images'
let proc2 = Deno.run({cmd:["rsync", "-avz", `${dataDir}/png/`, `asia:.wiki/${pubsite}/assets/${assets}/`]})
let status2 = await proc2.status()
if (!status2.success) console.log('rsync',status2)
} catch (e) {
console.log('rsync catch', e)
}
await rebuild.step('find missing or unreachable titles')
// breadth-first walk of [[links]] starting from Welcome Visitors
let more = ["Welcome Visitors"]
let done = []
function visit(title) {
if (!done.includes(title)) {
done.push(title)
let slug = asSlug(title)
let page = pages[slug]
// record a trouble category for this page, once per title
const tally = (m) => {
trouble[m]=trouble[m]||[];
if(!trouble[m].includes(title)) trouble[m].push(title)
}
// harvest [[internal links]] from one item, queueing unseen titles
const resolve = (item) => {
for (let link of (item.text||'').matchAll(/\[\[(.+?)\]\]/g)) {
if (!done.includes(link[1])) {
more.push(link[1])
}
if (pageinfo[asSlug(link[1])]) {
links[slug].push(asSlug(link[1]))
} else {
tally(`Links to missing '${link[1]}' omitted`)
}
}
}
if (page) {
links[slug] = []
for (let item of page.story||[]) {
if (['paragraph','markdown'].includes(item.type)) {
resolve(item)
} else if (['html'].includes(item.type)) {
// html items survive but any raw tags are flagged for review
for (let tag of item.text.matchAll(/<([A-Za-z]+)\b/g)) {
tally(`Pages with '${tag[1]}' tag in 'html' item`)
}
resolve(item)
} else if(['graphviz'].includes(item.type)) {
if (!(item.text.startsWith('DOT FROM'))) {
tally(`Pages with unexpected '${item.type}' items`)
}
} else {
// any other item type is dropped from the print edition
tally(`Pages where '${item.type}' items omitted`)
}
}
} else {
if (!missing.includes(title)) missing.push(title)
}
}
}
while(more.length) {
visit(more.shift())
}
// anything in the sitemap we never visited is unreachable from welcome
for (let info of sitemap) {
if (!done.includes(info.title)) unreachable.push(info.title)
}
extras = links['dig-extras'] || []
// touch the marker file recording a successful build time
writeFileStrSync(`${dataDir}/lastrun`, '')
lastrun = new Date()
} catch(e) {
console.log('exception in build', e)
}
}
// Expand our DOT-dialect markup (see the template above) into real
// graphviz dot source for the given page. Text that does not start with
// "DOT (strict) (di)graph" is returned unchanged. Helper functions are
// declared below inside this function and hoisted.
async function makedot(page, item) {
// `m` is shared scratch for regex matches by the nested evaluate() too
var m
var text = item.text
// if (m = text.match(/^DOT FROM ([a-z0-9-]+)($|\n)/)) {
// let site = $item.parents('.page').data('site')||location.host
// let slug = m[1]
// // let page = $item.parents('.page').data('data')
// let poly = await polyget({name: slug, site, page})
// if (page = poly.page) {
// let redirect = page.story.find(each => each.type == 'graphviz')
// if (redirect) {
// text = redirect.text
// }
// }
// if (text == item.text) {
// return trouble("can't do", item.text)
// }
// }
if (m = text.match(/^DOT ((strict )?(di)?graph)\n/)) {
// parse the indented command lines into a nested array tree
var root = tree(text.split(/\r?\n/), [], 0)
// console.log('root',root)
// drop the "DOT ..." header line; m[1] keeps the graph kind
root.shift()
// var $page = $item.parents('.page')
// var here = $page.data('data')
var here = page
// evaluation context threaded through the interpreter below
var context = {
graph: m[1],
name: here.title,
// site: $page.data('site')||location.host,
site: site,
page: here,
want: here.story.slice()
}
var dot = await evaluate(root, context, [])
// console.log('dot', dot)
return `${context.graph} {${dot.join("\n")}}`
} else {
return text
}
// Parse indentation-structured lines into nested arrays: lines at the
// current indent become strings in `here`; a deeper run becomes a child
// array; a shallower line ends this level. Consumes `lines` destructively.
function tree(lines, here, indent) {
  for (;;) {
    if (!lines.length) return here
    const [, pad, command] = lines[0].match(/( *)(.*)/)
    if (pad.length == indent) {
      here.push(command)
      lines.shift()
    } else if (pad.length > indent) {
      const child = []
      here.push(child)
      tree(lines, child, pad.length)
    } else {
      return here
    }
  }
}
// Quote a node name for dot, folding runs of spaces into newlines so
// multi-word titles wrap inside the rendered node.
function quote (string) {
  const label = string.replace(/ +/g, '\n')
  return `"${label}"`
}
// Abort diagram generation, carrying both the complaint and the
// offending markup in the error message.
function trouble (text, detail) {
  throw new Error(text + "\n" + detail)
}
// List candidate sites for page resolution: every site mentioned in the
// journal plus the implicit fallbacks, most recent first, deduplicated
// keeping the first occurrence.
// usage: collaborators(page.journal, [site, item.site, location.host])
function collaborators (journal, implicit) {
  const sites = []
  for (const action of journal) {
    if (action.site) sites.push(action.site)
  }
  sites.push(...implicit)
  sites.reverse()
  const seen = []
  for (const site of sites) {
    if (!seen.includes(site)) seen.push(site)
  }
  return seen
}
// async function probe (site, slug) {
// if (site === 'local') {
// const localPage = localStorage.getItem(slug)
// if (!localPage) {
// throw new Error('404 not found')
// }
// return JSON.parse(localPage)
// } else {
// // get returns a promise from $.ajax for relevant site adapters
// return wiki.site(site).get(`${slug}.json`, () => null)
// }
// }
// Resolve a page by name: return the context's own page when the titles
// match, otherwise look the slug up in the prefetched `pages` map.
// (The original client-side multi-site probe is retired; batch mode has
// every page in memory already.)
async function polyget (context) {
  if (context.name == context.page.title) {
    return {site: context.site, page: context.page}
  }
  const slug = asSlug(context.name)
  return {site: context.site, page: pages[slug]}
}
// function graphData(here, text) {
// // from https://github.com/WardCunningham/wiki-plugin-graph/blob/fb7346083870722a7fbec6a8dc1903eb93ff322c/client/graph.coffee#L10-L31
// var graph, left, line, merge, op, right, token, tokens, _i, _j, _len, _len1, _ref;
// merge = function(arcs, right) {
// if (arcs.indexOf(right) === -1) {
// return arcs.push(right);
// }
// };
// graph = {};
// left = op = right = null;
// _ref = text.split(/\n/);
// for (_i = 0, _len = _ref.length; _i < _len; _i++) {
// line = _ref[_i];
// tokens = line.trim().split(/\s*(-->|<--|<->)\s*/);
// for (_j = 0, _len1 = tokens.length; _j < _len1; _j++) {
// token = tokens[_j];
// if (token === '') {
// } else if (token === '-->' || token === '<--' || token === '<->') {
// op = token;
// } else {
// right = token === 'HERE' ? here : token;
// graph[right] || (graph[right] = []);
// if ((left != null) && (op != null) && (right != null)) {
// switch (op) {
// case '-->':
// merge(graph[left], right);
// break;
// case '<--':
// merge(graph[right], left);
// break;
// case '<->':
// merge(graph[left], right);
// merge(graph[right], left);
// }
// }
// left = right;
// op = right = null;
// }
// }
// }
// return graph;
// }
// Interpret one level of the parsed DOT-dialect tree, appending dot
// statements to `dot`. Child subtrees are queued in `deeper` and
// evaluated afterwards so siblings at one level finish before descent.
// `pc` walks the tree; nest() grabs an immediately-following subtree,
// peek() consumes an expected keyword. Note: `m` is the scratch variable
// hoisted in the enclosing makedot().
async function evaluate(tree, context, dot) {
let deeper = []
var pc = 0
while (pc < tree.length) {
let ir = tree[pc++]
const nest = () => (pc < tree.length && Array.isArray(tree[pc])) ? tree[pc++] : []
const peek = (keyword) => pc < tree.length && tree[pc]==keyword && pc++
if (Array.isArray(ir)) {
// bare subtree with no preceding command: evaluate in same context
deeper.push({tree:ir, context})
} else if (ir.match(/^[A-Z]/)) {
// console.log('evaluate',ir)
if (ir.match(/^LINKS/)) {
// emit an edge per [[link]] found in the wanted items
let text = context.want.map(p=>p.text).join("\n")
let links = (text.match(/\[\[.*?\]\]/g)||[]).map(l => l.slice(2,-2))
let tree = nest()
links.map((link) => {
if (m = ir.match(/^LINKS HERE (->|--) NODE/)) {
dot.push(`${quote(context.name)} ${m[1]} ${quote(link)}`)
} else
if (m = ir.match(/^LINKS NODE (->|--) HERE/)) {
dot.push(`${quote(link)} ${m[1]} ${quote(context.name)}`)
} else
if (!ir.match(/^LINKS$/)) {
trouble("can't do link", ir)
}
if (tree.length) {
// recurse into each linked page; start its fetch eagerly
let new_context = Object.assign({},context,{name:link})
new_context.promise = polyget(new_context)
deeper.push({tree, context:new_context})
}
})
} else
// if (ir.match(/^GRAPH$/)) {
// for (let item of context.want) {
// if (item.type == 'graph') {
// let graph = graphData(context.name, item.text)
// let kind = context.graph.match(/digraph/) ? '->' : '--'
// for (let here in graph) {
// dot.push(`${quote(here)}`)
// for (let there of graph[here]) {
// dot.push(`${quote(here)} ${kind} ${quote(there)}`)
// }
// }
// }
// }
// } else
if (ir.match(/^HERE/)) {
// emit a node for the current page, if it resolves
let tree = nest()
let page = null
let site = ''
try {
if(context.promise) {
// resolution was started eagerly by LINKS above
let poly = await context.promise
site = poly.site
page = poly.page
delete context.promise
} else {
let poly = await polyget(context)
site = poly.site
page = poly.page
}
} catch (err) {}
if (page) {
if (ir.match(/^HERE NODE$/)) {
dot.push(quote(context.name))
} else
if (ir.match(/^HERE NODE \w+/)) {
// dotted edge from a labeled pseudo-node to the page node
let kind = context.graph.match(/digraph/) ? '->' : '--'
dot.push(`${quote(ir)} ${kind} ${quote(context.name)} [style=dotted]`)
} else
if (!ir.match(/^HERE$/)) {
trouble("can't do here", ir)
}
deeper.push({tree, context:Object.assign({},context,{site, page, want:page.story})})
}
if (peek('ELSE')) {
// ELSE subtree runs only when the page failed to resolve
let tree = nest()
if (!page) {
deeper.push({tree, context})
}
}
} else
if (ir.match(/^WHERE/)) {
// narrow context.want by regex, pagefold section, or attribute
let tree = nest()
var want = context.want
if (m = ir.match(/\/.*?\//)) {
let regex = new RegExp(m[0].slice(1,-1))
want = want.filter(item => (item.text||'').match(regex))
} else if (m = ir.match(/ FOLD ([a-z_-]+)/)) {
// keep items from the named pagefold to the next fold
var within = false
want = want.filter((item) => {
if (item.type == 'pagefold') {
within = item.text == m[1]
}
return within
})
} else if (m = ir.match(/[a-z_]+/)) {
let attr = m[0]
want = want.filter(item => item[attr])
} else trouble("can't do where", ir)
deeper.push({tree, context:Object.assign({},context,{want})})
} // else
// if (ir.match(/^FAKE/)) {
// if (m = ir.match(/^FAKE HERE (->|--) NODE/)) {
// dot.push(`${quote(context.name)} ${m[1]} ${quote('post-'+context.name)}`)
// } else
// if (m = ir.match(/^FAKE NODE (->|--) HERE/)) {
// dot.push(`${quote('pre-'+context.name)} ${m[1]} ${quote(context.name)}`)
// } else trouble("can't do fake", ir)
// } else
// if (ir.match(/^LINEUP$/)) {
// let tree = nest()
// try {
// let $page = $item.parents('.page')
// let $lineup = $(`.page:lt(${$('.page').index($page)})`)
// $lineup.each((i,p) => {
// let site = $(p).data('site')||location.host
// let name = $(p).data('data').title
// deeper.push({tree, context:Object.assign({},context,{site, name})})
// })
// } catch {
// throw new Error("can't do LINEUP yet")
// }
// } else trouble("can't do", ir)
} else {
// console.log('evaluate',ir.toString())
// lowercase lines (node attributes etc.) pass through verbatim
dot.push(ir)
}
}
// depth-first descent into queued subtrees, in discovery order
for (var i=0; i<deeper.length; i++) {
let child = deeper[i]
await evaluate(child.tree, child.context, dot)
}
return dot
}
}
// Preload the simnet station table, indexed by zip; route pages resolve
// empty until it arrives. A rejection handler keeps a network failure
// from surfacing as an unhandled promise rejection at startup.
const stations = {};
fetch('http://simnet.ward.asia.wiki.org/assets/pages/next-hop-routing/data.json')
  .then(res => res.json())
  .then(json => { for (let s of json) stations[s.zip] = s })
  .catch(err => console.log('simnet station fetch failed', err))
// Extract one decimal digit of a three-digit zip: col 0 is the hundreds
// place, col 2 the ones place.
const digit = (zip, col) => {
  const divisor = 10 ** (2 - col)
  return Math.floor(zip / divisor) % 10
}
// Next-hop routing: compare zip digits most-significant first; the first
// mismatching digit selects the entry in this station's routing table
// that moves toward the destination. Returns null on arrival.
const hop = (from, toward) => {
  for (const row of [0, 1, 2]) {
    const want = digit(toward, row)
    if (digit(from, row) != want) {
      return stations[from].route[row][want - 1]
    }
  }
  return null
}
// Build a wiki page tracing the simnet route between two stations: a
// paragraph naming the endpoints plus a map item with one marker per hop.
function simnet(url) {
// url looks like "/route-123-456.json"; numeric coercion leaves the
// non-numeric tokens as NaN in the unused key/suffix slots
let [key, from, to, suffix] = url.split(/-|\./).map(z => z*1)
let title = `Route ${from}-${to}`
let text = `From ${stations[from].city} to ${stations[to].city}.`
let story = []
story.push({type:'paragraph', text})
let markup = []
// follow next-hop routing until arrival (hop returns null at destination)
while (from) {
let at = stations[from]
markup.push(`${at.lat},${at.lon} ${at.city}`)
from = hop(from, to)
}
story.push({type:'map', text:markup.join("\n")})
return {title, story}
}