-
Notifications
You must be signed in to change notification settings - Fork 1
/
Copy pathwiki.go
389 lines (324 loc) · 10.3 KB
/
wiki.go
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
package main
import (
"html/template"
"net/http"
"os"
"regexp"
"log"
"strings"
"time"
"strconv"
md "github.com/JohannesKaufmann/html-to-markdown"
"github.com/microcosm-cc/bluemonday"
"github.com/russross/blackfriday"
)
// Package-level state, populated once in main() from the loaded config
// before the HTTP server starts serving.
var wikiDir string                       // root directory of wiki .md files; always "/"-terminated
var tagDir string                        // wikiDir + "tags/" - per-page tag files
var pubDir string                        // wikiDir + "pub/" - per-page published markers
var ekey []byte                          // symmetric key passed to encrypt() for encrypted pages
var encryptionFlag = []byte("ENCRYPTED") // prefix written before an encrypted page body on disk
var specialDir []string                  // subdirectories of wikiDir that are not wiki pages ("tags", "pub")
// basePage holds the fields shared by every rendered view: the page
// title and the data for the navigation area.
type basePage struct {
	Title string
	Nav   nav
}
// wikiPage is the view/persistence model for a single wiki page.
type wikiPage struct {
	Body      template.HTML // markdown on load/save; sanitized HTML after convertMarkdown
	Tags      string        // raw tag string as entered in the edit form ("wikitags")
	TagArray  []string      // presumably Tags split into individual tags by the storage/nav layer - not set here
	Created   string        // creation timestamp for display - populated by storage, format unconfirmed
	Modified  string        // last-modified timestamp for display - populated by storage, format unconfirmed
	Published bool          // when true a marker file exists under pubDir and the page is publicly visible
	Encrypted bool          // when true the body is encrypted with ekey before storing
	basePage
	Index []string // NOTE(review): never written in this file - presumably filled by the storage layer
}
// searchPage is the view model for the search results template.
type searchPage struct {
	basePage
	Results []QueryResults
}
// mdConverter converts the document at a URL into markdown. It is
// declared here, at the consumer, so scrapeHandler can accept any
// implementation (the html-to-markdown Converter satisfies it).
type mdConverter interface {
	ConvertURL(string) (string, error)
}
// checkErr panics on a non-nil error. It is intended only for
// unrecoverable failures during startup in main.
func checkErr(err error) {
	if err == nil {
		return
	}
	panic(err)
}
// getPDFFilename returns the on-disk path of the PDF called name inside
// folder. The folder is expected to already end with its separator.
func getPDFFilename(folder, name string) (path string) {
	path = folder + name
	return
}
// getWikiFilename returns the on-disk path of the markdown source for
// page name inside folder (folder already carries its trailing "/").
func getWikiFilename(folder, name string) (path string) {
	path = folder + name + ".md"
	return
}
// getWikiTagsFilename returns the path of the tags file for the page,
// stored under the package-level tagDir.
func getWikiTagsFilename(name string) (path string) {
	path = tagDir + name
	return
}
// getWikiPubFilename returns the path of the published-marker file for
// the page, stored under the package-level pubDir.
func getWikiPubFilename(name string) (path string) {
	path = pubDir + name
	return
}
// save persists the page body, its tags, and its published marker to the
// backing store. When p.Encrypted is set the body is encrypted with the
// package key and prefixed with encryptionFlag before storing.
func (p *wikiPage) save(s storage) error {
	filename := getWikiFilename(wikiDir, p.Title)
	body := []byte(p.Body)
	if p.Encrypted {
		enc, err := encrypt(body, ekey)
		if err != nil {
			return err
		}
		// Build the flagged payload in a fresh buffer. Appending directly to
		// the package-level encryptionFlag slice (as `append(encryptionFlag,
		// ...)`) risks clobbering its backing array if it ever gains spare
		// capacity, and silently aliases package state.
		body = make([]byte, 0, len(encryptionFlag)+len(enc))
		body = append(body, encryptionFlag...)
		body = append(body, enc...)
	}
	if err := s.storeFile(filename, body); err != nil {
		return err
	}
	tagsfile := getWikiTagsFilename(p.Title)
	if err := s.storeFile(tagsfile, []byte(p.Tags)); err != nil {
		return err
	}
	pubfile := getWikiPubFilename(p.Title)
	if p.Published {
		// Publishing: an empty marker file signals the page is public.
		return s.storeFile(pubfile, nil)
	}
	// Unpublishing: remove the marker. A "does not exist" error is the
	// common case (page was never published) and is not a failure.
	if err := s.deleteFile(pubfile); err != nil && !os.IsNotExist(err) {
		return err
	}
	return nil
}
// codeClassRe whitelists class="language-..." attributes on <code> so
// client-side syntax highlighting survives sanitization. Compiled once
// at package init instead of on every request.
var codeClassRe = regexp.MustCompile("^language-[a-zA-Z0-9]+$")

// convertMarkdown renders the page body from markdown to sanitized HTML.
// The error parameter lets callers chain it directly around a storage
// lookup, e.g. convertMarkdown(s.getPage(p)); on a non-nil incoming
// error the page is returned untouched.
func convertMarkdown(page *wikiPage, err error) (*wikiPage, error) {
	if err != nil {
		return page, err
	}
	p := bluemonday.UGCPolicy()
	p.AllowAttrs("class").Matching(codeClassRe).OnElements("code")
	// Normalize Windows line endings. "\r\n" is a literal, so a plain
	// string replace avoids recompiling a regexp per call.
	page.Body = template.HTML(strings.ReplaceAll(string(page.Body), "\r\n", "\n"))
	unsafe := blackfriday.Run([]byte(page.Body),
		blackfriday.WithExtensions(
			blackfriday.CommonExtensions|
				blackfriday.HardLineBreak|
				blackfriday.HeadingIDs|
				blackfriday.AutoHeadingIDs,
		),
	)
	page.Body = template.HTML(p.SanitizeBytes(unsafe))
	return page, nil
}
// viewHandler renders a wiki page. A markdown source is preferred; if
// none loads, a PDF of the same name is tried; failing both, the user
// is redirected to the editor to create the page.
func viewHandler(w http.ResponseWriter, r *http.Request, p *wikiPage, s storage) {
	page, err := convertMarkdown(s.getPage(p))
	if err == nil {
		// Markdown loaded and converted: link up {{WikiWord}} spans.
		page.Body = template.HTML(parseWikiWords([]byte(page.Body)))
		renderTemplate(w, "view", page)
		return
	}
	// No markdown source - fall back to a PDF with the same name.
	page, err = s.checkForPDF(page)
	if err != nil {
		// Neither markdown nor PDF exists: offer to create the page.
		http.Redirect(w, r, "/wiki/edit/"+page.Title, http.StatusFound)
		return
	}
	renderTemplate(w, "view", page)
}
// editHandler shows the edit form for a page. The load error is
// deliberately ignored: a missing page simply opens an empty editor.
func editHandler(w http.ResponseWriter, r *http.Request, p *wikiPage, s storage) {
	page, _ := s.getPage(p)
	renderTemplate(w, "edit", page)
}
// makeSearchHandler builds the handler for /wiki/search/. The search
// term comes from the "term" query parameter; an empty term is a 404.
func makeSearchHandler(fn navFunc, s storage) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		term := r.URL.Query().Get("term")
		if term == "" {
			http.NotFound(w, r)
			return
		}
		page := &searchPage{
			basePage: basePage{Title: "Search", Nav: fn(s)},
			Results:  ParseQueryResults(s.searchPages(wikiDir, term)),
		}
		renderTemplate(w, "search", page)
	}
}
// simpleHandler builds a handler that renders the named template with
// freshly computed navigation data on every request.
func simpleHandler(page string, fn navFunc, s storage) http.HandlerFunc {
	return func(w http.ResponseWriter, _ *http.Request) {
		renderTemplate(w, page, fn(s))
	}
}
// saveHandler persists the posted edit form as wiki page `wiki` and
// redirects to the view on success. It returns the submitted tag string
// (empty on failure) so the caller can refresh any tag cache.
func saveHandler(w http.ResponseWriter, r *http.Request, wiki string, s storage) string {
	// "\r\n" is a literal, so strings.ReplaceAll does the job without
	// compiling a regexp on every request as the old code did.
	body := strings.ReplaceAll(r.FormValue("body"), "\r\n", "\n")
	p := wikiPage{basePage: basePage{Title: wiki}, Body: template.HTML(body), Tags: r.FormValue("wikitags")}
	// Checkboxes arrive as "on" when ticked and are absent otherwise.
	p.Published = r.FormValue("wikipub") == "on"
	p.Encrypted = r.FormValue("wikicrypt") == "on"
	if err := p.save(s); err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return ""
	}
	http.Redirect(w, r, "/wiki/view/"+p.Title, http.StatusFound)
	return p.Tags
}
// deleteHandler removes a page and its tags file, then redirects to the
// wiki home page.
func deleteHandler(w http.ResponseWriter, r *http.Request, p *wikiPage, s storage) {
	// The page file itself must delete cleanly; any failure is fatal.
	if err := s.deleteFile(getWikiFilename(wikiDir, p.Title)); err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
	// The tags file is optional, so a missing file is not an error.
	err := s.deleteFile(getWikiTagsFilename(p.Title))
	if err != nil && !os.IsNotExist(err) {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
	http.Redirect(w, r, "/wiki", http.StatusFound)
}
// moveHandler renames page p.Title to the name given in the "to" form
// parameter, moving both the page file and its tags file, then
// redirects to the new page.
func moveHandler(w http.ResponseWriter, r *http.Request, p *wikiPage, s storage) {
	from := getWikiFilename(wikiDir, p.Title)
	to := r.FormValue("to")
	if len(to) == 0 {
		http.Error(w, "Form param 'to' needs setting", http.StatusBadRequest)
		// BUG FIX: previously execution fell through here and moved the
		// page to an empty name after reporting the 400.
		return
	}
	tofile := getWikiFilename(wikiDir, to)
	if err := s.moveFile(from, tofile); err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
	// Tags are optional: a page that never had tags has no file to move.
	tagsfile := getWikiTagsFilename(p.Title)
	totags := getWikiTagsFilename(to)
	if err := s.moveFile(tagsfile, totags); err != nil && !os.IsNotExist(err) {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
	http.Redirect(w, r, "/wiki/view/"+to, http.StatusFound)
}
// scrapeHandler fetches the "url" form parameter, converts it to
// markdown, and saves it as a new page named by the "target" parameter
// with the fixed tag "Scraped".
func scrapeHandler(w http.ResponseWriter, r *http.Request, mdc mdConverter, st storage) {
	target := r.FormValue("target")
	markdown, err := mdc.ConvertURL(r.FormValue("url"))
	if err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
	// TODO Pass in file store and then when convert is called save to a new file
	// Need a means of determining where to save the file to...perhaps whatever is
	// specified - that should work for folders, etc already :-)
	page := wikiPage{basePage: basePage{Title: target}, Body: template.HTML(markdown), Tags: "Scraped"}
	if err := page.save(st); err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
	http.Redirect(w, r, "/wiki/view/"+target, http.StatusFound)
}
// templates holds every view used by renderTemplate. Parsing happens at
// package init via Must, so a missing or malformed template file fails
// fast at startup rather than on first request.
var templates = template.Must(template.ParseFiles(
	"views/edit.html",
	"views/view.html",
	"views/pub.html",
	"views/pubhome.html",
	"views/home.html",
	"views/list.html",
	"views/search.html",
	"views/index.html",
	"views/footer.html",
	"views/recents.html",
	"views/leftnav.html"))
// renderTemplate executes the named view template (tmpl + ".html") into
// the response, reporting any execution error as a 500.
func renderTemplate(w http.ResponseWriter, tmpl string, p interface{}) {
	if err := templates.ExecuteTemplate(w, tmpl+".html", p); err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
	}
}
// validPath captures the action (edit|save|view|search|delete|move|scrape)
// and the wiki page name from a request path. The name class permits "/",
// so pages may live in sub-folders.
var validPath = regexp.MustCompile(`^/wiki/(edit|save|view|search|delete|move|scrape)/([a-zA-Z0-9\.\-_ /]*)$`)
// makeHandler adapts a page-oriented handler into an http.HandlerFunc:
// it resolves the wiki page name, builds the page shell with navigation
// data, and delegates to fn.
func makeHandler(fn func(http.ResponseWriter, *http.Request, *wikiPage, storage), navfn navFunc, s storage) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		// An explicit ?wword= query parameter wins; otherwise the page
		// name is extracted from the URL path.
		title := r.URL.Query().Get("wword")
		if title == "" {
			m := validPath.FindStringSubmatch(r.URL.Path)
			if m == nil {
				http.NotFound(w, r)
				return
			}
			title = m[2]
		}
		fn(w, r, &wikiPage{basePage: basePage{Title: title, Nav: navfn(s)}}, s)
	}
}
// makeScrapeHandler closes the converter and store over a scrape-style
// handler, producing a plain http.HandlerFunc.
func makeScrapeHandler(fn func(http.ResponseWriter, *http.Request, mdConverter, storage), mdc mdConverter, fs storage) http.HandlerFunc {
	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		fn(w, r, mdc, fs)
	})
}
// processSave adapts a save-style handler (which takes the page name as
// a string) into an http.HandlerFunc, 404ing on malformed paths.
func processSave(fn func(http.ResponseWriter, *http.Request, string, storage) string, s storage) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		match := validPath.FindStringSubmatch(r.URL.Path)
		if match == nil {
			http.NotFound(w, r)
			return
		}
		// match[2] is the wiki page name captured from the path.
		fn(w, r, match[2], s)
	}
}
// wikiWord matches {{Page}} or {{Page#Anchor}} spans. Compiled once at
// package init instead of on every call. The anchor capture is
// non-greedy: the old greedy `(.*)` swallowed everything up to the LAST
// "}}" on the line, merging multiple wiki words into one broken link.
var wikiWord = regexp.MustCompile(`\{\{([^\}^#]+)[#]*(.*?)\}\}`)

// parseWikiWords replaces every {{Page}} / {{Page#Anchor}} marker in
// target with an <a> link to /wiki/view/Page#Anchor.
func parseWikiWords(target []byte) []byte {
	return wikiWord.ReplaceAll(target, []byte(`<a href="/wiki/view/$1#$2">$1</a>`))
}
// loggingHandler wraps next so every request is logged with its method,
// URL, and wall-clock handling duration.
func loggingHandler(next http.Handler) http.Handler {
	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		start := time.Now()
		next.ServeHTTP(w, r)
		// time.Since is the idiomatic (and single-call) way to measure
		// elapsed time, replacing a second time.Now() plus Sub.
		log.Printf("[%s] %q %v\n", r.Method, r.URL.String(), time.Since(start))
	})
}
// main wires configuration, logging, storage and all HTTP routes, then
// blocks serving on the configured port. Package-level path variables
// (wikiDir, tagDir, pubDir, ekey) are populated here before any handler
// can run.
func main() {
	specialDir = []string{"tags", "pub"}
	config, err := LoadConfig()
	if err != nil {
		log.Fatal(err)
	}
	// Optional log file; when unset the default log output is kept.
	if config.Logfile != "" {
		f, err := os.OpenFile(config.Logfile, os.O_RDWR|os.O_CREATE|os.O_APPEND, 0666)
		if err != nil {
			checkErr(err)
		}
		defer f.Close()
		log.SetOutput(f)
	}
	// Normalize the wiki root so later string concatenations form valid
	// paths without doubling the "/".
	wikiDir = config.WikiDir
	if !strings.HasSuffix(wikiDir, "/") {
		wikiDir = wikiDir + "/"
	}
	tagDir = wikiDir + "tags/" // wikiDir is "/"-terminated, so no doubled separator
	pubDir = wikiDir + "pub/"  // likewise
	ekey = []byte(config.EncryptionKey)
	// Best-effort directory creation; errors (e.g. already-exists) are
	// deliberately ignored.
	// NOTE(review): the "pub" directory is never created here even though
	// pubDir is used for publish markers - confirm whether save() relies
	// on it existing.
	os.Mkdir(config.WikiDir, 0755)
	os.Mkdir(config.WikiDir+"tags", 0755)
	httpmux := http.NewServeMux()
	// NOTE(review): fileStorage is rooted at tagDir here - verify this is
	// intentional rather than wikiDir.
	cached := newCachedStorage(fileStorage{tagDir}, wikiDir, tagDir)
	fstore := &cached
	htmltomd := md.NewConverter("", true, nil)
	// Wiki routes: every handler is wrapped in request logging.
	httpmux.Handle("/wiki", loggingHandler(simpleHandler("home", getNav, fstore)))
	httpmux.Handle("/wiki/list/", loggingHandler(simpleHandler("list", getNav, fstore)))
	httpmux.Handle("/wiki/search/", loggingHandler(makeSearchHandler(getNav, fstore)))
	httpmux.Handle("/wiki/view/", loggingHandler(makeHandler(viewHandler, getNav, fstore)))
	httpmux.Handle("/wiki/edit/", loggingHandler(makeHandler(editHandler, getNav, fstore)))
	httpmux.Handle("/wiki/save/", loggingHandler(processSave(saveHandler, fstore)))
	httpmux.Handle("/wiki/delete/", loggingHandler(makeHandler(deleteHandler, getNav, fstore)))
	httpmux.Handle("/wiki/move/", loggingHandler(makeHandler(moveHandler, getNav, fstore)))
	httpmux.Handle("/wiki/scrape/", loggingHandler(makeScrapeHandler(scrapeHandler, htmltomd, fstore)))
	// Raw markdown files served straight off disk, bypassing rendering.
	httpmux.Handle("/wiki/raw/", http.StripPrefix("/wiki/raw/", http.FileServer(http.Dir(wikiDir))))
	// Public (unauthenticated) views of published pages.
	httpmux.Handle("/pub/", loggingHandler(makePubHandler(pubHandler, getNav, fstore)))
	httpmux.Handle("/pub", loggingHandler(simpleHandler("pubhome", getPubNav, fstore)))
	httpmux.Handle("/api", loggingHandler(apiHandler(innerAPIHandler, fstore)))
	// Listen for normal traffic against root
	httpmux.Handle("/", http.FileServer(http.Dir("wwwroot")))
	httpmux.Handle("/static/", http.StripPrefix("/static/", http.FileServer(http.Dir("static"))))
	err = http.ListenAndServe(":"+strconv.Itoa(config.HTTPPort), httpmux)
	checkErr(err)
}