-
Notifications
You must be signed in to change notification settings - Fork 13
/
Copy pathrss_rc.py
261 lines (225 loc) · 10.3 KB
/
rss_rc.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
"""
RSS Handling
If you do changes, please check if it still validates after your changes:
http://feedvalidator.org/
@copyright: 2006-2007 MoinMoin:ThomasWaldmann
@license: GNU GPL, see COPYING for details.
"""
import StringIO, re, time
from MoinMoin import wikixml, config, wikiutil
from MoinMoin.logfile import editlog
from MoinMoin.util import timefuncs
from MoinMoin.Page import Page
from MoinMoin.wikixml.util import RssGenerator
def full_url(request, page, querystr=None, anchor=None):
    """Return a fully-qualified, XML-escaped URL for *page*.

    The page-relative URL (optionally carrying a query string and an
    anchor) is XML-escaped for safe embedding in the feed, then prefixed
    with the request's scheme and host to make it absolute.
    """
    relative = page.url(request, anchor=anchor, querystr=querystr)
    return request.getQualifiedURL(wikiutil.escape(relative))
def execute(pagename, request):
    """ Send recent changes as an RSS document

    Renders the wiki's global edit log as an RSS/RDF feed, honoring
    HTTP conditional requests (If-Modified-Since / If-None-Match).
    Behaviour is tuned via optional query parameters; anything
    unparseable silently falls back to its default:

        items=nnn   number of feed entries (default 15, hard cap 100)
        unique=1    only the latest change of each page
        diffs=0     omit the inline diff from each item description
        ddiffs=0    link items to the page itself instead of its diff

    @param pagename: page the action was invoked on; used as the
                     channel <link> target.
    @param request: current request; headers and the generated XML are
                    written to it.  No return value.
    """
    if not wikixml.ok:
        # pyxml is required by the SAX-based RSS generator; answer with
        # a plain-text explanation instead of a broken feed.
        httpheaders = ["Content-Type: text/plain; charset=%s" % config.charset]
        request.emit_http_headers(httpheaders)
        request.write("rss_rc action is not supported because of missing pyxml module.")
        return

    cfg = request.cfg

    # get params
    items_limit = 100  # hard upper bound, regardless of ?items=
    try:
        max_items = int(request.form['items'][0])
        max_items = min(max_items, items_limit) # not more than `items_limit`
    except (KeyError, ValueError):
        # not more than 15 items in a RSS file by default
        max_items = 15
    try:
        unique = int(request.form.get('unique', [0])[0])
    except ValueError:
        unique = 0
    try:
        diffs = int(request.form.get('diffs', [1])[0])
    except ValueError:
        diffs = 1
    ## ddiffs inserted by Ralf Zosel <[email protected]>, 04.12.2003
    try:
        ddiffs = int(request.form.get('ddiffs', [1])[0])
    except ValueError:
        ddiffs = 1

    # get data: walk the edit log newest-first, collecting up to
    # max_items readable SAVE* entries (with unique=1, one per page).
    log = editlog.EditLog(request)
    logdata = []
    counter = 0
    pages = {}   # page names already collected, for unique=1 filtering
    lastmod = 0  # edit time of the newest collected entry (0 = none yet)
    for line in log.reverse():
        if not request.user.may.read(line.pagename):
            continue
        # skip non-save actions; with unique=1 also skip repeat pages
        if (not line.action.startswith('SAVE') or
            ((line.pagename in pages) and unique)): continue
        #if log.dayChanged() and log.daycount > _MAX_DAYS: break
        line.editor = line.getInterwikiEditorData(request)
        line.time = timefuncs.tmtuple(wikiutil.version2timestamp(line.ed_time_usecs)) # UTC
        logdata.append(line)
        pages[line.pagename] = None
        if not lastmod:
            # the first (newest) entry defines the feed's Last-Modified
            lastmod = wikiutil.version2timestamp(line.ed_time_usecs)
        counter += 1
        if counter >= max_items:
            break
    del log

    timestamp = timefuncs.formathttpdate(lastmod)
    # the etag encodes everything that influences the generated output
    etag = "%d-%d-%d-%d-%d" % (lastmod, max_items, diffs, ddiffs, unique)

    # for 304, we look at if-modified-since and if-none-match headers,
    # one of them must match and the other is either not there or must match.
    # NOTE(review): when one conditional header matches but the other is
    # present and does NOT match, neither branch below emits anything at
    # all (no headers, no body) -- looks like a latent bug; confirm
    # against upstream before changing.
    if request.if_modified_since == timestamp:
        if request.if_none_match:
            if request.if_none_match == etag:
                request.emit_http_headers(["Status: 304 Not modified"])
        else:
            request.emit_http_headers(["Status: 304 Not modified"])
    elif request.if_none_match == etag:
        if request.if_modified_since:
            if request.if_modified_since == timestamp:
                request.emit_http_headers(["Status: 304 Not modified"])
        else:
            request.emit_http_headers(["Status: 304 Not modified"])
    else:
        # generate an Expires header, using whatever setting the admin
        # defined for suggested cache lifetime of the RecentChanges RSS doc
        expires = timefuncs.formathttpdate(time.time() + cfg.rss_cache)

        httpheaders = ["Content-Type: text/xml; charset=%s" % config.charset,
                       "Expires: %s" % expires,
                       "Last-Modified: %s" % timestamp,
                       "Etag: %s" % etag, ]

        # send the generated XML document
        request.emit_http_headers(httpheaders)

        baseurl = request.getBaseURL()
        if not baseurl.endswith('/'):
            baseurl += '/'

        # site logo URL, extracted from the configured logo HTML (if any)
        logo = re.search(r'src="([^"]*)"', cfg.logo_string)
        if logo:
            logo = request.getQualifiedURL(logo.group(1))

        # prepare output
        out = StringIO.StringIO()
        handler = RssGenerator(out)

        # start SAX stream
        handler.startDocument()
        # leading XML comment documenting the supported URL parameters
        handler._out.write(
            '<!--\n'
            ' Add an "items=nnn" URL parameter to get more than the default 15 items.\n'
            ' You cannot get more than %d items though.\n'
            ' \n'
            ' Add "unique=1" to get a list of changes where page names are unique,\n'
            ' i.e. where only the latest change of each page is reflected.\n'
            ' \n'
            ' Add "diffs=0" to remove change diffs to the description of each items.\n'
            ' \n'
            ' Add "ddiffs=0" to link directly to the wikipage\n'
            ' Current settings: items=%i, unique=%i, diffs=%i, ddiffs=%i'
            '-->\n' % (items_limit, max_items, unique, diffs, ddiffs)
            )

        # emit channel description
        handler.startNode('channel', {
            (handler.xmlns['rdf'], 'about'): request.getBaseURL(),
            })
        handler.simpleNode('title', cfg.sitename)
        page = Page(request, pagename)
        handler.simpleNode('link', full_url(request, page))
        handler.simpleNode('description', 'RecentChanges at %s' % cfg.sitename)
        if logo:
            handler.simpleNode('image', None, {
                (handler.xmlns['rdf'], 'resource'): logo,
                })
        if cfg.interwikiname:
            handler.simpleNode(('wiki', 'interwiki'), cfg.interwikiname)

        # RDF sequence listing each changed page, in feed order
        handler.startNode('items')
        handler.startNode(('rdf', 'Seq'))
        for item in logdata:
            # anchor is the UTC edit time as YYYYMMDDhhmmss
            anchor = "%04d%02d%02d%02d%02d%02d" % item.time[:6]
            page = Page(request, item.pagename)
            link = full_url(request, page, anchor=anchor)
            handler.simpleNode(('rdf', 'li'), None, attr={(handler.xmlns['rdf'], 'resource'): link, })
        handler.endNode(('rdf', 'Seq'))
        handler.endNode('items')
        handler.endNode('channel')

        # emit logo data
        if logo:
            handler.startNode('image', attr={
                (handler.xmlns['rdf'], 'about'): logo,
                })
            handler.simpleNode('title', cfg.sitename)
            handler.simpleNode('link', baseurl)
            handler.simpleNode('url', logo)
            handler.endNode('image')

        # emit items
        for item in logdata:
            page = Page(request, item.pagename)
            anchor = "%04d%02d%02d%02d%02d%02d" % item.time[:6]
            rdflink = full_url(request, page, anchor=anchor)
            handler.startNode('item', attr={(handler.xmlns['rdf'], 'about'): rdflink, })

            # general attributes
            handler.simpleNode('title', item.pagename)
            if ddiffs:
                # link straight to the diff view of the change
                handler.simpleNode('link', full_url(request, page, querystr={'action': 'diff'}))
            else:
                handler.simpleNode('link', full_url(request, page))

            handler.simpleNode(('dc', 'date'), timefuncs.W3CDate(item.time))

            # description: editor, edit comment and (optionally) a diff.
            # item.editor is (kind, data) from getInterwikiEditorData.
            if item.editor[0] == 'interwiki':
                edname = "%s:%s" % item.editor[1]
                ##edattr[(None, 'link')] = baseurl + wikiutil.quoteWikiname(edname)
            else: # 'ip'
                edname = item.editor[1]
                ##edattr[(None, 'link')] = link + "?action=info"

            # this edattr stuff, esp. None as first tuple element breaks things (tracebacks)
            # if you know how to do this right, please send us a patch

            user = edname.split(':')[-1]
            user_link = full_url(request, Page(request, user))
            # NOTE: description text is intentionally Spanish in this
            # fork ("Cambio por" = "Change by") -- do not "fix" it.
            desc_text = 'Cambio por <a href="%s">%s</a> -- "%s"' % (user_link, user, item.comment)

            if diffs:
                # TODO: rewrite / extend wikiutil.pagediff
                # searching for the matching pages doesn't really belong here
                revisions = page.getRevList()
                rl = len(revisions)
                for idx in range(rl):
                    rev = revisions[idx]
                    if rev <= item.rev:
                        if idx + 1 < rl:
                            # diff the previous revision against the current
                            # page, truncated to the first 20 lines
                            lines = wikiutil.pagediff(request, item.pagename, revisions[idx+1], item.pagename, 0, ignorews=1)
                            if len(lines) > 20:
                                lines = lines[:20] + ['... (Continua)\n']
                            # colorize the diff: additions green, removals red
                            # (was: "vamos a colorear las lineas!")
                            fixed_lines = []
                            for line in lines:
                                line = wikiutil.escape(line)
                                if line.startswith('+'):
                                    line = "<font color='green'>%s</font>" % line
                                elif line.startswith('-'):
                                    line = "<font color='red'>%s</font>" % line
                                fixed_lines.append(line)
                            lines = fixed_lines
                            lines = '\n'.join(lines)
                            desc_text = '%s\n<pre>\n%s\n</pre>\n' % (desc_text, lines)
                        break

            if desc_text:
                handler.simpleNode('description', desc_text)

            # contributor
            edattr = {}
            if cfg.show_hosts:
                edattr[(handler.xmlns['wiki'], 'host')] = item.hostname
            handler.startNode(('dc', 'contributor'))
            handler.startNode(('rdf', 'Description'), attr=edattr)
            handler.simpleNode(('rdf', 'value'), edname)
            handler.endNode(('rdf', 'Description'))
            handler.endNode(('dc', 'contributor'))

            # wiki extensions
            handler.simpleNode(('wiki', 'version'), "%i" % (item.ed_time_usecs))
            handler.simpleNode(('wiki', 'status'), ('deleted', 'updated')[page.exists()])
            handler.simpleNode(('wiki', 'diff'), full_url(request, page, querystr={'action': 'diff'}))
            handler.simpleNode(('wiki', 'history'), full_url(request, page, querystr={'action': 'info'}))
            # handler.simpleNode(('wiki', 'importance'), ) # ( major | minor )
            # handler.simpleNode(('wiki', 'version'), ) # ( #PCDATA )

            handler.endNode('item')

        # end SAX stream
        handler.endDocument()

        request.write(out.getvalue())