Attachment 'RecentChangesPatrick.py'
Download 1 """
2 MoinMoin - RecentChanges Macro
3
4 Copyright (c) 2000, 2001, 2002 by Jürgen Hermann <jh@web.de>
5 All rights reserved, see COPYING for details.
6
7 $Id: RecentChanges.py,v 1.59 2002/05/02 19:13:07 jhermann Exp $
8 """
9
10 # Imports
11 import cgi, re, string, sys, time, cStringIO
12 from MoinMoin import config, editlog, user, util, wikiutil, wikixml
13 from MoinMoin.Page import Page
14 from MoinMoin.i18n import _
15
16 _MAX_DAYS = 14
17 _MAX_PAGENAME_LENGTH = 35
18
19 #############################################################################
20 ### RecentChanges Macro
21 #############################################################################
22
def execute(macro, args, **kw):
    """Render the RecentChanges listing and return it as raw HTML.

    macro -- the macro runtime object; provides .request (with the
             current user) and .formatter (with the current page).
    args  -- raw macro argument string (unused here).
    kw    -- options; 'abandoned' (default 0): when set, the edit log is
             iterated in reverse and the RSS / bookmark UI is suppressed,
             producing the "abandoned pages" variant of the listing.
    """
    abandoned = kw.get('abandoned', 0)
    log = LogIterator(macro.request, reverse=abandoned)

    tnow = time.time()
    msg = ""
    buf = cStringIO.StringIO()

    # add rss link
    if wikixml.ok and not abandoned:
        img = macro.formatter.image(width=36, height=14, hspace=2, align="right",
            border=0, src=config.url_prefix+"/img/moin-rss.gif", alt="[RSS]")
        buf.write(macro.formatter.url(
            wikiutil.quoteWikiname(macro.formatter.page.page_name) + "?action=rss_rc",
            img, unescaped=1))

    # add bookmark link if valid user
    if abandoned:
        bookmark = None
    else:
        bookmark = macro.request.user.getBookmark()
        if macro.request.user.valid:
            bm_display = _('(no bookmark set)')
            if bookmark:
                bm_display = _('(currently set to %s)') % (
                    macro.request.user.getFormattedDateTime(bookmark),)

            buf.write("%s %s<br>" % (
                wikiutil.link_tag(
                    wikiutil.quoteWikiname(macro.formatter.page.page_name)
                        + "?action=bookmark&time=%d" % (tnow,),
                    _("Update my bookmark timestamp"),
                    formatter=macro.formatter),
                bm_display,
                ))

    oldversions = wikiutil.getBackupList(config.backup_dir, None)

    # get the most recent date each page was edited
    if abandoned:
        # !!! TODO: add existing pages that do not appear in the edit log at all
        last_edit = {}
        while log.next():
            last_edit[log.pagename] = log.ed_time
        log.reset()

    buf.write('<table border=0 cellspacing=2 cellpadding=0>')
    while log.getNextChange():
        if abandoned and log.ed_time < last_edit[log.pagename]:
            continue

        # check for configured max size
        if config.max_macro_size and buf.tell() > config.max_macro_size*1024:
            msg = "<br><font size='-1'>[Size limited to %dK]</font>" % (config.max_macro_size,)
            break

        # emit a day header (with a "set bookmark" link) whenever the
        # entry falls on a new calendar day; stop after _MAX_DAYS days
        if log.dayChanged():
            if log.daycount > _MAX_DAYS: break

            set_bm = ''
            if macro.request.user.valid and not abandoned:
                set_bm = ' <font size="1" face="Verdana">[%s]</font>' % (
                    wikiutil.link_tag(
                        wikiutil.quoteWikiname(macro.formatter.page.page_name)
                            + "?action=bookmark&time=%d" % (log.ed_time,),
                        _("set bookmark"), formatter=macro.formatter),)

            buf.write('<tr><td colspan="%d"><br/><H2>%s %s</H2></td></tr>\n'
                % (4+config.show_hosts, macro.request.user.getFormattedDate(log.ed_time), set_bm))

            # restart the alternating row background at each new day
            markrow = 0

        # check whether this page is newer than the user's bookmark
        hilite = log.ed_time > (bookmark or log.ed_time)

        # check whether this is a new (no backup) page
        # !!! the backup dir needs to be reorganized, one subdir per page, and the versions
        # in the subdirs, i.e. data/backup/<pagename>/<timestamp>; this will do for now
        backup_re = re.compile(r'^%s\.\d+(\.\d+)?$' % (wikiutil.quoteFilename(log.pagename),))
        is_new = len(filter(backup_re.match, oldversions)) == 0
        page = Page(log.pagename)

        html_link = ''
        if not page.exists():
            # indicate page was deleted
            html_link = '<img border="0" hspace="3" width="60" height="12" src="%s/img/moin-deleted.gif" alt="[DELETED]">' % (
                config.url_prefix)
        elif is_new:
            # show "NEW" icon if page was created after the user's bookmark
            # (the page is guaranteed to exist here: the first branch
            # handled deleted pages, so the extra exists() re-check that
            # used to sit inside this branch was redundant and is gone)
            if hilite:
                html_link = '<img border="0" hspace="3" width="31" height="12" src="%s/img/moin-new.gif" alt="[NEW]">' % (
                    config.url_prefix)
        elif hilite:
            # show "UPDATED" icon if page was edited after the user's bookmark
            img = '<img border="0" hspace="3" width="60" height="12" src="%s/img/moin-updated.gif" alt="[UPDATED]">' % (
                config.url_prefix)
            html_link = wikiutil.link_tag(
                wikiutil.quoteWikiname(log.pagename) + "?action=diff&date=" + str(bookmark),
                img, formatter=macro.formatter, pretty_url=1)
        else:
            # show "DIFF" icon else
            img = '<img border="0" hspace="11" width="15" height="11" src="%s/img/moin-diff.gif" alt="[DIFF]">' % (
                config.url_prefix)
            html_link = wikiutil.link_tag(
                wikiutil.quoteWikiname(log.pagename) + "?action=diff",
                img, formatter=macro.formatter, pretty_url=1)

        # print name of page, with a link to it; alternate the row color
        if markrow == 1:
            rowbgcolor = "bgcolor='#DDDDDD'"
            markrow = 0
        else:
            rowbgcolor = "bgcolor='#FFFFFF'"
            markrow = 1

        force_split = len(page.page_name) > _MAX_PAGENAME_LENGTH
        buf.write('<tr %s valign="top"><td>%s </td><td>%s</td><td> ' % (
            rowbgcolor, html_link, page.link_to(text=page.split_title(force=force_split)),))

        # print time of change
        if config.changed_time_fmt:
            tdiff = int(tnow - log.ed_time) / 60
            if tdiff < 1440:
                # younger than one day: show a relative age
                buf.write(_("[%(hours)dh %(mins)dm ago]") % {
                    'hours': int(tdiff/60), 'mins': tdiff%60})
            else:
                buf.write(time.strftime(config.changed_time_fmt, log.time_tuple))
        buf.write(" </td><td>")

        changelog = log.changes[log.pagename]
        changelog.reverse()
        changecount = len(changelog)

        # print editor name or IP
        rowstart = "<tr valign='top'><td>"
        rownext = "</td><td>"
        rowend = "</td></tr>"
        if config.show_hosts:
            if changecount > 1:
                # several edits: collapse consecutive edits by the same
                # editor into one row, annotated with the edit count
                patricklist = []
                for idx in range(changecount):
                    name = changelog[idx][0]
                    patricklist.append(name)

                maxi = len(patricklist)
                # patricklist.reverse()
                editcount = 1

                buf.write("<table border=0>")

                commentlist = ""
                for i in range(maxi):
                    writerow = 1
                    if i == maxi-1: # last edit?
                        if editcount == 1:
                            buf.write(rowstart + patricklist[i])
                        else:
                            buf.write(rowstart + patricklist[i]+"("+str(editcount)+")")
                    else:
                        if patricklist[i] == patricklist[i+1]:
                            # same editor as the next entry: merge rows
                            editcount = editcount+1
                            writerow = 0
                        else:
                            if editcount == 1:
                                buf.write(rowstart + patricklist[i]+" ")
                            else:
                                buf.write(rowstart + patricklist[i]+"("+str(editcount)+") ")
                            editcount = 1

                    # accumulate this edit's comment (if any) for the row
                    currcomment = cgi.escape(changelog[i][1])
                    if len(currcomment) > 0:
                        commentlist = commentlist+"<li>"+currcomment
                    if writerow == 1:
                        if len(commentlist) > 0:
                            buf.write(rownext + "" + commentlist + "") # ul and /ul removed because of the following blank line
                        else:
                            buf.write(rownext + " ")

                        commentlist = ""

                        buf.write(rowend)

                buf.write("</table>")

            else:
                # single edit: one editor row with its escaped comment
                currcomment = cgi.escape(changelog[0][1])
                if len(currcomment) > 0:
                    currcomment = "<li>"+currcomment
                buf.write("<table border=0>")
                buf.write(rowstart + log.getEditor() + rownext + currcomment + rowend)
                buf.write("</table>")

        buf.write('</td></tr>\n')

    buf.write('</table>')
    if msg: buf.write(msg)

    return macro.formatter.rawHTML(buf.getvalue())
224
225
226 #############################################################################
227 ### LogIterator
228 #############################################################################
229
class LogIterator(editlog.EditLog):
    """Edit-log iterator that groups changes per page and per day.

    Extends editlog.EditLog with:
      - self.changes: pagename -> list of (editor, comment) tuples
        collected for that page while iterating
      - self.daycount / dayChanged(): tracks when iteration crosses
        into a new calendar day (in the requesting user's timezone)
    """

    def __init__(self, request, **kw):
        # Python 2 idiom: forward all keyword options to the base class.
        apply(editlog.EditLog.__init__, (self,), kw)
        self.request = request
        self.changes = {}           # pagename -> [(editor, comment), ...]
        self.daycount = 0           # number of distinct days seen so far
        self.ratchet_day = None     # last (y, m, d) seen by dayChanged()
        self.unique = kw.get('unique', 1)

    def getNextChange(self):
        """Advance to the next change; return a true value while entries remain.

        With unique=1 (the default), pages already reported are skipped
        and the log entries for the newly seen page are collected into
        self.changes[pagename], stopping once the user's bookmark is
        passed (or after the first entry when no bookmark is set).
        """
        if not self.next(): return 0
        if not self.unique: return 1

        # skip already processed pages
        while self.changes.has_key(self.pagename):
            if not self.next(): return 0

        # we see this page for the first time, collect changes in this day
        # (the unused shadow local `ratchet_day = None` that used to sit
        # here was dead code and has been removed)
        thispage = self.pagename
        self.changes[thispage] = []
        offset = 0
        breakbookmark = self.request.user.getBookmark()
        while 1:

            # store comments for this page
            if self.pagename == thispage:
                comment = self.comment
                if self.action[:3] == 'ATT':
                    # attachment actions carry the quoted filename in
                    # the comment field; turn it into readable text
                    import urllib
                    filename = urllib.unquote(comment)
                    if self.action == 'ATTNEW':
                        comment = _("Upload of attachment '%(filename)s'.") % locals()
                    elif self.action == 'ATTDEL':
                        comment = _("Attachment '%(filename)s' deleted.") % locals()
                    elif self.action == 'ATTDRW':
                        comment = _("Drawing '%(filename)s' saved.") % locals()
                self.changes[thispage].append((self.getEditor(), comment))
            # peek for the next one
            if not self.peek(offset): break
            offset = offset + 1
            # stop once we drop below the bookmark timestamp, or right
            # away when the user has no bookmark at all
            if breakbookmark > self.ed_time or not breakbookmark:
                break

        # restore correct data
        return self.peek(-1)

    def dayChanged(self):
        """Return 1 the first time an entry from a new day is seen, else 0.

        Side effects: updates self.time_tuple, self.day and, when the
        day changed, increments self.daycount.
        """
        self.time_tuple = self.request.user.getTime(self.ed_time)
        self.day = tuple(self.time_tuple[0:3])
        if self.day != self.ratchet_day:
            self.daycount = self.daycount + 1
            self.ratchet_day = self.day
            return 1
        return 0
287
288
289 #############################################################################
290 ### RSS Handling
291 #############################################################################
if wikixml.ok:

    from MoinMoin.wikixml.util import RssGenerator

    def rss(pagename, request):
        """ Send recent changes as an RSS document

        pagename -- name of the page the RSS feed is requested for
        request  -- the current request; form parameters used:
                    items (max entries), unique (one entry per page),
                    diffs (embed a change diff in each description)

        Writes the RSS/RDF document to stdout and exits the process.
        """
        from MoinMoin import webapi
        import os, new

        # get params
        items_limit = 100
        try:
            max_items = int(request.form['items'].value)
            max_items = min(max_items, items_limit) # not more than `items_limit`
        except (KeyError, ValueError):
            # not more than 15 items in a RSS file by default
            max_items = 15
        try:
            unique = int(request.form.getvalue('unique', 0))
        except ValueError:
            unique = 0
        try:
            diffs = int(request.form.getvalue('diffs', 0))
        except ValueError:
            diffs = 0

        # prepare output
        out = cStringIO.StringIO()
        handler = RssGenerator(out)

        # get data
        interwiki = webapi.getBaseURL()
        if interwiki[-1] != "/": interwiki = interwiki + "/"

        logo = re.search(r'src="([^"]*)"', config.logo_string)
        if logo: logo = webapi.getQualifiedURL(logo.group(1))

        # collect up to max_items SAVE entries, at most _MAX_DAYS days back
        log = LogIterator(request, unique=unique)
        logdata = []
        counter = 0
        Bag = new.classobj('Bag', (), {})
        while log.getNextChange():
            if log.dayChanged() and log.daycount > _MAX_DAYS: break
            if log.action != 'SAVE': continue
            logdata.append(new.instance(Bag, {
                'ed_time': log.ed_time,
                'time': log.time_tuple,
                'pagename': log.pagename,
                'hostname': log.hostname,
                'editor': log.getEditorData(),
                'comment': log.comment,
                }))

            counter = counter + 1
            if counter >= max_items: break
        del log

        # start SAX stream
        handler.startDocument()
        handler._out.write(
            '<!--\n'
            ' Add an "items=nnn" URL parameter to get more than the default 15 items.\n'
            ' You cannot get more than %d items though.\n'
            ' \n'
            ' Add "unique=1" to get a list of changes where page names are unique,\n'
            ' i.e. where only the latest change of each page is reflected.\n'
            ' \n'
            ' Add "diffs=1" to add change diffs to the description of each items.\n'
            '-->\n' % items_limit
            )

        # emit channel description
        handler.startNode('channel', {
            (handler.xmlns['rdf'], 'about'): webapi.getBaseURL(),
            })
        handler.simpleNode('title', config.sitename)
        handler.simpleNode('link', interwiki + wikiutil.quoteWikiname(pagename))
        handler.simpleNode('description', 'RecentChanges at %s' % config.sitename)
        if logo:
            handler.simpleNode('image', None, {
                (handler.xmlns['rdf'], 'resource'): logo,
                })
        if config.interwikiname:
            handler.simpleNode(('wiki', 'interwiki'), config.interwikiname)

        # emit the rdf:Seq table of contents referencing every item
        handler.startNode('items')
        handler.startNode(('rdf', 'Seq'))
        for item in logdata:
            link = "%s%s#%04d%02d%02d%02d%02d%02d" % ((interwiki,
                wikiutil.quoteWikiname(item.pagename),) + item.time[:6])
            handler.simpleNode(('rdf', 'li'), None, attr={
                (handler.xmlns['rdf'], 'resource'): unicode(link, config.charset),
                })
        handler.endNode(('rdf', 'Seq'))
        handler.endNode('items')
        handler.endNode('channel')

        # emit logo data
        if logo:
            handler.startNode('image', attr={
                (handler.xmlns['rdf'], 'about'): logo,
                })
            handler.simpleNode('title', config.sitename)
            handler.simpleNode('link', interwiki)
            handler.simpleNode('url', logo)
            handler.endNode('image')

        # emit items
        for item in logdata:
            page = Page(item.pagename)
            link = interwiki + wikiutil.quoteWikiname(item.pagename)
            rdflink = "%s#%04d%02d%02d%02d%02d%02d" % ((link,) + item.time[:6])
            handler.startNode('item', attr={
                (handler.xmlns['rdf'], 'about'): rdflink,
                })

            # general attributes
            handler.simpleNode('title', item.pagename)
            handler.simpleNode('link', link)
            handler.simpleNode(('dc', 'date'), util.W3CDate(item.time))

            # description
            desc_text = item.comment
            if diffs:
                # !!! TODO: rewrite / extend wikiutil.pagediff
                # searching for the matching pages doesn't really belong here
                # also, we have a problem to get a diff between two backup versions
                # so it's always a diff to the current version for now
                oldversions = wikiutil.getBackupList(config.backup_dir, item.pagename)

                # find the newest backup not younger than this change and
                # diff the current page against the version before it
                for idx in range(len(oldversions)):
                    oldpage = oldversions[idx]
                    try:
                        date = os.path.getmtime(os.path.join(config.backup_dir, oldpage))
                    except EnvironmentError:
                        continue
                    if date <= item.ed_time:
                        if idx+1 < len(oldversions):
                            rc, page_file, backup_file, lines = wikiutil.pagediff(item.pagename, oldversions[idx+1], ignorews=1)
                            # BUGFIX: keep the FIRST 20 lines of the diff;
                            # the old code used lines[20:], which dropped
                            # the first 20 lines and kept the tail instead
                            if len(lines) > 20: lines = lines[:20] + ['...\n']
                            desc_text = desc_text + '<pre>\n' + string.join(lines, '') + '</pre>'
                        break
            if desc_text:
                handler.simpleNode('description', desc_text)

            # contributor
            edattr = {}
            if config.show_hosts:
                edattr[(handler.xmlns['wiki'], 'host')] = unicode(item.hostname, config.charset)
            if isinstance(item.editor, Page):
                # editor is a user's home page: link to it
                edname = item.editor.page_name
                edattr[(None, 'link')] = interwiki + wikiutil.quoteWikiname(edname)
            else:
                edname = item.editor
                ##edattr[(None, 'link')] = link + "?action=info"
            handler.startNode(('dc', 'contributor'))
            handler.startNode(('rdf', 'Description'), attr=edattr)
            handler.simpleNode(('rdf', 'value'), edname)
            handler.endNode(('rdf', 'Description'))
            handler.endNode(('dc', 'contributor'))

            # wiki extensions
            handler.simpleNode(('wiki', 'version'), "%04d-%02d-%02d %02d:%02d:%02d" % item.time[:6])
            handler.simpleNode(('wiki', 'status'), ('deleted', 'updated')[page.exists()])
            handler.simpleNode(('wiki', 'diff'), link + "?action=diff")
            handler.simpleNode(('wiki', 'history'), link + "?action=info")
            # handler.simpleNode(('wiki', 'importance'), ) # ( major | minor )
            # handler.simpleNode(('wiki', 'version'), ) # ( #PCDATA )

            handler.endNode('item')

        # end SAX stream
        handler.endDocument()

        # send the generated XML document
        webapi.http_headers(request, ["Content-Type: " + 'text/xml'] + webapi.nocache)
        sys.stdout.write(out.getvalue())

        sys.exit(0)
Attached Files
To refer to attachments on a page, use attachment:filename, as shown below in the list of files. Do NOT use the URL of the [get] link, since this is subject to change and can break easily. You are not allowed to attach a file to this page.