This processor requires future.py (for Future) and feedparser.py.
1 """
2 MoinMoin - Processor for UnifiedRecentChanges
3
4 Use:
5 { {{#!URC
6 wiki title
7 wiki RSS feed URL
8 wiki title
9 wiki RSS feed URL
10 ...
11 ...
12 }} }
13
14 ( without the spaces in { {{ or }} } )
15 """
16
17
18
19 import string, sys, time
20 import feedparser # RSS
21 from future import Future # threading
22 from MoinMoin.parser import python
23
24
25
def bundle_into_pairs( l ):
    """
    Group consecutive elements of l into 2-tuples.

    [1,2,3,4,5,6,7] -> [(1,2),(3,4),(5,6)]

    A trailing unpaired element is silently dropped.
    """
    firsts = l[0::2]   # elements at even indexes
    seconds = l[1::2]  # elements at odd indexes
    # zip stops at the shorter sequence, which drops any odd leftover
    return zip( firsts, seconds )
36
def get_rss( wiki ):
    """
    Fetch and parse one wiki's RSS feed, in place.

    Reads wiki["feedurl"], stores the parsed feed in wiki["rss"]
    (or None when the feed could not be parsed cleanly), and
    returns the feedparser "bozo" flag: 0 on success, 1 on failure.
    """
    feed = feedparser.parse(wiki["feedurl"])
    if feed["bozo"] == 0:
        wiki["rss"] = feed
    else:
        # BUG FIX: this used to store the string "skipped", which is
        # not None and therefore passed the caller's
        # `wiki["rss"] != None` guard, crashing on "skipped"["items"].
        # Store None so failed feeds are actually skipped.
        wiki["rss"] = None
    return feed["bozo"]
47
48
49
def process(request, formatter, lines):
    """
    Render a unified RecentChanges list for several wikis.

    `lines` holds the processor invocation: the first line is the
    "#!URC" marker, followed by alternating wiki-title / feed-URL
    lines.  Every feed is fetched concurrently via Future, entries
    from the last seven days are merged and sorted newest-first,
    and the result is written to `request` as a bullet list built
    with `formatter`, encoded as UTF-8.
    """
    # Convert "lines" to a list of wiki dicts:
    # [{"title":..., "feedurl":..., "rss":None}, ...]
    lines = lines[1:] # first line is "URC" invocation
    pairs = bundle_into_pairs( lines )
    wiki_list = []
    for (title,feedurl) in pairs:
        wiki_list.append( { "title":title,
                            "feedurl":feedurl,
                            "rss":None } )

    # For each wiki, make a Futures call; get_rss fills in
    # wiki["rss"] as a side effect and returns the bozo flag.
    future_calls = [Future(get_rss,wiki) for wiki in wiki_list]
    # (block until all results)
    failures=0
    for fut in future_calls: # don't use the word futures- that's the module
        result = fut()
        failures=failures + result # OK=0, ERROR=1

    # Aggregate entries newer than one week across all wikis.
    # Each entry is the tuple consumed by the rendering loop below:
    # (date_tuple, diff_link, hist_link,
    #  wiki_name, rc_link, page_name, page_link, summary)
    entries = []
    one_week_ago_in_seconds = time.time() - (60*60*24*7)
    one_week_ago = time.gmtime( one_week_ago_in_seconds )
    for wiki in wiki_list:
        # BUG FIX: a failed fetch leaves a sentinel in wiki["rss"]
        # (None, or the legacy string "skipped"); the old `!= None`
        # test let "skipped" through and "skipped"["items"] raised
        # TypeError.  Skip both sentinels explicitly.
        if wiki["rss"] in (None, "skipped"):
            continue
        for rss_entry in wiki["rss"]["items"]:
            if rss_entry["date_parsed"] < one_week_ago:
                continue
            entries.append(
                (rss_entry["date_parsed"], # (2004,02,13,...)
                 rss_entry["wiki_diff"], # URL
                 rss_entry["wiki_history"], # URL
                 wiki["title"], # alternatively: wiki["rss"]["channel"]["title"]
                 wiki["rss"]["channel"]["link"], # URL to human RecentChanges
                 rss_entry["title"], # "FrontPage"
                 rss_entry["link"], # URL to page (ie, "FrontPage")
                 rss_entry.get("summary","") ) ) # "Just rewrote the front page..."

    # Sort newest-first - decorate sort undecorate.
    # (Local renamed from `sorted` so the builtin is not shadowed.)
    decorated = [(entry[0],entry) for entry in entries]
    decorated.sort()
    decorated.reverse()
    sorted_entries = [entry for (date,entry) in decorated]

    # Present:
    # "* (mm/dd hh:mm) (diff) (history) WikiName:PageName - summary"
    result = []
    result.append( formatter.bullet_list(1) )
    for (date_tuple,diff_link,hist_link,
         wiki_name,rc_link,page_name,page_link,summary) in sorted_entries:
        result.append( formatter.listitem(1) )
        result.append( "(%2.2d/%2.2d " % date_tuple[1:3] ) # mm/dd
        result.append( "%2.2d:%2.2d) " % date_tuple[3:5] ) # hh:mm
        result.append( formatter.url( diff_link, "(diff)" ) )
        result.append( " " )
        result.append( formatter.url( hist_link, "(history)" ) )
        result.append( " " )
        result.append( formatter.url( rc_link, wiki_name ) )
        result.append( ":" )
        result.append( formatter.url( page_link, page_name ) )
        result.append( " - " )
        result.append( summary )
        result.append( formatter.listitem(0) )
    result.append( formatter.bullet_list(0) )
    # TO DO: ADD NOTES ON FAILURES TO READ.
    request.write( ("".join( result )).encode("UTF-8") )