Attachment 'time_cache.py'
Download 1 """
2 Time getting acl from a pickle
3
4 Usage: time_cache.py count
5 """

try:
    import cPickle as pickle
except ImportError:
    import pickle

# Set the pickle protocol, see http://docs.python.org/lib/node64.html
try:
    # Requires Python 2.3
    PICKLE_PROTOCOL = pickle.HIGHEST_PROTOCOL
except AttributeError:
    # Use protocol 1, a binary format compatible with all Python versions
    PICKLE_PROTOCOL = 1

import time
import sys
import os
import dircache
import random

random.seed()

ACL = [
    # Entry, rightsdict
    ('WikiAdmin', {'read': 1, 'write': 1, 'delete': 1, 'revert': 1, 'admin': 1}),
    ('EditorsGroup', {'read': 1, 'write': 1, 'delete': 1, 'revert': 1, 'admin': 0}),
    ('All', {'read': 1, 'write': 0, 'delete': 0, 'revert': 0, 'admin': 0}),
]
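
# For illustration only (not part of the benchmark): a right would be looked
# up by scanning the entries in order, with 'All' acting as the default.
# A minimal sketch, assuming a plain name match with no group expansion:
#
#   def may(user, right, acl):
#       for entry, rights in acl:
#           if entry == user or entry == 'All':
#               return bool(rights.get(right, 0))
#       return False
#
#   may('WikiAdmin', 'admin', ACL)  # True
#   may('JohnDoe', 'write', ACL)    # False, falls through to 'All'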


def set_cache(key, value):
    value = pickle.dumps(value, PICKLE_PROTOCOL)
    # Open in binary mode; binary pickle protocols are not text-safe.
    # Open before the try block so f is always defined in the finally clause.
    f = file(os.path.join('cache', key), 'wb')
    try:
        f.write(value)
    finally:
        f.close()


def get_cache(key):
    # Open in binary mode to match set_cache(); open before the try block
    # so f is always defined in the finally clause.
    f = file(os.path.join('cache', key), 'rb')
    try:
        value = f.read()
    finally:
        f.close()
    value = pickle.loads(value)
    return value
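
# Quick sanity check for the two helpers above (illustrative only; assumes
# the 'cache' directory already exists):
#
#   set_cache('example', {'a': 1})
#   assert get_cache('example') == {'a': 1}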


def make_pages(count=1000):
    """ Make page directories

    Assume use of hierarchical ACLs; only about 20% of pages have an ACL.
    """
    clean()

    os.mkdir('pages')
    os.mkdir('cache')

    for i in range(count):
        # Create random names to remove the effect of disk caching
        name = '%s.%s' % (time.time(), random.randint(1, sys.maxint))
        path = os.path.join('pages', name, 'cache')
        os.makedirs(path)
        metafile = os.path.join(path, 'meta')
        meta = {}

        # About 20 percent of pages have an ACL
        if random.random() < 0.2:
            meta['acl'] = ACL
        else:
            meta['acl'] = None

        # About 5% of pages are deleted
        meta['exists'] = random.random() < 0.95

        # Page revision
        meta['revision'] = random.randint(1, 100)

        # Write in binary mode and close the file explicitly
        f = file(metafile, 'wb')
        try:
            pickle.dump(meta, f, PICKLE_PROTOCOL)
        finally:
            f.close()


def make_meta_cache():
    """ Get meta data from all pages and cache it in one file """
    start = time.time()
    names = [name for name in dircache.listdir('pages')
             if not name.startswith('.')]
    meta = {}
    for name in names:
        path = os.path.join('pages', name, 'cache', 'meta')
        f = file(path, 'rb')
        try:
            value = pickle.load(f)
        finally:
            f.close()
        meta[name] = (value['exists'], value['acl'])

    set_cache('meta', meta)
    print ' Create meta cache: %.8f' % (time.time() - start)


def get_meta():
    start = time.time()
    meta = get_cache('meta')
    print ' Get meta from cache: %.8f' % (time.time() - start)
    return meta


def set_meta(key, value):
    # Read-modify-write of the whole cache file; not safe under
    # concurrent writers.
    meta = get_cache('meta')
    meta[key] = value
    set_cache('meta', meta)


def edit_meta():
    start = time.time()
    # Example: a page was deleted, keep the ACL of the old revision.
    # This times a full read-modify-write of the meta cache.
    set_meta('%016d' % 1, (0, ACL))
    print ' Edit acl cache: %.8f' % (time.time() - start)


def clean():
    # Unix-only cleanup of the test trees
    os.system('rm -rf pages')
    os.system('rm -rf cache')


if __name__ == '__main__':

    try:
        count = int(sys.argv[1])
        print 'Test cache for %d pages:' % count
        make_pages(count)
        make_meta_cache()
        get_meta()
        edit_meta()
        clean()
    except (IndexError, ValueError):
        print __doc__
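A typical run looks like this (a sketch; the timing values depend on the
machine and disk, so they are elided here):

    $ python time_cache.py 1000
    Test cache for 1000 pages:
     Create meta cache: ...
     Get meta from cache: ...
     Edit acl cache: ...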