Attachment 'time_cache_2.py'
Download 1 """
2 Time getting acl from a pickle
3
4 Usage: time_cache.py count
5 """
6
7 try:
8 import cPickle as pickle
9 except ImportError:
10 import pickle
11
12 # Set pickle protocol, see http://docs.python.org/lib/node64.html
13 try:
14 # Requires 2.3
15 PICKLE_PROTOCOL = pickle.HIGHEST_PROTOCOL
16 except AttributeError:
17 # Use protocol 1, binary format compatible with all python versions
18 PICKLE_PROTOCOL = 1
19
20 import time
21 import sys
22 import os
23 import dircache
24 import random
25 import sha
26 import base64
27
28 random.seed()
29
# Access rights as bit flags: each right has an "on" bit and an "n/a" bit
READ_NA = 0x1
READ_ON = 0x2
WRITE_NA = 0x4
WRITE_ON = 0x8
DELETE_NA = 0x10
DELETE_ON = 0x20
REVERT_NA = 0x40
REVERT_ON = 0x80
ADMIN_NA = 0x100
ADMIN_ON = 0x200

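# A rights value ORs these flags together, as in ACL below; a permission
# check would then test single bits, e.g. rights & WRITE_ON (assumed
# usage: this script only stores and times the values).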
ACL = [
    # Entry, rightsdict
    # +WikiAdmin:read,write,delete,revert,admin
    ('WikiAdmin', READ_ON | WRITE_ON | DELETE_ON | REVERT_ON | ADMIN_ON),
    # Example of a modifier
    # +EditorsGroup:read,write,delete,revert
    ('EditorsGroup', READ_ON | WRITE_ON | DELETE_ON | REVERT_ON | ADMIN_NA),
    ('All', READ_ON),
##     ('WikiAdmin', {'read': 1, 'write': 1, 'delete': 1, 'revert': 1, 'admin': 1}),
##     ('EditorsGroup', {'read': 1, 'write': 1, 'delete': 1, 'revert': 1, 'admin': 0}),
##     ('All', {'read': 1, 'write': 0, 'delete': 0, 'revert': 0, 'admin': 0}),
]


def set_cache(key, value):
    """ Pickle value into cache/key """
    value = pickle.dumps(value, PICKLE_PROTOCOL)
    f = file(os.path.join('cache', key), 'wb')
    try:
        f.write(value)
    finally:
        f.close()


def get_cache(key):
    """ Unpickle value from cache/key """
    f = file(os.path.join('cache', key), 'rb')
    try:
        value = f.read()
    finally:
        f.close()
    value = pickle.loads(value)
    return value


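# Example round trip (hypothetical key and value, same shape as the real
# cache below):
#   set_cache('example', {'SomePage': (1, ACL)})
#   assert get_cache('example') == {'SomePage': (1, ACL)}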
def make_pages(count=1000):
    """ Make page directories

    Assume use of hierarchical acl; only about 20% of pages have an acl.
    """
    clean()

    os.mkdir('pages')
    os.mkdir('cache')

    for i in range(count):
        # Create random names to remove the effect of disk caching
        name = '%s.%s' % (time.time(), random.randint(1, sys.maxint))
        path = os.path.join('pages', name, 'cache')
        os.makedirs(path)
        metafile = os.path.join(path, 'meta')
        meta = {}

        # About 20 percent of pages have an acl
        if random.random() < 0.2:
            meta['acl'] = ACL
        else:
            meta['acl'] = None

        # About 5% of pages are deleted
        meta['exists'] = random.random() < 0.95

        # Page revision
        meta['revision'] = random.randint(1, 100)

        f = file(metafile, 'wb')
        try:
            pickle.dump(meta, f, PICKLE_PROTOCOL)
        finally:
            f.close()


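# Resulting layout, one pickled meta dict per page:
#   pages/<random name>/cache/meta -> {'acl': ..., 'exists': ..., 'revision': ...}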
def make_meta_cache():
    """ Get meta data from all pages and cache it in one file """
    start = time.time()
    names = [name for name in dircache.listdir('pages')
             if not name.startswith('.')]
    meta = {}
    for name in names:
        path = os.path.join('pages', name, 'cache', 'meta')
        f = file(path, 'rb')
        try:
            value = pickle.load(f)
        finally:
            f.close()
        meta[name] = (value['exists'], value['acl'])

    set_cache('meta', meta)
    print ' Create meta cache: %.8f' % (time.time() - start)


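# get_meta times the payoff of the cache: one read and one unpickle for
# the whole wiki instead of one per page.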
def get_meta():
    start = time.time()
    meta = get_cache('meta')
    print ' Get meta from cache: %.8f' % (time.time() - start)
    return meta


def set_meta(key, value):
    meta = get_cache('meta')
    meta[key] = value
    set_cache('meta', meta)


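# Note that an edit is a full round trip: set_meta unpickles the whole
# dict, updates one key, then pickles everything back.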
def edit_meta():
    start = time.time()
    # Example: page deleted, use old revision acl
    set_meta('%016d' % 1, (0, ACL))
    print ' Edit acl cache: %.8f' % (time.time() - start)


def clean():
    os.system('rm -rf pages')
    os.system('rm -rf cache')


if __name__ == '__main__':

    try:
        count = int(sys.argv[1])
        print 'Test cache for %d pages:' % count
        make_pages(count)
        make_meta_cache()
        get_meta()
        edit_meta()
        clean()
    except (IndexError, ValueError):
        print __doc__