1 # Copyright (C) 2007, One Laptop Per Child
3 # This program is free software; you can redistribute it and/or modify
4 # it under the terms of the GNU General Public License as published by
5 # the Free Software Foundation; either version 2 of the License, or
6 # (at your option) any later version.
8 # This program is distributed in the hope that it will be useful,
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # GNU General Public License for more details.
13 # You should have received a copy of the GNU General Public License
14 # along with this program; if not, write to the Free Software
15 # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
19 from sugar
.datastore
import datastore
# Properties the journal cares about.
# NOTE(review): this list appears truncated in this chunk (no closing
# bracket is visible) — confirm the trailing entries against the full file.
PROPERTIES = ['uid', 'title', 'mtime', 'timestamp', 'keep', 'buddies',
              'icon-color', 'mime_type', 'progress', 'activity', 'mountpoint',
# NOTE(review): presumably the GObject type-name registration for the
# enclosing cache class (whose 'class' statement is not visible in this
# chunk) — confirm against the full file.
__gtype_name__ = 'query_Cache'
def __init__(self, jobjects=None):
    """Optionally pre-populate the cache with *jobjects*.

    NOTE(review): the statements initializing self._array and self._dict
    (which append_all writes to) are not visible in this chunk — confirm
    against the full file.
    """
    if jobjects is not None:
        self.append_all(jobjects)
def prepend_all(self, jobjects):
    """Insert *jobjects* at the front of the cache, preserving their order.

    Every entry is also indexed in self._dict under its object_id.
    """
    # One slice-splice instead of insert(0, ...) per item: the original
    # loop was O(len(jobjects) * len(self._array)) because each insert(0)
    # shifts the whole list, and it also built a reversed copy of
    # jobjects just to iterate it. The splice is a single O(n) operation
    # with the same resulting order.
    self._array[:0] = jobjects
    for jobject in jobjects:
        self._dict[jobject.object_id] = jobject
def append_all(self, jobjects):
    """Append *jobjects* to the end of the cache.

    Each entry is also made reachable through self._dict by object_id.
    """
    for entry in jobjects:
        self._dict[entry.object_id] = entry
        self._array.append(entry)
def remove_all(self, jobjects):
    """Drop every entry of *jobjects* from both the list and the id index."""
    # Iterate over a copy: *jobjects* may be backed by this very cache
    # (callers pass self._cache, or slices of it, while mutating it).
    for jobject in jobjects[:]:
        cached = self._dict[jobject.object_id]
        self._array.remove(cached)
        del self._dict[cached.object_id]
# NOTE(review): the enclosing method header (presumably __len__, since
# len(self._cache) is used elsewhere in this file) is missing from this
# chunk — confirm against the full file.
    return len(self._array)
def __getitem__(self, key):
    """Index the cache by object_id (string key) or by position.

    Non-string keys (ints, slices) are delegated straight to the
    underlying list; string keys hit the id -> object map.
    """
    if not isinstance(key, basestring):
        return self._array[key]
    return self._dict[key]
# NOTE(review): the enclosing method header for this call is missing from
# this chunk (presumably a destroy/cleanup method of the cache) — confirm
# against the full file.
    self._destroy_jobjects(self._array)
def _destroy_jobjects(self, jobjects):
    # NOTE(review): the loop body is missing from this chunk — confirm
    # against the full file before relying on this method.
    for jobject in jobjects:
# Paged access to datastore.find() results, served from an in-memory
# _Cache window that is grown/rebuilt around the current read position.
class ResultSet(object):
def __init__(self, query, sorting):
    """Set up an empty, lazily-filled result set for *query*.

    NOTE(review): this chunk appears truncated — self._query, self._offset
    and self._position are read by other methods of this class but are not
    initialized in the visible lines; confirm against the full file.
    """
    # -1 means "total not yet known": the first length/read triggers a query.
    self._total_count = -1
    self._sorting = sorting
    self._cache = _Cache()
# NOTE(review): the 'def get_length(self):' header for the following body
# appears to be missing from this chunk (the property assignment below
# references get_length) — confirm against the full file.
    # Lazily run the query once to learn the total number of matches,
    # caching the first page of results as a side effect.
    if self._total_count == -1:
        jobjects, self._total_count = datastore.find(self._query,
            sorting=self._sorting,
            limit=ResultSet._CACHE_LIMIT,
            properties=PROPERTIES)
        self._cache.append_all(jobjects)
    return self._total_count

# Read-only property exposing the total result count.
length = property(get_length)
def seek(self, position):
    """Move the cursor so the next read() starts at *position*."""
    self._position = position
def read(self, max_count):
    """Return up to *max_count* entries starting at the current position.

    Requests are served from self._cache; when the window does not cover
    the position, the cache is rebuilt around it, otherwise pages are
    appended/prepended as the position nears either edge.

    NOTE(review): several lines of this method appear truncated in this
    chunk (marked inline below) — confirm each against the full file.
    """
    logging.debug('ResultSet.read position: %r' % self._position)

    # A read window must be much smaller than the cache for the paging
    # logic below to keep the window covered.
    if max_count * 5 > ResultSet._CACHE_LIMIT:
        raise RuntimeError('max_count (%i) too big for ResultSet._CACHE_LIMIT'
            ' (%i).' % (max_count, ResultSet._CACHE_LIMIT))

    # NOTE(review): the body of this branch (presumably resetting the
    # position) is missing from this chunk — confirm.
    if self._position == -1:

    # Cached entries at/after the current position...
    if self._position < self._offset:
        remaining_forward_entries = 0
    # NOTE(review): an 'else:' line and the continuation of the next
    # statement (after the trailing backslash, presumably self._position)
    # appear to be missing — confirm.
        remaining_forward_entries = self._offset + len(self._cache) - \

    # ...and cached entries before it.
    if self._position > self._offset + len(self._cache):
        remaining_backwards_entries = 0
    # NOTE(review): an 'else:' line appears to be missing here — confirm.
        remaining_backwards_entries = self._position - self._offset

    last_cached_entry = self._offset + len(self._cache)

    if (remaining_forward_entries <= 0 and remaining_backwards_entries <= 0) or \
            max_count > ResultSet._CACHE_LIMIT:
        # Total cache miss: remake it
        offset = max(0, self._position - max_count)
        logging.debug('remaking cache, offset: %r limit: %r' % (offset, max_count * 2))
        # NOTE(review): at least one keyword argument to datastore.find()
        # (e.g. offset=) appears to be missing from this chunk — confirm.
        jobjects, self._total_count = datastore.find(self._query,
            sorting=self._sorting,
            limit=ResultSet._CACHE_LIMIT,
            properties=PROPERTIES)
        self._cache.remove_all(self._cache)
        self._cache.append_all(jobjects)
        self._offset = offset

    elif remaining_forward_entries < 2 * max_count and \
            last_cached_entry < self._total_count:
        # Add one page to the end of cache
        logging.debug('appending one more page, offset: %r' % last_cached_entry)
        # NOTE(review): a limit= keyword argument to datastore.find()
        # appears to be missing from this chunk — confirm.
        jobjects, self._total_count = datastore.find(self._query,
            sorting=self._sorting,
            offset=last_cached_entry,
            properties=PROPERTIES)
        self._cache.append_all(jobjects)

        # apply the cache limit
        objects_excess = len(self._cache) - ResultSet._CACHE_LIMIT
        if objects_excess > 0:
            self._offset += objects_excess
            self._cache.remove_all(self._cache[:objects_excess])

    elif remaining_backwards_entries < 2 * max_count and self._offset > 0:
        # Add one page to the beginning of cache
        limit = min(self._offset, max_count)
        self._offset = max(0, self._offset - max_count)
        logging.debug('prepending one more page, offset: %r limit: %r' %
            (self._offset, limit))
        # NOTE(review): offset= and limit= keyword arguments to
        # datastore.find() appear to be missing from this chunk — confirm.
        jobjects, self._total_count = datastore.find(self._query,
            sorting=self._sorting,
            properties=PROPERTIES)
        self._cache.prepend_all(jobjects)

        # apply the cache limit
        objects_excess = len(self._cache) - ResultSet._CACHE_LIMIT
        if objects_excess > 0:
            self._cache.remove_all(self._cache[-objects_excess:])
    # NOTE(review): an 'else:' line introducing this cache-hit branch
    # appears to be missing — confirm.
        logging.debug('cache hit and no need to grow the cache')

    first_pos = self._position - self._offset
    last_pos = self._position - self._offset + max_count
    return self._cache[first_pos:last_pos]
def find(query, sorting=['-mtime']):
    """Run *query* against the datastore and return a ResultSet over it.

    query   -- dict of datastore query properties.
    sorting -- sort specification handed to the ResultSet; defaults to
               newest-first by mtime.  (Mutable default: safe only as long
               as nothing mutates it — it is passed through unchanged.)
    """
    result_set = ResultSet(query, sorting)
    # Bug fix: the function built the ResultSet but never returned it, so
    # callers (the self-test below does `result_set = find({})` and then
    # calls .read() on it) would have received None.
    return result_set
if __name__ == "__main__":
    # Manual self-test: exercises ResultSet paging against a mocked
    # datastore.find().
    # NOTE(review): this chunk appears truncated — the TOTAL_ITEMS and
    # SCREEN_SIZE constants, the `result` list handling in mock_find, and
    # a couple of seek() calls are not visible here; confirm against the
    # full file.

    # Route ResultSet's debug logging to stdout for the test run.
    def mock_debug(string):
        print "\tDEBUG: %s" % string
    logging.debug = mock_debug

    # Stand-in for datastore.find(): yields `limit` fake objects whose
    # object_id equals their index, plus a fake total count.
    def mock_find(query, sorting=None, limit=None, offset=None, properties=[]):
        print "mock_find %r %r" % (offset, (offset + limit))
        if limit is None or offset is None:
            raise RuntimeError("Unimplemented test.")
        for index in range(offset, offset + limit):
            obj = datastore.DSObject(index, datastore.DSMetadata({}), '')
            # NOTE(review): the statement collecting obj into `result`
            # is missing from this chunk — confirm.
        return result, TOTAL_ITEMS
    datastore.find = mock_find

    result_set = find({})

    print "Get first page"
    objects = result_set.read(SCREEN_SIZE)
    print [obj.object_id for obj in objects]
    assert range(0, SCREEN_SIZE) == [obj.object_id for obj in objects]

    print "Scroll to 5th item"
    # NOTE(review): a seek(5) call appears to be missing here — confirm.
    objects = result_set.read(SCREEN_SIZE)
    print [obj.object_id for obj in objects]
    assert range(5, SCREEN_SIZE + 5) == [obj.object_id for obj in objects]

    print "Scroll back to beginning"
    # NOTE(review): a seek(0) call appears to be missing here — confirm.
    objects = result_set.read(SCREEN_SIZE)
    print [obj.object_id for obj in objects]
    assert range(0, SCREEN_SIZE) == [obj.object_id for obj in objects]

    print "Hit PgDn five times"
    for i in range(0, 5):
        result_set.seek((i + 1) * SCREEN_SIZE)
        objects = result_set.read(SCREEN_SIZE)
        print [obj.object_id for obj in objects]
        assert range((i + 1) * SCREEN_SIZE, (i + 2) * SCREEN_SIZE) == [obj.object_id for obj in objects]

    print "Hit PgUp five times"
    for i in range(0, 5)[::-1]:
        result_set.seek(i * SCREEN_SIZE)
        objects = result_set.read(SCREEN_SIZE)
        print [obj.object_id for obj in objects]
        assert range(i * SCREEN_SIZE, (i + 1) * SCREEN_SIZE) == [obj.object_id for obj in objects]