    - precursors: old -> set(new)
    - successors: new -> set(old)
    """
162 |
162 |
    def __init__(self, sopener):
        # caches for various obsolescence related sets
        # (cleared whenever new markers are added -- see add())
        self.caches = {}
        # all markers held by this store; presumably appended by
        # _load() -- TODO confirm, _load() is outside this view
        self._all = []
        # new markers to serialize
        # NOTE(review): per the class docstring, precursors maps
        # old -> set(new) and successors maps new -> set(old)
        self.precursors = {}
        self.successors = {}
        # opener used to access the marker file (see f.close() in add())
        self.sopener = sopener
218 finally: |
220 finally: |
219 # XXX: f.close() == filecache invalidation == obsstore rebuilt. |
221 # XXX: f.close() == filecache invalidation == obsstore rebuilt. |
220 # call 'filecacheentry.refresh()' here |
222 # call 'filecacheentry.refresh()' here |
221 f.close() |
223 f.close() |
222 self._load(new) |
224 self._load(new) |
|
225 # new marker *may* have changed several set. invalidate the cache. |
|
226 self.caches.clear() |
223 return len(new) |
227 return len(new) |
224 |
228 |
225 def mergemarkers(self, transation, data): |
229 def mergemarkers(self, transation, data): |
226 markers = _readmarkers(data) |
230 markers = _readmarkers(data) |
227 self.add(transation, markers) |
231 self.add(transation, markers) |
325 for mark in obsstore.precursors.get(current, ()): |
329 for mark in obsstore.precursors.get(current, ()): |
326 for suc in mark[1]: |
330 for suc in mark[1]: |
327 if suc not in seen: |
331 if suc not in seen: |
328 seen.add(suc) |
332 seen.add(suc) |
329 remaining.add(suc) |
333 remaining.add(suc) |
|
334 |
|
# mapping of 'set-name' -> <function to compute this set>
cachefuncs = {}

def cachefor(name):
    """Decorator to register a function as computing the cache for a set"""
    def register(func):
        # a set name may only be bound to a single compute function
        assert name not in cachefuncs
        cachefuncs[name] = func
        return func
    return register
|
344 |
|
def getobscache(repo, name):
    """Return the set of revisions that belong to the <name> set.

    Such access may compute the set and cache it for future use"""
    store = repo.obsstore
    # a falsy obsstore (presumably one without markers) means every
    # obsolescence set is empty -- nothing to compute or cache
    if not store:
        return ()
    caches = store.caches
    if name not in caches:
        # lazily compute via the function registered with @cachefor
        caches[name] = cachefuncs[name](repo)
    return caches[name]
|
354 |
|
# To be simple we need to invalidate obsolescence cache when:
#
# - a new changeset is added
# - public phase is changed
# - obsolescence markers are added
# - strip is used on a repo
|
def clearobscaches(repo):
    """Remove all obsolescence related caches from a repo

    This removes every cached set held by the obsstore, if the obsstore
    already exists on the repo.

    (We could be smarter here given the exact event that triggered the
    cache clearing)"""
    # only clear caches when obsstore data is already loaded for this repo
    # (presumably to avoid instantiating the obsstore just to clear nothing)
    if 'obsstore' not in repo._filecache:
        return
    repo.obsstore.caches.clear()
|
372 |
|
@cachefor('obsolete')
def _computeobsoleteset(repo):
    """the set of obsolete revisions"""
    torev = repo.changelog.nodemap.get
    # revisions of every known precursor node still present in the repo;
    # nodes unknown to the changelog map to None and are dropped
    obs = set(map(torev, repo.obsstore.precursors))
    obs.discard(None)
    # the '- public()' part excludes public changesets from the result
    return set(repo.revs('%ld - public()', obs))
|
383 |
|
@cachefor('unstable')
def _computeunstableset(repo):
    """the set of non obsolete revisions descending from an obsolete one"""
    unstable = repo.revs('(obsolete()::) - obsolete()')
    return set(unstable)
|
388 |
|
@cachefor('suspended')
def _computesuspendedset(repo):
    """the set of obsolete revisions with non obsolete descendants"""
    suspended = repo.revs('obsolete() and obsolete()::unstable()')
    return set(suspended)
|
393 |
|
@cachefor('extinct')
def _computeextinctset(repo):
    """the set of obsolete revisions without non obsolete descendants"""
    extinct = repo.revs('obsolete() - obsolete()::unstable()')
    return set(extinct)