mercurial/branchmap.py
changeset 23786 7d63398fbfd1
parent 23785 cb99bacb9b4e
child 23819 6bf93440a717

@@ -132,10 +132,11 @@
         # heads.
         if closednodes is None:
             self._closednodes = set()
         else:
             self._closednodes = closednodes
+        self._revbranchcache = None
 
     def _hashfiltered(self, repo):
         """build hash of revision filtered in the current cache
 
         Tracking tipnode and tiprev is not enough to ensure validity of the
@@ -223,23 +224,29 @@
                         repo.filtername, len(self), nodecount)
         except (IOError, OSError, util.Abort), inst:
             repo.ui.debug("couldn't write branch cache: %s\n" % inst)
             # Abort may be raise by read only opener
             pass
+        if self._revbranchcache:
+            self._revbranchcache.write(repo.unfiltered())
+            self._revbranchcache = None
 
     def update(self, repo, revgen):
         """Given a branchhead cache, self, that may have extra nodes or be
         missing heads, and a generator of nodes that are strictly a superset of
         heads missing, this function updates self to be correct.
         """
         starttime = time.time()
         cl = repo.changelog
         # collect new branch entries
         newbranches = {}
-        getbranchinfo = cl.branchinfo
+        urepo = repo.unfiltered()
+        self._revbranchcache = revbranchcache(urepo)
+        getbranchinfo = self._revbranchcache.branchinfo
+        ucl = urepo.changelog
         for r in revgen:
-            branch, closesbranch = getbranchinfo(r)
+            branch, closesbranch = getbranchinfo(ucl, r)
             newbranches.setdefault(branch, []).append(r)
             if closesbranch:
                 self._closednodes.add(cl.node(r))
 
         # fetch current topological heads to speed up filtering
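
The two hunks above wire a revision/branch cache into the branch map: __init__ starts with self._revbranchcache = None, update() builds a revbranchcache on the unfiltered repo and resolves every revision's (branch, closesbranch) pair through it, and write() flushes that cache to disk before dropping the reference. The following is a minimal, self-contained sketch of that create-in-update / flush-in-write lifecycle; ToyRevBranchCache, ToyBranchCache and everything in them are illustrative stand-ins, not Mercurial's actual classes or APIs.

    class ToyRevBranchCache(object):
        """Toy stand-in for revbranchcache: memoizes (branch, closed) per rev."""
        def __init__(self):
            self._cache = {}
            self.writes = 0

        def branchinfo(self, changelog, rev):
            # look up the (branch, closesbranch) pair (here the "changelog"
            # is just a dict) and memoize it
            if rev not in self._cache:
                self._cache[rev] = changelog[rev]
            return self._cache[rev]

        def write(self):
            # Mercurial would persist the accumulated records to .hg/cache here
            self.writes += 1


    class ToyBranchCache(object):
        """Toy stand-in for branchcache showing the _revbranchcache lifecycle."""
        def __init__(self):
            self._revbranchcache = None      # created lazily, as in __init__ above

        def update(self, changelog, revs):
            # update() instantiates the cache and routes all lookups through it
            self._revbranchcache = ToyRevBranchCache()
            getbranchinfo = self._revbranchcache.branchinfo
            newbranches = {}
            for r in revs:
                branch, closesbranch = getbranchinfo(changelog, r)
                newbranches.setdefault(branch, []).append(r)
            return newbranches

        def write(self):
            # write() flushes the per-update cache and drops the reference
            if self._revbranchcache:
                self._revbranchcache.write()
                self._revbranchcache = None


    changelog = {0: ('default', False), 1: ('stable', False), 2: ('stable', True)}
    bc = ToyBranchCache()
    print(bc.update(changelog, [0, 1, 2]))   # {'default': [0], 'stable': [1, 2]}
    bc.write()                               # cache persisted, _revbranchcache is None again
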
@@ -359,11 +366,11 @@
         if len(self._rbcrevs) < rbcrevidx + _rbcrecsize:
             first = len(self._rbcrevs) // _rbcrecsize
             self._rbcrevs.extend('\0' * (len(changelog) * _rbcrecsize -
                                          len(self._rbcrevs)))
             for r in xrange(first, len(changelog)):
-                self._branchinfo(r)
+                self._branchinfo(changelog, r)
 
         # fast path: extract data from cache, use it if node is matching
         reponode = changelog.node(rev)[:_rbcnodelen]
         cachenode, branchidx = unpack(
             _rbcrecfmt, buffer(self._rbcrevs, rbcrevidx, _rbcrecsize))
@@ -372,11 +379,11 @@
             branchidx &= _rbcbranchidxmask
         if cachenode == reponode:
             return self._names[branchidx], close
         # fall back to slow path and make sure it will be written to disk
         self._rbcrevslen = min(self._rbcrevslen, rev)
-        return self._branchinfo(rev)
+        return self._branchinfo(changelog, rev)
 
     def _branchinfo(self, changelog, rev):
         """Retrieve branch info from changelog and update _rbcrevs"""
         b, close = changelog.branchinfo(rev)
         if b in self._namesreverse:
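
The last hunk threads the changelog through to _branchinfo while keeping the fast path intact: each revision owns a fixed-size record holding a truncated node plus a branch index, with a close flag packed into the index, and the cached answer is trusted only when the stored node prefix still matches the changelog node for that revision; otherwise the slow path recomputes the record. Below is a self-contained sketch of that check. The '>4sI' layout, the flag and mask values, and all names are assumptions made for illustration, not necessarily the real _rbc* constants.

    from struct import calcsize, pack, unpack

    # Assumed record layout, mirroring a plausible reading of the _rbc*
    # constants: a 4-byte node prefix plus a 32-bit branch index whose high
    # bit marks branch-closing revisions.  Treat these values as illustrative.
    RECFMT = '>4sI'
    RECSIZE = calcsize(RECFMT)        # 8 bytes per revision
    NODELEN = 4
    CLOSEFLAG = 0x80000000
    BRANCHIDXMASK = 0x7fffffff

    def cached_branchinfo(rbcrevs, rev, node, names, slowpath):
        """Return (branch, closed) for rev, trusting the cached record only
        when its node prefix still matches the changelog node for that rev."""
        recidx = rev * RECSIZE
        cachenode, branchidx = unpack(RECFMT, rbcrevs[recidx:recidx + RECSIZE])
        close = bool(branchidx & CLOSEFLAG)
        branchidx &= BRANCHIDXMASK
        if cachenode == node[:NODELEN]:
            return names[branchidx], close      # fast path: record is valid
        return slowpath(rev)                    # stale record: fall back to the slow path

    # Usage: one record for rev 0, pointing at branch index 1 ('stable'), not closed.
    names = ['default', 'stable']
    node0 = b'\xaa\xbb\xcc\xdd' + b'\x00' * 16          # fake 20-byte node
    rbcrevs = pack(RECFMT, node0[:NODELEN], 1)
    print(cached_branchinfo(rbcrevs, 0, node0, names,
                            lambda rev: ('default', False)))   # -> ('stable', False)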