mercurial/obsutil.py
changeset 43077 687b865b95ad
parent 43076 2372284d9457
child 43106 d783f945a701
comparing 43076:2372284d9457 with 43077:687b865b95ad
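
The hunks below all apply the same mechanical change: plain string literals become bytes literals (b'...'), matching Mercurial's convention of using bytes for revsets, dict keys and user-visible text on Python 3. A minimal illustration of why the prefix matters, using a hypothetical dict shaped like tr.changes (not code from the changeset):

    # bytes and str never compare equal on Python 3, so a str key misses an
    # entry stored under a bytes key (and vice versa).
    changes = {b'obsmarkers': []}     # hypothetical stand-in for tr.changes
    'obsmarkers' in changes           # False on Python 3
    b'obsmarkers' in changes          # True
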
   321     augmented with obsolescence information.
   322 
   323     Beware that possible obsolescence cycle may result if complex situation.
   324     """
   325     repo = repo.unfiltered()
-  326     foreground = set(repo.set('%ln::', nodes))
+  326     foreground = set(repo.set(b'%ln::', nodes))
   327     if repo.obsstore:
   328         # We only need this complicated logic if there is obsolescence
   329         # XXX will probably deserve an optimised revset.
   330         nm = repo.changelog.nodemap
   331         plen = -1
   334             plen = len(foreground)
   335             succs = set(c.node() for c in foreground)
   336             mutable = [c.node() for c in foreground if c.mutable()]
   337             succs.update(allsuccessors(repo.obsstore, mutable))
   338             known = (n for n in succs if n in nm)
-  339             foreground = set(repo.set('%ln::', known))
+  339             foreground = set(repo.set(b'%ln::', known))
   340     return set(c.node() for c in foreground)
   341 
   342 
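The loop above keeps extending the set until it stops growing: descendants of the requested nodes plus successors of anything mutable among them. Assuming the enclosing function is obsutil.foreground(), as in current obsutil.py, a usage sketch (repo is an assumed repository object):

    from mercurial import obsutil

    # hypothetical usage: foreground of the working directory parent,
    # returned as a set of binary node ids
    nodes = [repo[b'.'].node()]
    fg = obsutil.foreground(repo, nodes)
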
   343 # effectflag field
   344 #
   353 #
   354 # The effect-flag is placed behind an experimental flag
   355 # `effect-flags` set to off by default.
   356 #
   357 
-  358 EFFECTFLAGFIELD = "ef1"
+  358 EFFECTFLAGFIELD = b"ef1"
   359 
   360 DESCCHANGED = 1 << 0  # action changed the description
   361 METACHANGED = 1 << 1  # action change the meta
   362 DIFFCHANGED = 1 << 3  # action change diff introduced by the changeset
   363 PARENTCHANGED = 1 << 2  # action change the parent
   364 USERCHANGED = 1 << 4  # the user changed
   365 DATECHANGED = 1 << 5  # the date changed
   366 BRANCHCHANGED = 1 << 6  # the branch changed
   367 
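The constants above are independent bit flags, so one marker can record several kinds of change at once. A sketch of how such a field is typically built and tested (the combination below is invented, not taken from the changeset):

    # an amend that edits both the description and the user
    ef1 = DESCCHANGED | USERCHANGED        # 1 | 16 == 17
    if ef1 & DESCCHANGED:
        pass  # the description changed
    if not ef1 & PARENTCHANGED:
        pass  # the parent is unchanged
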
   368 METABLACKLIST = [
-  369     re.compile('^branch$'),
+  369     re.compile(b'^branch$'),
-  370     re.compile('^.*-source$'),
+  370     re.compile(b'^.*-source$'),
-  371     re.compile('^.*_source$'),
+  371     re.compile(b'^.*_source$'),
-  372     re.compile('^source$'),
+  372     re.compile(b'^source$'),
   373 ]
   374 
   375 
   376 def metanotblacklisted(metaitem):
   377     """ Check that the key of a meta item (extrakey, extravalue) does not
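
METABLACKLIST holds compiled byte patterns for extra keys that should not count as a metadata change (branch presumably because it is covered by BRANCHCHANGED, the *-source/*_source keys because they are rewrite bookkeeping). A hedged sketch of the screening, with a hypothetical helper name; the real check lives in metanotblacklisted(), whose body continues past this hunk:

    def _isblacklisted(extrakey):
        # hypothetical helper: True if any blacklist pattern matches the key
        return any(pattern.match(extrakey) for pattern in METABLACKLIST)

    _isblacklisted(b'branch')         # True  -- ignored for the meta flag
    _isblacklisted(b'rebase_source')  # True  -- matches b'^.*_source$'
    _isblacklisted(b'topic')          # False -- would count as a meta change
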
   406 def _cmpdiff(leftctx, rightctx):
   407     """return True if both ctx introduce the "same diff"
   408 
   409     This is a first and basic implementation, with many shortcoming.
   410     """
-  411     diffopts = diffutil.diffallopts(leftctx.repo().ui, {'git': True})
+  411     diffopts = diffutil.diffallopts(leftctx.repo().ui, {b'git': True})
   412 
   413     # Leftctx or right ctx might be filtered, so we need to use the contexts
   414     # with an unfiltered repository to safely compute the diff
   415 
   416     # leftctx and rightctx can be from different repository views in case of
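
_cmpdiff() compares the patches introduced by two contexts; the {b'git': True} option forces git-style diffs so renames, copies and mode changes take part in the comparison. A rough sketch of the idea under these options (not the real body; leftctx and rightctx are the parameters above):

    # compare the full git-style diffs of both contexts against their
    # first parents
    leftdiff = leftctx.diff(opts=diffopts)
    rightdiff = rightctx.diff(opts=diffopts)
    samediff = list(leftdiff) == list(rightdiff)
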
   479     """return the set of pre-existing revisions obsoleted by a transaction"""
   480     torev = repo.unfiltered().changelog.nodemap.get
   481     phase = repo._phasecache.phase
   482     succsmarkers = repo.obsstore.successors.get
   483     public = phases.public
-  484     addedmarkers = tr.changes['obsmarkers']
+  484     addedmarkers = tr.changes[b'obsmarkers']
-  485     origrepolen = tr.changes['origrepolen']
+  485     origrepolen = tr.changes[b'origrepolen']
   486     seenrevs = set()
   487     obsoleted = set()
   488     for mark in addedmarkers:
   489         node = mark[0]
   490         rev = torev(node)
   792             if not foundany:
   793                 fullsuccessorsets.append(_succs())
   794 
   795     values = []
   796     for sset in fullsuccessorsets:
-  797         values.append({'successors': sset, 'markers': sset.markers})
+  797         values.append({b'successors': sset, b'markers': sset.markers})
   798 
   799     return values
   800 
   801 
   802 def _getobsfate(successorssets):
   811     - superseded_split
   812     """
   813 
   814     if len(successorssets) == 0:
   815         # The commit has been pruned
-  816         return 'pruned'
+  816         return b'pruned'
   817     elif len(successorssets) > 1:
-  818         return 'diverged'
+  818         return b'diverged'
   819     else:
   820         # No divergence, only one set of successors
   821         successors = successorssets[0]
   822 
   823         if len(successors) == 1:
-  824             return 'superseded'
+  824             return b'superseded'
   825         else:
-  826             return 'superseded_split'
+  826             return b'superseded_split'
   827 
   828 
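_getobsfate() reduces the successors sets of a changeset to one of four fate names, all bytes after this change. A quick illustration of the mapping, with invented successor node values:

    _getobsfate([])                  # b'pruned':           no successors at all
    _getobsfate([[b'n1'], [b'n2']])  # b'diverged':         several competing sets
    _getobsfate([[b'n1']])           # b'superseded':       one set, one successor
    _getobsfate([[b'n1', b'n2']])    # b'superseded_split': one set, several successors
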
   829 def obsfateverb(successorset, markers):
   830     """ Return the verb summarizing the successorset and potentially using
   831     information from the markers
   832     """
   833     if not successorset:
-  834         verb = 'pruned'
+  834         verb = b'pruned'
   835     elif len(successorset) == 1:
-  836         verb = 'rewritten'
+  836         verb = b'rewritten'
   837     else:
-  838         verb = 'split'
+  838         verb = b'split'
   839     return verb
   840 
   841 
   842 def markersdates(markers):
   843     """returns the list of dates for a list of markers
   848 def markersusers(markers):
   849     """ Returns a sorted list of markers users without duplicates
   850     """
   851     markersmeta = [dict(m[3]) for m in markers]
   852     users = set(
-  853         encoding.tolocal(meta['user'])
+  853         encoding.tolocal(meta[b'user'])
   854         for meta in markersmeta
-  855         if meta.get('user')
+  855         if meta.get(b'user')
   856     )
   857 
   858     return sorted(users)
   859 
   860 
   861 def markersoperations(markers):
   862     """ Returns a sorted list of markers operations without duplicates
   863     """
   864     markersmeta = [dict(m[3]) for m in markers]
   865     operations = set(
-  866         meta.get('operation') for meta in markersmeta if meta.get('operation')
+  866         meta.get(b'operation') for meta in markersmeta if meta.get(b'operation')
   867     )
   868 
   869     return sorted(operations)
   870 
   871 
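markersusers() and markersoperations() both read field 3 of an obsolescence marker which, per the obsstore in-memory format, is a tuple of (key, value) byte pairs. A sketch with a fabricated marker; only field 3 matters to these helpers:

    meta = ((b'user', b'alice <alice@example.com>'), (b'operation', b'amend'))
    fake_marker = (None, None, None, meta)   # other marker fields unused here

    markersusers([fake_marker])        # [b'alice <alice@example.com>']
    markersoperations([fake_marker])   # [b'amend']
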
   883     line.append(obsfateverb(successors, markers))
   884 
   885     # Operations
   886     operations = markersoperations(markers)
   887     if operations:
-  888         line.append(" using %s" % ", ".join(operations))
+  888         line.append(b" using %s" % b", ".join(operations))
   889 
   890     # Successors
   891     if successors:
   892         fmtsuccessors = [formatctx(repo[succ]) for succ in successors]
-  893         line.append(" as %s" % ", ".join(fmtsuccessors))
+  893         line.append(b" as %s" % b", ".join(fmtsuccessors))
   894 
   895     # Users
   896     users = markersusers(markers)
   897     # Filter out current user in not verbose mode to reduce amount of
   898     # information
   900         currentuser = ui.username(acceptempty=True)
   901         if len(users) == 1 and currentuser in users:
   902             users = None
   903 
   904     if (verbose or normal) and users:
-  905         line.append(" by %s" % ", ".join(users))
+  905         line.append(b" by %s" % b", ".join(users))
   906 
   907     # Date
   908     dates = markersdates(markers)
   909 
   910     if dates and verbose:
   911         min_date = min(dates)
   912         max_date = max(dates)
   913 
   914         if min_date == max_date:
-  915             fmtmin_date = dateutil.datestr(min_date, '%Y-%m-%d %H:%M %1%2')
+  915             fmtmin_date = dateutil.datestr(min_date, b'%Y-%m-%d %H:%M %1%2')
-  916             line.append(" (at %s)" % fmtmin_date)
+  916             line.append(b" (at %s)" % fmtmin_date)
   917         else:
-  918             fmtmin_date = dateutil.datestr(min_date, '%Y-%m-%d %H:%M %1%2')
+  918             fmtmin_date = dateutil.datestr(min_date, b'%Y-%m-%d %H:%M %1%2')
-  919             fmtmax_date = dateutil.datestr(max_date, '%Y-%m-%d %H:%M %1%2')
+  919             fmtmax_date = dateutil.datestr(max_date, b'%Y-%m-%d %H:%M %1%2')
-  920             line.append(" (between %s and %s)" % (fmtmin_date, fmtmax_date))
+  920             line.append(b" (between %s and %s)" % (fmtmin_date, fmtmax_date))
   921 
-  922     return "".join(line)
+  922     return b"".join(line)
   923 
   924 
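The code above assembles verb, operations, successors, users and dates into a single bytes line, e.g. something like "rewritten using amend as 1:abcdef123456 by alice (at 2019-10-06 10:15 +0200)" in verbose mode (values invented). The date pieces come from dateutil.datestr on Mercurial's (unixtime, tzoffset) pairs; a small example, assuming the epoch in UTC:

    from mercurial.utils import dateutil

    dateutil.datestr((0, 0), b'%Y-%m-%d %H:%M %1%2')
    # -> b'1970-01-01 00:00 +0000'  (%1%2 expand to the timezone offset)
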
   925 filteredmsgtable = {
-  926     "pruned": _("hidden revision '%s' is pruned"),
+  926     b"pruned": _(b"hidden revision '%s' is pruned"),
-  927     "diverged": _("hidden revision '%s' has diverged"),
+  927     b"diverged": _(b"hidden revision '%s' has diverged"),
-  928     "superseded": _("hidden revision '%s' was rewritten as: %s"),
+  928     b"superseded": _(b"hidden revision '%s' was rewritten as: %s"),
-  929     "superseded_split": _("hidden revision '%s' was split as: %s"),
+  929     b"superseded_split": _(b"hidden revision '%s' was split as: %s"),
-  930     "superseded_split_several": _(
+  930     b"superseded_split_several": _(
-  931         "hidden revision '%s' was split as: %s and " "%d more"
+  931         b"hidden revision '%s' was split as: %s and " b"%d more"
   932     ),
   933 }
   934 
   935 
   936 def _getfilteredreason(repo, changeid, ctx):
   938     """
   939     successors = successorssets(repo, ctx.node())
   940     fate = _getobsfate(successors)
   941 
   942     # Be more precise in case the revision is superseded
-  943     if fate == 'pruned':
+  943     if fate == b'pruned':
-  944         return filteredmsgtable['pruned'] % changeid
+  944         return filteredmsgtable[b'pruned'] % changeid
-  945     elif fate == 'diverged':
+  945     elif fate == b'diverged':
-  946         return filteredmsgtable['diverged'] % changeid
+  946         return filteredmsgtable[b'diverged'] % changeid
-  947     elif fate == 'superseded':
+  947     elif fate == b'superseded':
   948         single_successor = nodemod.short(successors[0][0])
-  949         return filteredmsgtable['superseded'] % (changeid, single_successor)
+  949         return filteredmsgtable[b'superseded'] % (changeid, single_successor)
-  950     elif fate == 'superseded_split':
+  950     elif fate == b'superseded_split':
   951 
   952         succs = []
   953         for node_id in successors[0]:
   954             succs.append(nodemod.short(node_id))
   955 
   956         if len(succs) <= 2:
-  957             fmtsuccs = ', '.join(succs)
+  957             fmtsuccs = b', '.join(succs)
-  958             return filteredmsgtable['superseded_split'] % (changeid, fmtsuccs)
+  958             return filteredmsgtable[b'superseded_split'] % (changeid, fmtsuccs)
   959         else:
-  960             firstsuccessors = ', '.join(succs[:2])
+  960             firstsuccessors = b', '.join(succs[:2])
   961             remainingnumber = len(succs) - 2
   962 
   963             args = (changeid, firstsuccessors, remainingnumber)
-  964             return filteredmsgtable['superseded_split_several'] % args
+  964             return filteredmsgtable[b'superseded_split_several'] % args
   965 
   966 
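_getfilteredreason() turns the fate into a user-facing message via filteredmsgtable; after this change the %-formatting happens entirely on bytes. For example, with invented hashes:

    filteredmsgtable[b'superseded'] % (b'2a934b2a9e43', b'5c95d9e8f3b1')
    # -> b"hidden revision '2a934b2a9e43' was rewritten as: 5c95d9e8f3b1"
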
   967 def divergentsets(repo, ctx):
   968     """Compute sets of commits divergent with a given one"""
   969     cache = {}
   980             if tuple(nsuccset) in base:
   981                 # we already know the latest base for this divergency
   982                 continue
   983             base[tuple(nsuccset)] = n
   984     return [
-  985         {'divergentnodes': divset, 'commonpredecessor': b}
+  985         {b'divergentnodes': divset, b'commonpredecessor': b}
   986         for divset, b in base.iteritems()
   987     ]
   988 
   989 
   990 def whyunstable(repo, ctx):
   991     result = []
   992     if ctx.orphan():
   993         for parent in ctx.parents():
   994             kind = None
   995             if parent.orphan():
-  996                 kind = 'orphan'
+  996                 kind = b'orphan'
   997             elif parent.obsolete():
-  998                 kind = 'obsolete'
+  998                 kind = b'obsolete'
   999             if kind is not None:
  1000                 result.append(
  1001                     {
- 1002                         'instability': 'orphan',
+ 1002                         b'instability': b'orphan',
- 1003                         'reason': '%s parent' % kind,
+ 1003                         b'reason': b'%s parent' % kind,
- 1004                         'node': parent.hex(),
+ 1004                         b'node': parent.hex(),
  1005                     }
  1006                 )
  1007     if ctx.phasedivergent():
  1008         predecessors = allpredecessors(
  1009             repo.obsstore, [ctx.node()], ignoreflags=bumpedfix
  1012             repo[p] for p in predecessors if p in repo and not repo[p].mutable()
  1013         ]
  1014         for predecessor in immutable:
  1015             result.append(
  1016                 {
- 1017                     'instability': 'phase-divergent',
+ 1017                     b'instability': b'phase-divergent',
- 1018                     'reason': 'immutable predecessor',
+ 1018                     b'reason': b'immutable predecessor',
- 1019                     'node': predecessor.hex(),
+ 1019                     b'node': predecessor.hex(),
  1020                 }
  1021             )
  1022     if ctx.contentdivergent():
  1023         dsets = divergentsets(repo, ctx)
  1024         for dset in dsets:
- 1025             divnodes = [repo[n] for n in dset['divergentnodes']]
+ 1025             divnodes = [repo[n] for n in dset[b'divergentnodes']]
  1026             result.append(
  1027                 {
- 1028                     'instability': 'content-divergent',
+ 1028                     b'instability': b'content-divergent',
- 1029                     'divergentnodes': divnodes,
+ 1029                     b'divergentnodes': divnodes,
- 1030                     'reason': 'predecessor',
+ 1030                     b'reason': b'predecessor',
- 1031                     'node': nodemod.hex(dset['commonpredecessor']),
+ 1031                     b'node': nodemod.hex(dset[b'commonpredecessor']),
  1032                 }
  1033             )
  1034     return result
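
whyunstable() returns a list of small dicts, all keyed with bytes after this change. A sketch of one entry for an orphan changeset whose parent is obsolete (node value invented):

    {
        b'instability': b'orphan',
        b'reason': b'obsolete parent',   # b'%s parent' % kind
        b'node': b'5c95d9e8f3b1...',     # parent.hex(), value invented
    }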