mercurial/exchange.py
changeset 43077:687b865b95ad
parent    43076:2372284d9457
child     43106:d783f945a701
--- mercurial/exchange.py	43076:2372284d9457
+++ mercurial/exchange.py	43077:687b865b95ad
@@ -42,53 +42,53 @@
 from .utils import stringutil
 
 urlerr = util.urlerr
 urlreq = util.urlreq
 
-_NARROWACL_SECTION = 'narrowacl'
+_NARROWACL_SECTION = b'narrowacl'
 
 # Maps bundle version human names to changegroup versions.
 _bundlespeccgversions = {
-    'v1': '01',
-    'v2': '02',
-    'packed1': 's1',
-    'bundle2': '02',  # legacy
+    b'v1': b'01',
+    b'v2': b'02',
+    b'packed1': b's1',
+    b'bundle2': b'02',  # legacy
 }
 
 # Maps bundle version with content opts to choose which part to bundle
 _bundlespeccontentopts = {
-    'v1': {
-        'changegroup': True,
-        'cg.version': '01',
-        'obsolescence': False,
-        'phases': False,
-        'tagsfnodescache': False,
-        'revbranchcache': False,
+    b'v1': {
+        b'changegroup': True,
+        b'cg.version': b'01',
+        b'obsolescence': False,
+        b'phases': False,
+        b'tagsfnodescache': False,
+        b'revbranchcache': False,
     },
-    'v2': {
-        'changegroup': True,
-        'cg.version': '02',
-        'obsolescence': False,
-        'phases': False,
-        'tagsfnodescache': True,
-        'revbranchcache': True,
+    b'v2': {
+        b'changegroup': True,
+        b'cg.version': b'02',
+        b'obsolescence': False,
+        b'phases': False,
+        b'tagsfnodescache': True,
+        b'revbranchcache': True,
     },
-    'packed1': {'cg.version': 's1'},
+    b'packed1': {b'cg.version': b's1'},
 }
-_bundlespeccontentopts['bundle2'] = _bundlespeccontentopts['v2']
+_bundlespeccontentopts[b'bundle2'] = _bundlespeccontentopts[b'v2']
 
 _bundlespecvariants = {
-    "streamv2": {
-        "changegroup": False,
-        "streamv2": True,
-        "tagsfnodescache": False,
-        "revbranchcache": False,
+    b"streamv2": {
+        b"changegroup": False,
+        b"streamv2": True,
+        b"tagsfnodescache": False,
+        b"revbranchcache": False,
     }
 }
 
 # Compression engines allowed in version 1. THIS SHOULD NEVER CHANGE.
-_bundlespecv1compengines = {'gzip', 'bzip2', 'none'}
+_bundlespecv1compengines = {b'gzip', b'bzip2', b'none'}
 
 
 @attr.s
 class bundlespec(object):
     compression = attr.ib()
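
Note on the b'' prefixes used throughout this changeset: on Python 3, str and
bytes never compare equal, so lookups in tables such as _bundlespeccgversions
or _bundlespeccontentopts only hit when both the stored key and the literal at
the call site are bytes. A standalone illustration in plain Python (not
Mercurial code):

    # bytes and str keys do not mix on Python 3
    table = {b'v1': b'01', b'v2': b'02'}   # keys shaped like _bundlespeccgversions
    print(table.get(b'v2'))   # b'02'  -- bytes key matches
    print(table.get('v2'))    # None   -- str key silently misses
    print('v2' == b'v2')      # False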
@@ -132,108 +132,108 @@
     Note: this function will likely eventually return a more complex data
     structure, including bundle2 part information.
     """
 
     def parseparams(s):
-        if ';' not in s:
+        if b';' not in s:
             return s, {}
 
         params = {}
-        version, paramstr = s.split(';', 1)
+        version, paramstr = s.split(b';', 1)
 
-        for p in paramstr.split(';'):
-            if '=' not in p:
+        for p in paramstr.split(b';'):
+            if b'=' not in p:
                 raise error.InvalidBundleSpecification(
                     _(
-                        'invalid bundle specification: '
-                        'missing "=" in parameter: %s'
+                        b'invalid bundle specification: '
+                        b'missing "=" in parameter: %s'
                     )
                     % p
                 )
 
-            key, value = p.split('=', 1)
+            key, value = p.split(b'=', 1)
             key = urlreq.unquote(key)
             value = urlreq.unquote(value)
             params[key] = value
 
         return version, params
 
-    if strict and '-' not in spec:
+    if strict and b'-' not in spec:
         raise error.InvalidBundleSpecification(
             _(
-                'invalid bundle specification; '
-                'must be prefixed with compression: %s'
+                b'invalid bundle specification; '
+                b'must be prefixed with compression: %s'
             )
             % spec
         )
 
-    if '-' in spec:
-        compression, version = spec.split('-', 1)
+    if b'-' in spec:
+        compression, version = spec.split(b'-', 1)
 
         if compression not in util.compengines.supportedbundlenames:
             raise error.UnsupportedBundleSpecification(
-                _('%s compression is not supported') % compression
+                _(b'%s compression is not supported') % compression
             )
 
         version, params = parseparams(version)
 
         if version not in _bundlespeccgversions:
             raise error.UnsupportedBundleSpecification(
-                _('%s is not a recognized bundle version') % version
+                _(b'%s is not a recognized bundle version') % version
            )
    else:
        # Value could be just the compression or just the version, in which
        # case some defaults are assumed (but only when not in strict mode).
        assert not strict

        spec, params = parseparams(spec)

        if spec in util.compengines.supportedbundlenames:
            compression = spec
-            version = 'v1'
+            version = b'v1'
             # Generaldelta repos require v2.
-            if 'generaldelta' in repo.requirements:
-                version = 'v2'
+            if b'generaldelta' in repo.requirements:
+                version = b'v2'
             # Modern compression engines require v2.
             if compression not in _bundlespecv1compengines:
-                version = 'v2'
+                version = b'v2'
         elif spec in _bundlespeccgversions:
-            if spec == 'packed1':
-                compression = 'none'
+            if spec == b'packed1':
+                compression = b'none'
             else:
-                compression = 'bzip2'
+                compression = b'bzip2'
             version = spec
         else:
             raise error.UnsupportedBundleSpecification(
-                _('%s is not a recognized bundle specification') % spec
+                _(b'%s is not a recognized bundle specification') % spec
             )
 
     # Bundle version 1 only supports a known set of compression engines.
-    if version == 'v1' and compression not in _bundlespecv1compengines:
+    if version == b'v1' and compression not in _bundlespecv1compengines:
         raise error.UnsupportedBundleSpecification(
-            _('compression engine %s is not supported on v1 bundles')
+            _(b'compression engine %s is not supported on v1 bundles')
             % compression
         )
 
     # The specification for packed1 can optionally declare the data formats
     # required to apply it. If we see this metadata, compare against what the
     # repo supports and error if the bundle isn't compatible.
-    if version == 'packed1' and 'requirements' in params:
-        requirements = set(params['requirements'].split(','))
+    if version == b'packed1' and b'requirements' in params:
+        requirements = set(params[b'requirements'].split(b','))
         missingreqs = requirements - repo.supportedformats
         if missingreqs:
             raise error.UnsupportedBundleSpecification(
-                _('missing support for repository features: %s')
-                % ', '.join(sorted(missingreqs))
+                _(b'missing support for repository features: %s')
+                % b', '.join(sorted(missingreqs))
             )
 
     # Compute contentopts based on the version
     contentopts = _bundlespeccontentopts.get(version, {}).copy()
 
     # Process the variants
-    if "stream" in params and params["stream"] == "v2":
-        variant = _bundlespecvariants["streamv2"]
+    if b"stream" in params and params[b"stream"] == b"v2":
+        variant = _bundlespecvariants[b"streamv2"]
         contentopts.update(variant)
 
     engine = util.compengines.forbundlename(compression)
     compression, wirecompression = engine.bundletype()
     wireversion = _bundlespeccgversions[version]
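
For orientation, the spec strings handled above have the shape
'<compression>-<version>', optionally followed by ';'-separated 'key=value'
parameters. A rough sketch of that decomposition, mirroring the split calls in
parseparams() with plain bytes operations (the example value is hypothetical,
and the urlreq.unquote() step on keys and values is skipped here):

    spec = b'zstd-v2;stream=v2;requirements=revlogv1'   # hypothetical example

    compression, rest = spec.split(b'-', 1)   # b'zstd', b'v2;stream=v2;...'
    version, paramstr = rest.split(b';', 1)   # b'v2', b'stream=v2;requirements=revlogv1'
    params = dict(p.split(b'=', 1) for p in paramstr.split(b';'))
    print(compression, version, params)
    # b'zstd' b'v2' {b'stream': b'v2', b'requirements': b'revlogv1'}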
@@ -246,32 +246,34 @@
 def readbundle(ui, fh, fname, vfs=None):
     header = changegroup.readexactly(fh, 4)
 
     alg = None
     if not fname:
-        fname = "stream"
-        if not header.startswith('HG') and header.startswith('\0'):
+        fname = b"stream"
+        if not header.startswith(b'HG') and header.startswith(b'\0'):
             fh = changegroup.headerlessfixup(fh, header)
-            header = "HG10"
-            alg = 'UN'
+            header = b"HG10"
+            alg = b'UN'
     elif vfs:
         fname = vfs.join(fname)
 
     magic, version = header[0:2], header[2:4]
 
-    if magic != 'HG':
-        raise error.Abort(_('%s: not a Mercurial bundle') % fname)
-    if version == '10':
+    if magic != b'HG':
+        raise error.Abort(_(b'%s: not a Mercurial bundle') % fname)
+    if version == b'10':
         if alg is None:
             alg = changegroup.readexactly(fh, 2)
         return changegroup.cg1unpacker(fh, alg)
-    elif version.startswith('2'):
+    elif version.startswith(b'2'):
         return bundle2.getunbundler(ui, fh, magicstring=magic + version)
-    elif version == 'S1':
+    elif version == b'S1':
         return streamclone.streamcloneapplier(fh)
     else:
-        raise error.Abort(_('%s: unknown bundle version %s') % (fname, version))
+        raise error.Abort(
+            _(b'%s: unknown bundle version %s') % (fname, version)
+        )
 
 
 def getbundlespec(ui, fh):
     """Infer the bundlespec from a bundle file handle.
 
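
As a quick reference for the dispatch in readbundle() above: the first two
bytes of the four-byte header are the magic and the last two are the version.
A minimal sketch of that mapping, using only the header values named in the
code:

    def classify(header):
        # mirrors the magic/version split in readbundle()
        magic, version = header[0:2], header[2:4]
        if magic != b'HG':
            return 'not a Mercurial bundle'
        if version == b'10':
            return 'changegroup-1 bundle (compression algorithm read next)'
        if version.startswith(b'2'):
            return 'bundle2'
        if version == b'S1':
            return 'stream clone bundle'
        return 'unknown bundle version'

    print(classify(b'HG10'))  # changegroup-1 bundle (compression algorithm read next)
    print(classify(b'HG20'))  # bundle2
    print(classify(b'HGS1'))  # stream clone bundle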
@@ -286,58 +288,60 @@
             return None
 
     b = readbundle(ui, fh, None)
     if isinstance(b, changegroup.cg1unpacker):
         alg = b._type
-        if alg == '_truncatedBZ':
-            alg = 'BZ'
+        if alg == b'_truncatedBZ':
+            alg = b'BZ'
         comp = speccompression(alg)
         if not comp:
-            raise error.Abort(_('unknown compression algorithm: %s') % alg)
-        return '%s-v1' % comp
+            raise error.Abort(_(b'unknown compression algorithm: %s') % alg)
+        return b'%s-v1' % comp
     elif isinstance(b, bundle2.unbundle20):
-        if 'Compression' in b.params:
-            comp = speccompression(b.params['Compression'])
+        if b'Compression' in b.params:
+            comp = speccompression(b.params[b'Compression'])
             if not comp:
-                raise error.Abort(_('unknown compression algorithm: %s') % comp)
+                raise error.Abort(
+                    _(b'unknown compression algorithm: %s') % comp
+                )
         else:
-            comp = 'none'
+            comp = b'none'
 
         version = None
         for part in b.iterparts():
-            if part.type == 'changegroup':
-                version = part.params['version']
-                if version in ('01', '02'):
-                    version = 'v2'
+            if part.type == b'changegroup':
+                version = part.params[b'version']
+                if version in (b'01', b'02'):
+                    version = b'v2'
                 else:
                     raise error.Abort(
                         _(
-                            'changegroup version %s does not have '
-                            'a known bundlespec'
+                            b'changegroup version %s does not have '
+                            b'a known bundlespec'
                         )
                         % version,
-                        hint=_('try upgrading your Mercurial ' 'client'),
+                        hint=_(b'try upgrading your Mercurial ' b'client'),
                     )
-            elif part.type == 'stream2' and version is None:
+            elif part.type == b'stream2' and version is None:
                 # A stream2 part requires to be part of a v2 bundle
-                requirements = urlreq.unquote(part.params['requirements'])
+                requirements = urlreq.unquote(part.params[b'requirements'])
                 splitted = requirements.split()
                 params = bundle2._formatrequirementsparams(splitted)
-                return 'none-v2;stream=v2;%s' % params
+                return b'none-v2;stream=v2;%s' % params
 
         if not version:
             raise error.Abort(
-                _('could not identify changegroup version in ' 'bundle')
+                _(b'could not identify changegroup version in ' b'bundle')
             )
 
-        return '%s-%s' % (comp, version)
+        return b'%s-%s' % (comp, version)
     elif isinstance(b, streamclone.streamcloneapplier):
         requirements = streamclone.readbundle1header(fh)[2]
         formatted = bundle2._formatrequirementsparams(requirements)
-        return 'none-packed1;%s' % formatted
+        return b'none-packed1;%s' % formatted
     else:
-        raise error.Abort(_('unknown bundle type: %s') % b)
+        raise error.Abort(_(b'unknown bundle type: %s') % b)
 
 
 def _computeoutgoing(repo, heads, common):
     """Computes which revs are outgoing given a set of common
     and a set of heads.
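
Several of the rewrapped messages above (for example the 'try upgrading your
Mercurial client' hint) rely on Python's implicit concatenation of adjacent
literals, which works for bytes exactly as it does for str. A one-line
illustration:

    msg = b'could not identify changegroup version in ' b'bundle'
    print(msg == b'could not identify changegroup version in bundle')  # True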
@@ -359,34 +363,37 @@
 
 
 def _checkpublish(pushop):
     repo = pushop.repo
     ui = repo.ui
-    behavior = ui.config('experimental', 'auto-publish')
-    if pushop.publish or behavior not in ('warn', 'confirm', 'abort'):
+    behavior = ui.config(b'experimental', b'auto-publish')
+    if pushop.publish or behavior not in (b'warn', b'confirm', b'abort'):
         return
-    remotephases = listkeys(pushop.remote, 'phases')
-    if not remotephases.get('publishing', False):
+    remotephases = listkeys(pushop.remote, b'phases')
+    if not remotephases.get(b'publishing', False):
         return
 
     if pushop.revs is None:
-        published = repo.filtered('served').revs('not public()')
+        published = repo.filtered(b'served').revs(b'not public()')
     else:
-        published = repo.revs('::%ln - public()', pushop.revs)
+        published = repo.revs(b'::%ln - public()', pushop.revs)
     if published:
-        if behavior == 'warn':
-            ui.warn(_('%i changesets about to be published\n') % len(published))
-        elif behavior == 'confirm':
+        if behavior == b'warn':
+            ui.warn(
+                _(b'%i changesets about to be published\n') % len(published)
+            )
+        elif behavior == b'confirm':
             if ui.promptchoice(
-                _('push and publish %i changesets (yn)?' '$$ &Yes $$ &No')
+                _(b'push and publish %i changesets (yn)?' b'$$ &Yes $$ &No')
                 % len(published)
             ):
-                raise error.Abort(_('user quit'))
-        elif behavior == 'abort':
-            msg = _('push would publish %i changesets') % len(published)
+                raise error.Abort(_(b'user quit'))
+        elif behavior == b'abort':
+            msg = _(b'push would publish %i changesets') % len(published)
             hint = _(
-                "use --publish or adjust 'experimental.auto-publish'" " config"
+                b"use --publish or adjust 'experimental.auto-publish'"
+                b" config"
             )
             raise error.Abort(msg, hint=hint)
 
 
 def _forcebundle1(op):
@@ -398,13 +405,13 @@
     # version used during exchanged. This is especially handy during test.
     # Value is a list of bundle version to be picked from, highest version
     # should be used.
     #
     # developer config: devel.legacy.exchange
-    exchange = ui.configlist('devel', 'legacy.exchange')
-    forcebundle1 = 'bundle2' not in exchange and 'bundle1' in exchange
-    return forcebundle1 or not op.remote.capable('bundle2')
+    exchange = ui.configlist(b'devel', b'legacy.exchange')
+    forcebundle1 = b'bundle2' not in exchange and b'bundle1' in exchange
+    return forcebundle1 or not op.remote.capable(b'bundle2')
 
 
 class pushoperation(object):
     """A object that represent a single push operation
 
@@ -519,11 +526,11 @@
         nm = self.repo.changelog.nodemap
         cheads = [node for node in self.revs if nm[node] in common]
         # and
         # * commonheads parents on missing
         revset = unfi.set(
-            '%ln and parents(roots(%ln))',
+            b'%ln and parents(roots(%ln))',
             self.outgoing.commonheads,
             self.outgoing.missing,
         )
         cheads.extend(c.node() for c in revset)
         return cheads
@@ -537,21 +544,21 @@
             return self.fallbackheads
 
 
 # mapping of message used when pushing bookmark
 bookmsgmap = {
-    'update': (
-        _("updating bookmark %s\n"),
-        _('updating bookmark %s failed!\n'),
+    b'update': (
+        _(b"updating bookmark %s\n"),
+        _(b'updating bookmark %s failed!\n'),
     ),
-    'export': (
-        _("exporting bookmark %s\n"),
-        _('exporting bookmark %s failed!\n'),
+    b'export': (
+        _(b"exporting bookmark %s\n"),
+        _(b'exporting bookmark %s failed!\n'),
     ),
-    'delete': (
-        _("deleting remote bookmark %s\n"),
-        _('deleting remote bookmark %s failed!\n'),
+    b'delete': (
+        _(b"deleting remote bookmark %s\n"),
+        _(b'deleting remote bookmark %s failed!\n'),
     ),
 }
 
 
 def push(
@@ -588,48 +595,48 @@
         missing = (
             set(pushop.repo.requirements) - pushop.remote.local().supported
         )
         if missing:
             msg = _(
-                "required features are not"
-                " supported in the destination:"
-                " %s"
-            ) % (', '.join(sorted(missing)))
+                b"required features are not"
+                b" supported in the destination:"
+                b" %s"
+            ) % (b', '.join(sorted(missing)))
             raise error.Abort(msg)
 
     if not pushop.remote.canpush():
-        raise error.Abort(_("destination does not support push"))
+        raise error.Abort(_(b"destination does not support push"))
 
-    if not pushop.remote.capable('unbundle'):
+    if not pushop.remote.capable(b'unbundle'):
         raise error.Abort(
             _(
-                'cannot push: destination does not support the '
-                'unbundle wire protocol command'
+                b'cannot push: destination does not support the '
+                b'unbundle wire protocol command'
             )
         )
 
     # get lock as we might write phase data
     wlock = lock = None
     try:
         # bundle2 push may receive a reply bundle touching bookmarks
         # requiring the wlock. Take it now to ensure proper ordering.
-        maypushback = pushop.ui.configbool('experimental', 'bundle2.pushback')
+        maypushback = pushop.ui.configbool(b'experimental', b'bundle2.pushback')
         if (
             (not _forcebundle1(pushop))
             and maypushback
             and not bookmod.bookmarksinstore(repo)
         ):
             wlock = pushop.repo.wlock()
         lock = pushop.repo.lock()
         pushop.trmanager = transactionmanager(
-            pushop.repo, 'push-response', pushop.remote.url()
+            pushop.repo, b'push-response', pushop.remote.url()
        )
    except error.LockUnavailable as err:
        # source repo cannot be locked.
        # We do not abort the push, but just disable the local phase
        # synchronisation.
-        msg = 'cannot lock source repository: %s\n' % stringutil.forcebytestr(
+        msg = b'cannot lock source repository: %s\n' % stringutil.forcebytestr(
             err
         )
         pushop.ui.debug(msg)
 
     with wlock or util.nullcontextmanager():
@@ -643,11 +650,11 @@
                 _pushchangeset(pushop)
                 _pushsyncphase(pushop)
                 _pushobsolete(pushop)
                 _pushbookmark(pushop)
 
-    if repo.ui.configbool('experimental', 'remotenames'):
+    if repo.ui.configbool(b'experimental', b'remotenames'):
         logexchange.pullremotenames(repo, remote)
 
     return pushop
 
 
@@ -684,11 +691,11 @@
     for stepname in pushdiscoveryorder:
         step = pushdiscoverymapping[stepname]
         step(pushop)
 
 
-@pushdiscovery('changeset')
+@pushdiscovery(b'changeset')
 def _pushdiscoverychangeset(pushop):
     """discover the changeset that need to be pushed"""
     fci = discovery.findcommonincoming
     if pushop.revs:
         commoninc = fci(
@@ -711,24 +718,24 @@
     pushop.outgoing = outgoing
     pushop.remoteheads = remoteheads
     pushop.incoming = inc
 
 
-@pushdiscovery('phase')
+@pushdiscovery(b'phase')
 def _pushdiscoveryphase(pushop):
     """discover the phase that needs to be pushed
 
     (computed for both success and failure case for changesets push)"""
     outgoing = pushop.outgoing
     unfi = pushop.repo.unfiltered()
-    remotephases = listkeys(pushop.remote, 'phases')
+    remotephases = listkeys(pushop.remote, b'phases')
 
     if (
-        pushop.ui.configbool('ui', '_usedassubrepo')
+        pushop.ui.configbool(b'ui', b'_usedassubrepo')
         and remotephases  # server supports phases
         and not pushop.outgoing.missing  # no changesets to be pushed
-        and remotephases.get('publishing', False)
+        and remotephases.get(b'publishing', False)
     ):
         # When:
         # - this is a subrepo push
         # - and remote support phase
         # - and no changeset are to be pushed
@@ -744,70 +751,70 @@
     pushop.remotephases = phases.remotephasessummary(
         pushop.repo, pushop.fallbackheads, remotephases
     )
     droots = pushop.remotephases.draftroots
 
-    extracond = ''
+    extracond = b''
     if not pushop.remotephases.publishing:
-        extracond = ' and public()'
-    revset = 'heads((%%ln::%%ln) %s)' % extracond
+        extracond = b' and public()'
+    revset = b'heads((%%ln::%%ln) %s)' % extracond
     # Get the list of all revs draft on remote by public here.
     # XXX Beware that revset break if droots is not strictly
     # XXX root we may want to ensure it is but it is costly
     fallback = list(unfi.set(revset, droots, pushop.fallbackheads))
     if not pushop.remotephases.publishing and pushop.publish:
         future = list(
             unfi.set(
-                '%ln and (not public() or %ln::)', pushop.futureheads, droots
+                b'%ln and (not public() or %ln::)', pushop.futureheads, droots
             )
         )
     elif not outgoing.missing:
         future = fallback
     else:
         # adds changeset we are going to push as draft
         #
         # should not be necessary for publishing server, but because of an
         # issue fixed in xxxxx we have to do it anyway.
         fdroots = list(
-            unfi.set('roots(%ln  + %ln::)', outgoing.missing, droots)
+            unfi.set(b'roots(%ln  + %ln::)', outgoing.missing, droots)
         )
         fdroots = [f.node() for f in fdroots]
         future = list(unfi.set(revset, fdroots, pushop.futureheads))
     pushop.outdatedphases = future
     pushop.fallbackoutdatedphases = fallback
 
 
-@pushdiscovery('obsmarker')
+@pushdiscovery(b'obsmarker')
 def _pushdiscoveryobsmarkers(pushop):
     if not obsolete.isenabled(pushop.repo, obsolete.exchangeopt):
         return
 
     if not pushop.repo.obsstore:
         return
 
-    if 'obsolete' not in listkeys(pushop.remote, 'namespaces'):
+    if b'obsolete' not in listkeys(pushop.remote, b'namespaces'):
         return
 
     repo = pushop.repo
     # very naive computation, that can be quite expensive on big repo.
     # However: evolution is currently slow on them anyway.
-    nodes = (c.node() for c in repo.set('::%ln', pushop.futureheads))
+    nodes = (c.node() for c in repo.set(b'::%ln', pushop.futureheads))
     pushop.outobsmarkers = pushop.repo.obsstore.relevantmarkers(nodes)
 
 
-@pushdiscovery('bookmarks')
+@pushdiscovery(b'bookmarks')
 def _pushdiscoverybookmarks(pushop):
     ui = pushop.ui
     repo = pushop.repo.unfiltered()
     remote = pushop.remote
-    ui.debug("checking for updated bookmarks\n")
+    ui.debug(b"checking for updated bookmarks\n")
     ancestors = ()
     if pushop.revs:
         revnums = pycompat.maplist(repo.changelog.rev, pushop.revs)
         ancestors = repo.changelog.ancestors(revnums, inclusive=True)
 
-    remotebookmark = bookmod.unhexlifybookmarks(listkeys(remote, 'bookmarks'))
+    remotebookmark = bookmod.unhexlifybookmarks(listkeys(remote, b'bookmarks'))
 
     explicit = {
         repo._bookmarks.expandname(bookmark) for bookmark in pushop.bookmarks
     }
 
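
One detail worth noting in the hunk above: the revset template uses %%ln
because the bytes string is %-formatted once in Python (to splice in
extracond) before unfi.set() performs its own %ln substitution. A standalone
illustration of that first formatting pass:

    extracond = b' and public()'
    revset = b'heads((%%ln::%%ln) %s)' % extracond
    print(revset)  # b'heads((%ln::%ln)  and public())'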
@@ -831,34 +838,34 @@
             pushop.outbookmarks.append((b, dcid, scid))
     # search added bookmark
     for b, scid, dcid in addsrc:
         if b in explicit:
             explicit.remove(b)
-            pushop.outbookmarks.append((b, '', scid))
+            pushop.outbookmarks.append((b, b'', scid))
     # search for overwritten bookmark
     for b, scid, dcid in list(advdst) + list(diverge) + list(differ):
         if b in explicit:
             explicit.remove(b)
             pushop.outbookmarks.append((b, dcid, scid))
     # search for bookmark to delete
     for b, scid, dcid in adddst:
         if b in explicit:
             explicit.remove(b)
             # treat as "deleted locally"
-            pushop.outbookmarks.append((b, dcid, ''))
+            pushop.outbookmarks.append((b, dcid, b''))
     # identical bookmarks shouldn't get reported
     for b, scid, dcid in same:
         if b in explicit:
             explicit.remove(b)
 
     if explicit:
         explicit = sorted(explicit)
         # we should probably list all of them
         pushop.ui.warn(
             _(
-                'bookmark %s does not exist on the local '
-                'or remote repository!\n'
+                b'bookmark %s does not exist on the local '
+                b'or remote repository!\n'
             )
             % explicit[0]
         )
         pushop.bkresult = 2
 
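
For readers new to this code: each entry appended to pushop.outbookmarks above
is a (bookmark, old remote node, new node) triple, and an empty bytes value
marks the side on which the bookmark does not exist. A rough sketch with
placeholder node values (the real entries carry binary node ids):

    outbookmarks = [
        (b'feature-x', b'<old node>', b'<new node>'),  # moved -> 'update' message
        (b'new-mark', b'', b'<new node>'),             # only local -> 'export'
        (b'stale-mark', b'<old node>', b''),           # only remote -> 'delete'
    ]
    for book, old, new in outbookmarks:
        print(book, b'delete' if not new else (b'export' if not old else b'update'))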
@@ -876,17 +883,17 @@
     if not pushop.force:
         # if repo.obsstore == False --> no obsolete
         # then, save the iteration
         if unfi.obsstore:
             # this message are here for 80 char limit reason
-            mso = _("push includes obsolete changeset: %s!")
-            mspd = _("push includes phase-divergent changeset: %s!")
-            mscd = _("push includes content-divergent changeset: %s!")
+            mso = _(b"push includes obsolete changeset: %s!")
+            mspd = _(b"push includes phase-divergent changeset: %s!")
+            mscd = _(b"push includes content-divergent changeset: %s!")
             mst = {
-                "orphan": _("push includes orphan changeset: %s!"),
-                "phase-divergent": mspd,
-                "content-divergent": mscd,
+                b"orphan": _(b"push includes orphan changeset: %s!"),
+                b"phase-divergent": mspd,
+                b"content-divergent": mscd,
             }
             # If we are to push if there is at least one
             # obsolete or unstable changeset in missing, at
             # least one of the missinghead will be obsolete or
             # unstable. So checking heads only is ok
@@ -940,25 +947,27 @@
     Exists as an independent function to aid extensions
     """
     # * 'force' do not check for push race,
     # * if we don't push anything, there are nothing to check.
     if not pushop.force and pushop.outgoing.missingheads:
-        allowunrelated = 'related' in bundler.capabilities.get('checkheads', ())
+        allowunrelated = b'related' in bundler.capabilities.get(
+            b'checkheads', ()
+        )
         emptyremote = pushop.pushbranchmap is None
         if not allowunrelated or emptyremote:
-            bundler.newpart('check:heads', data=iter(pushop.remoteheads))
+            bundler.newpart(b'check:heads', data=iter(pushop.remoteheads))
         else:
             affected = set()
             for branch, heads in pushop.pushbranchmap.iteritems():
                 remoteheads, newheads, unsyncedheads, discardedheads = heads
                 if remoteheads is not None:
                     remote = set(remoteheads)
                     affected |= set(discardedheads) & remote
                     affected |= remote - set(newheads)
             if affected:
                 data = iter(sorted(affected))
-                bundler.newpart('check:updated-heads', data=data)
+                bundler.newpart(b'check:updated-heads', data=data)
 
 
 def _pushing(pushop):
     """return True if we are pushing anything"""
     return bool(
   967         or pushop.outobsmarkers
   976         or pushop.outobsmarkers
   968         or pushop.outbookmarks
   977         or pushop.outbookmarks
   969     )
   978     )
   970 
   979 
   971 
   980 
   972 @b2partsgenerator('check-bookmarks')
   981 @b2partsgenerator(b'check-bookmarks')
   973 def _pushb2checkbookmarks(pushop, bundler):
   982 def _pushb2checkbookmarks(pushop, bundler):
   974     """insert bookmark move checking"""
   983     """insert bookmark move checking"""
   975     if not _pushing(pushop) or pushop.force:
   984     if not _pushing(pushop) or pushop.force:
   976         return
   985         return
   977     b2caps = bundle2.bundle2caps(pushop.remote)
   986     b2caps = bundle2.bundle2caps(pushop.remote)
   978     hasbookmarkcheck = 'bookmarks' in b2caps
   987     hasbookmarkcheck = b'bookmarks' in b2caps
   979     if not (pushop.outbookmarks and hasbookmarkcheck):
   988     if not (pushop.outbookmarks and hasbookmarkcheck):
   980         return
   989         return
   981     data = []
   990     data = []
   982     for book, old, new in pushop.outbookmarks:
   991     for book, old, new in pushop.outbookmarks:
   983         data.append((book, old))
   992         data.append((book, old))
   984     checkdata = bookmod.binaryencode(data)
   993     checkdata = bookmod.binaryencode(data)
   985     bundler.newpart('check:bookmarks', data=checkdata)
   994     bundler.newpart(b'check:bookmarks', data=checkdata)
   986 
   995 
   987 
   996 
   988 @b2partsgenerator('check-phases')
   997 @b2partsgenerator(b'check-phases')
   989 def _pushb2checkphases(pushop, bundler):
   998 def _pushb2checkphases(pushop, bundler):
   990     """insert phase move checking"""
   999     """insert phase move checking"""
   991     if not _pushing(pushop) or pushop.force:
  1000     if not _pushing(pushop) or pushop.force:
   992         return
  1001         return
   993     b2caps = bundle2.bundle2caps(pushop.remote)
  1002     b2caps = bundle2.bundle2caps(pushop.remote)
   994     hasphaseheads = 'heads' in b2caps.get('phases', ())
  1003     hasphaseheads = b'heads' in b2caps.get(b'phases', ())
   995     if pushop.remotephases is not None and hasphaseheads:
  1004     if pushop.remotephases is not None and hasphaseheads:
   996         # check that the remote phase has not changed
  1005         # check that the remote phase has not changed
   997         checks = [[] for p in phases.allphases]
  1006         checks = [[] for p in phases.allphases]
   998         checks[phases.public].extend(pushop.remotephases.publicheads)
  1007         checks[phases.public].extend(pushop.remotephases.publicheads)
   999         checks[phases.draft].extend(pushop.remotephases.draftroots)
  1008         checks[phases.draft].extend(pushop.remotephases.draftroots)
  1000         if any(checks):
  1009         if any(checks):
  1001             for nodes in checks:
  1010             for nodes in checks:
  1002                 nodes.sort()
  1011                 nodes.sort()
  1003             checkdata = phases.binaryencode(checks)
  1012             checkdata = phases.binaryencode(checks)
  1004             bundler.newpart('check:phases', data=checkdata)
  1013             bundler.newpart(b'check:phases', data=checkdata)
  1005 
  1014 
  1006 
  1015 
  1007 @b2partsgenerator('changeset')
  1016 @b2partsgenerator(b'changeset')
  1008 def _pushb2ctx(pushop, bundler):
  1017 def _pushb2ctx(pushop, bundler):
  1009     """handle changegroup push through bundle2
  1018     """handle changegroup push through bundle2
  1010 
  1019 
  1011     addchangegroup result is stored in the ``pushop.cgresult`` attribute.
  1020     addchangegroup result is stored in the ``pushop.cgresult`` attribute.
  1012     """
  1021     """
  1013     if 'changesets' in pushop.stepsdone:
  1022     if b'changesets' in pushop.stepsdone:
  1014         return
  1023         return
  1015     pushop.stepsdone.add('changesets')
  1024     pushop.stepsdone.add(b'changesets')
  1016     # Send known heads to the server for race detection.
  1025     # Send known heads to the server for race detection.
  1017     if not _pushcheckoutgoing(pushop):
  1026     if not _pushcheckoutgoing(pushop):
  1018         return
  1027         return
  1019     pushop.repo.prepushoutgoinghooks(pushop)
  1028     pushop.repo.prepushoutgoinghooks(pushop)
  1020 
  1029 
  1021     _pushb2ctxcheckheads(pushop, bundler)
  1030     _pushb2ctxcheckheads(pushop, bundler)
  1022 
  1031 
  1023     b2caps = bundle2.bundle2caps(pushop.remote)
  1032     b2caps = bundle2.bundle2caps(pushop.remote)
  1024     version = '01'
  1033     version = b'01'
  1025     cgversions = b2caps.get('changegroup')
  1034     cgversions = b2caps.get(b'changegroup')
  1026     if cgversions:  # 3.1 and 3.2 ship with an empty value
  1035     if cgversions:  # 3.1 and 3.2 ship with an empty value
  1027         cgversions = [
  1036         cgversions = [
  1028             v
  1037             v
  1029             for v in cgversions
  1038             for v in cgversions
  1030             if v in changegroup.supportedoutgoingversions(pushop.repo)
  1039             if v in changegroup.supportedoutgoingversions(pushop.repo)
  1031         ]
  1040         ]
  1032         if not cgversions:
  1041         if not cgversions:
  1033             raise error.Abort(_('no common changegroup version'))
  1042             raise error.Abort(_(b'no common changegroup version'))
  1034         version = max(cgversions)
  1043         version = max(cgversions)
  1035     cgstream = changegroup.makestream(
  1044     cgstream = changegroup.makestream(
  1036         pushop.repo, pushop.outgoing, version, 'push'
  1045         pushop.repo, pushop.outgoing, version, b'push'
  1037     )
  1046     )
  1038     cgpart = bundler.newpart('changegroup', data=cgstream)
  1047     cgpart = bundler.newpart(b'changegroup', data=cgstream)
  1039     if cgversions:
  1048     if cgversions:
  1040         cgpart.addparam('version', version)
  1049         cgpart.addparam(b'version', version)
  1041     if 'treemanifest' in pushop.repo.requirements:
  1050     if b'treemanifest' in pushop.repo.requirements:
  1042         cgpart.addparam('treemanifest', '1')
  1051         cgpart.addparam(b'treemanifest', b'1')
  1043 
  1052 
  1044     def handlereply(op):
  1053     def handlereply(op):
  1045         """extract addchangegroup returns from server reply"""
  1054         """extract addchangegroup returns from server reply"""
  1046         cgreplies = op.records.getreplies(cgpart.id)
  1055         cgreplies = op.records.getreplies(cgpart.id)
  1047         assert len(cgreplies['changegroup']) == 1
  1056         assert len(cgreplies[b'changegroup']) == 1
  1048         pushop.cgresult = cgreplies['changegroup'][0]['return']
  1057         pushop.cgresult = cgreplies[b'changegroup'][0][b'return']
  1049 
  1058 
  1050     return handlereply
  1059     return handlereply
  1051 
  1060 
  1052 
  1061 
  1053 @b2partsgenerator('phase')
  1062 @b2partsgenerator(b'phase')
  1054 def _pushb2phases(pushop, bundler):
  1063 def _pushb2phases(pushop, bundler):
  1055     """handle phase push through bundle2"""
  1064     """handle phase push through bundle2"""
  1056     if 'phases' in pushop.stepsdone:
  1065     if b'phases' in pushop.stepsdone:
  1057         return
  1066         return
  1058     b2caps = bundle2.bundle2caps(pushop.remote)
  1067     b2caps = bundle2.bundle2caps(pushop.remote)
  1059     ui = pushop.repo.ui
  1068     ui = pushop.repo.ui
  1060 
  1069 
  1061     legacyphase = 'phases' in ui.configlist('devel', 'legacy.exchange')
  1070     legacyphase = b'phases' in ui.configlist(b'devel', b'legacy.exchange')
  1062     haspushkey = 'pushkey' in b2caps
  1071     haspushkey = b'pushkey' in b2caps
  1063     hasphaseheads = 'heads' in b2caps.get('phases', ())
  1072     hasphaseheads = b'heads' in b2caps.get(b'phases', ())
  1064 
  1073 
  1065     if hasphaseheads and not legacyphase:
  1074     if hasphaseheads and not legacyphase:
  1066         return _pushb2phaseheads(pushop, bundler)
  1075         return _pushb2phaseheads(pushop, bundler)
  1067     elif haspushkey:
  1076     elif haspushkey:
  1068         return _pushb2phasespushkey(pushop, bundler)
  1077         return _pushb2phasespushkey(pushop, bundler)
  1069 
  1078 
  1070 
  1079 
  1071 def _pushb2phaseheads(pushop, bundler):
  1080 def _pushb2phaseheads(pushop, bundler):
  1072     """push phase information through a bundle2 - binary part"""
  1081     """push phase information through a bundle2 - binary part"""
  1073     pushop.stepsdone.add('phases')
  1082     pushop.stepsdone.add(b'phases')
  1074     if pushop.outdatedphases:
  1083     if pushop.outdatedphases:
  1075         updates = [[] for p in phases.allphases]
  1084         updates = [[] for p in phases.allphases]
  1076         updates[0].extend(h.node() for h in pushop.outdatedphases)
  1085         updates[0].extend(h.node() for h in pushop.outdatedphases)
  1077         phasedata = phases.binaryencode(updates)
  1086         phasedata = phases.binaryencode(updates)
  1078         bundler.newpart('phase-heads', data=phasedata)
  1087         bundler.newpart(b'phase-heads', data=phasedata)
  1079 
  1088 
  1080 
  1089 
  1081 def _pushb2phasespushkey(pushop, bundler):
  1090 def _pushb2phasespushkey(pushop, bundler):
  1082     """push phase information through a bundle2 - pushkey part"""
  1091     """push phase information through a bundle2 - pushkey part"""
  1083     pushop.stepsdone.add('phases')
  1092     pushop.stepsdone.add(b'phases')
  1084     part2node = []
  1093     part2node = []
  1085 
  1094 
  1086     def handlefailure(pushop, exc):
  1095     def handlefailure(pushop, exc):
  1087         targetid = int(exc.partid)
  1096         targetid = int(exc.partid)
  1088         for partid, node in part2node:
  1097         for partid, node in part2node:
  1089             if partid == targetid:
  1098             if partid == targetid:
  1090                 raise error.Abort(_('updating %s to public failed') % node)
  1099                 raise error.Abort(_(b'updating %s to public failed') % node)
  1091 
  1100 
  1092     enc = pushkey.encode
  1101     enc = pushkey.encode
  1093     for newremotehead in pushop.outdatedphases:
  1102     for newremotehead in pushop.outdatedphases:
  1094         part = bundler.newpart('pushkey')
  1103         part = bundler.newpart(b'pushkey')
  1095         part.addparam('namespace', enc('phases'))
  1104         part.addparam(b'namespace', enc(b'phases'))
  1096         part.addparam('key', enc(newremotehead.hex()))
  1105         part.addparam(b'key', enc(newremotehead.hex()))
  1097         part.addparam('old', enc('%d' % phases.draft))
  1106         part.addparam(b'old', enc(b'%d' % phases.draft))
  1098         part.addparam('new', enc('%d' % phases.public))
  1107         part.addparam(b'new', enc(b'%d' % phases.public))
  1099         part2node.append((part.id, newremotehead))
  1108         part2node.append((part.id, newremotehead))
  1100         pushop.pkfailcb[part.id] = handlefailure
  1109         pushop.pkfailcb[part.id] = handlefailure
  1101 
  1110 
  1102     def handlereply(op):
  1111     def handlereply(op):
  1103         for partid, node in part2node:
  1112         for partid, node in part2node:
  1104             partrep = op.records.getreplies(partid)
  1113             partrep = op.records.getreplies(partid)
  1105             results = partrep['pushkey']
  1114             results = partrep[b'pushkey']
  1106             assert len(results) <= 1
  1115             assert len(results) <= 1
  1107             msg = None
  1116             msg = None
  1108             if not results:
  1117             if not results:
  1109                 msg = _('server ignored update of %s to public!\n') % node
  1118                 msg = _(b'server ignored update of %s to public!\n') % node
  1110             elif not int(results[0]['return']):
  1119             elif not int(results[0][b'return']):
  1111                 msg = _('updating %s to public failed!\n') % node
  1120                 msg = _(b'updating %s to public failed!\n') % node
  1112             if msg is not None:
  1121             if msg is not None:
  1113                 pushop.ui.warn(msg)
  1122                 pushop.ui.warn(msg)
  1114 
  1123 
  1115     return handlereply
  1124     return handlereply
  1116 
  1125 
  1117 
  1126 
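A sketch of the parameters attached to each 'pushkey' part generated above, for one hypothetical outdated head; in the real code the values are additionally passed through pushkey.encode.

public, draft = 0, 1                   # phase numbers, as in mercurial.phases
head_hex = b'0123456789abcdef0123456789abcdef01234567'  # made-up changeset hash
params = {
    b'namespace': b'phases',
    b'key': head_hex,
    b'old': b'%d' % draft,             # currently draft on the remote
    b'new': b'%d' % public,            # request publication
}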
  1118 @b2partsgenerator('obsmarkers')
  1127 @b2partsgenerator(b'obsmarkers')
  1119 def _pushb2obsmarkers(pushop, bundler):
  1128 def _pushb2obsmarkers(pushop, bundler):
  1120     if 'obsmarkers' in pushop.stepsdone:
  1129     if b'obsmarkers' in pushop.stepsdone:
  1121         return
  1130         return
  1122     remoteversions = bundle2.obsmarkersversion(bundler.capabilities)
  1131     remoteversions = bundle2.obsmarkersversion(bundler.capabilities)
  1123     if obsolete.commonversion(remoteversions) is None:
  1132     if obsolete.commonversion(remoteversions) is None:
  1124         return
  1133         return
  1125     pushop.stepsdone.add('obsmarkers')
  1134     pushop.stepsdone.add(b'obsmarkers')
  1126     if pushop.outobsmarkers:
  1135     if pushop.outobsmarkers:
  1127         markers = sorted(pushop.outobsmarkers)
  1136         markers = sorted(pushop.outobsmarkers)
  1128         bundle2.buildobsmarkerspart(bundler, markers)
  1137         bundle2.buildobsmarkerspart(bundler, markers)
  1129 
  1138 
  1130 
  1139 
  1131 @b2partsgenerator('bookmarks')
  1140 @b2partsgenerator(b'bookmarks')
  1132 def _pushb2bookmarks(pushop, bundler):
  1141 def _pushb2bookmarks(pushop, bundler):
  1133     """handle bookmark push through bundle2"""
  1142     """handle bookmark push through bundle2"""
  1134     if 'bookmarks' in pushop.stepsdone:
  1143     if b'bookmarks' in pushop.stepsdone:
  1135         return
  1144         return
  1136     b2caps = bundle2.bundle2caps(pushop.remote)
  1145     b2caps = bundle2.bundle2caps(pushop.remote)
  1137 
  1146 
  1138     legacy = pushop.repo.ui.configlist('devel', 'legacy.exchange')
  1147     legacy = pushop.repo.ui.configlist(b'devel', b'legacy.exchange')
  1139     legacybooks = 'bookmarks' in legacy
  1148     legacybooks = b'bookmarks' in legacy
  1140 
  1149 
  1141     if not legacybooks and 'bookmarks' in b2caps:
  1150     if not legacybooks and b'bookmarks' in b2caps:
  1142         return _pushb2bookmarkspart(pushop, bundler)
  1151         return _pushb2bookmarkspart(pushop, bundler)
  1143     elif 'pushkey' in b2caps:
  1152     elif b'pushkey' in b2caps:
  1144         return _pushb2bookmarkspushkey(pushop, bundler)
  1153         return _pushb2bookmarkspushkey(pushop, bundler)
  1145 
  1154 
  1146 
  1155 
  1147 def _bmaction(old, new):
  1156 def _bmaction(old, new):
  1148     """small utility for bookmark pushing"""
  1157     """small utility for bookmark pushing"""
  1149     if not old:
  1158     if not old:
  1150         return 'export'
  1159         return b'export'
  1151     elif not new:
  1160     elif not new:
  1152         return 'delete'
  1161         return b'delete'
  1153     return 'update'
  1162     return b'update'
  1154 
  1163 
  1155 
  1164 
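The mapping implemented by _bmaction above, restated on placeholder values (real callers pass binary node ids or None).

def _bmaction_sketch(old, new):
    if not old:
        return b'export'
    elif not new:
        return b'delete'
    return b'update'

assert _bmaction_sketch(None, b'new-node') == b'export'          # bookmark is new remotely
assert _bmaction_sketch(b'old-node', None) == b'delete'          # bookmark was removed
assert _bmaction_sketch(b'old-node', b'new-node') == b'update'   # bookmark moved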
  1156 def _abortonsecretctx(pushop, node, b):
  1165 def _abortonsecretctx(pushop, node, b):
  1157     """abort if a given bookmark points to a secret changeset"""
  1166     """abort if a given bookmark points to a secret changeset"""
  1158     if node and pushop.repo[node].phase() == phases.secret:
  1167     if node and pushop.repo[node].phase() == phases.secret:
  1159         raise error.Abort(
  1168         raise error.Abort(
  1160             _('cannot push bookmark %s as it points to a secret' ' changeset')
  1169             _(b'cannot push bookmark %s as it points to a secret' b' changeset')
  1161             % b
  1170             % b
  1162         )
  1171         )
  1163 
  1172 
  1164 
  1173 
  1165 def _pushb2bookmarkspart(pushop, bundler):
  1174 def _pushb2bookmarkspart(pushop, bundler):
  1166     pushop.stepsdone.add('bookmarks')
  1175     pushop.stepsdone.add(b'bookmarks')
  1167     if not pushop.outbookmarks:
  1176     if not pushop.outbookmarks:
  1168         return
  1177         return
  1169 
  1178 
  1170     allactions = []
  1179     allactions = []
  1171     data = []
  1180     data = []
  1172     for book, old, new in pushop.outbookmarks:
  1181     for book, old, new in pushop.outbookmarks:
  1173         _abortonsecretctx(pushop, new, book)
  1182         _abortonsecretctx(pushop, new, book)
  1174         data.append((book, new))
  1183         data.append((book, new))
  1175         allactions.append((book, _bmaction(old, new)))
  1184         allactions.append((book, _bmaction(old, new)))
  1176     checkdata = bookmod.binaryencode(data)
  1185     checkdata = bookmod.binaryencode(data)
  1177     bundler.newpart('bookmarks', data=checkdata)
  1186     bundler.newpart(b'bookmarks', data=checkdata)
  1178 
  1187 
  1179     def handlereply(op):
  1188     def handlereply(op):
  1180         ui = pushop.ui
  1189         ui = pushop.ui
  1181         # if success
  1190         # if success
  1182         for book, action in allactions:
  1191         for book, action in allactions:
  1184 
  1193 
  1185     return handlereply
  1194     return handlereply
  1186 
  1195 
  1187 
  1196 
  1188 def _pushb2bookmarkspushkey(pushop, bundler):
  1197 def _pushb2bookmarkspushkey(pushop, bundler):
  1189     pushop.stepsdone.add('bookmarks')
  1198     pushop.stepsdone.add(b'bookmarks')
  1190     part2book = []
  1199     part2book = []
  1191     enc = pushkey.encode
  1200     enc = pushkey.encode
  1192 
  1201 
  1193     def handlefailure(pushop, exc):
  1202     def handlefailure(pushop, exc):
  1194         targetid = int(exc.partid)
  1203         targetid = int(exc.partid)
  1198         # we should not be called for a part we did not generate
  1207         # we should not be called for a part we did not generate
  1199         assert False
  1208         assert False
  1200 
  1209 
  1201     for book, old, new in pushop.outbookmarks:
  1210     for book, old, new in pushop.outbookmarks:
  1202         _abortonsecretctx(pushop, new, book)
  1211         _abortonsecretctx(pushop, new, book)
  1203         part = bundler.newpart('pushkey')
  1212         part = bundler.newpart(b'pushkey')
  1204         part.addparam('namespace', enc('bookmarks'))
  1213         part.addparam(b'namespace', enc(b'bookmarks'))
  1205         part.addparam('key', enc(book))
  1214         part.addparam(b'key', enc(book))
  1206         part.addparam('old', enc(hex(old)))
  1215         part.addparam(b'old', enc(hex(old)))
  1207         part.addparam('new', enc(hex(new)))
  1216         part.addparam(b'new', enc(hex(new)))
  1208         action = 'update'
  1217         action = b'update'
  1209         if not old:
  1218         if not old:
  1210             action = 'export'
  1219             action = b'export'
  1211         elif not new:
  1220         elif not new:
  1212             action = 'delete'
  1221             action = b'delete'
  1213         part2book.append((part.id, book, action))
  1222         part2book.append((part.id, book, action))
  1214         pushop.pkfailcb[part.id] = handlefailure
  1223         pushop.pkfailcb[part.id] = handlefailure
  1215 
  1224 
  1216     def handlereply(op):
  1225     def handlereply(op):
  1217         ui = pushop.ui
  1226         ui = pushop.ui
  1218         for partid, book, action in part2book:
  1227         for partid, book, action in part2book:
  1219             partrep = op.records.getreplies(partid)
  1228             partrep = op.records.getreplies(partid)
  1220             results = partrep['pushkey']
  1229             results = partrep[b'pushkey']
  1221             assert len(results) <= 1
  1230             assert len(results) <= 1
  1222             if not results:
  1231             if not results:
  1223                 pushop.ui.warn(_('server ignored bookmark %s update\n') % book)
  1232                 pushop.ui.warn(_(b'server ignored bookmark %s update\n') % book)
  1224             else:
  1233             else:
  1225                 ret = int(results[0]['return'])
  1234                 ret = int(results[0][b'return'])
  1226                 if ret:
  1235                 if ret:
  1227                     ui.status(bookmsgmap[action][0] % book)
  1236                     ui.status(bookmsgmap[action][0] % book)
  1228                 else:
  1237                 else:
  1229                     ui.warn(bookmsgmap[action][1] % book)
  1238                     ui.warn(bookmsgmap[action][1] % book)
  1230                     if pushop.bkresult is not None:
  1239                     if pushop.bkresult is not None:
  1231                         pushop.bkresult = 1
  1240                         pushop.bkresult = 1
  1232 
  1241 
  1233     return handlereply
  1242     return handlereply
  1234 
  1243 
  1235 
  1244 
  1236 @b2partsgenerator('pushvars', idx=0)
  1245 @b2partsgenerator(b'pushvars', idx=0)
  1237 def _getbundlesendvars(pushop, bundler):
  1246 def _getbundlesendvars(pushop, bundler):
  1238     '''send shellvars via bundle2'''
  1247     '''send shellvars via bundle2'''
  1239     pushvars = pushop.pushvars
  1248     pushvars = pushop.pushvars
  1240     if pushvars:
  1249     if pushvars:
  1241         shellvars = {}
  1250         shellvars = {}
  1242         for raw in pushvars:
  1251         for raw in pushvars:
  1243             if '=' not in raw:
  1252             if b'=' not in raw:
  1244                 msg = (
  1253                 msg = (
  1245                     "unable to parse variable '%s', should follow "
  1254                     b"unable to parse variable '%s', should follow "
  1246                     "'KEY=VALUE' or 'KEY=' format"
  1255                     b"'KEY=VALUE' or 'KEY=' format"
  1247                 )
  1256                 )
  1248                 raise error.Abort(msg % raw)
  1257                 raise error.Abort(msg % raw)
  1249             k, v = raw.split('=', 1)
  1258             k, v = raw.split(b'=', 1)
  1250             shellvars[k] = v
  1259             shellvars[k] = v
  1251 
  1260 
  1252         part = bundler.newpart('pushvars')
  1261         part = bundler.newpart(b'pushvars')
  1253 
  1262 
  1254         for key, value in shellvars.iteritems():
  1263         for key, value in shellvars.iteritems():
  1255             part.addparam(key, value, mandatory=False)
  1264             part.addparam(key, value, mandatory=False)
  1256 
  1265 
  1257 
  1266 
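A self-contained sketch of the KEY=VALUE parsing done above for --pushvars arguments; parse_pushvars is a hypothetical helper.

def parse_pushvars(pushvars):
    shellvars = {}
    for raw in pushvars:
        if b'=' not in raw:
            raise ValueError("unable to parse variable %r, should follow "
                             "'KEY=VALUE' or 'KEY=' format" % raw)
        k, v = raw.split(b'=', 1)
        shellvars[k] = v
    return shellvars

assert parse_pushvars([b'DEBUG=1', b'REASON=']) == {b'DEBUG': b'1', b'REASON': b''}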
  1260 
  1269 
  1261     The only currently supported type of data is changegroup but this will
  1270     The only currently supported type of data is changegroup but this will
  1262     evolve in the future."""
  1271     evolve in the future."""
  1263     bundler = bundle2.bundle20(pushop.ui, bundle2.bundle2caps(pushop.remote))
  1272     bundler = bundle2.bundle20(pushop.ui, bundle2.bundle2caps(pushop.remote))
  1264     pushback = pushop.trmanager and pushop.ui.configbool(
  1273     pushback = pushop.trmanager and pushop.ui.configbool(
  1265         'experimental', 'bundle2.pushback'
  1274         b'experimental', b'bundle2.pushback'
  1266     )
  1275     )
  1267 
  1276 
  1268     # create reply capability
  1277     # create reply capability
  1269     capsblob = bundle2.encodecaps(
  1278     capsblob = bundle2.encodecaps(
  1270         bundle2.getrepocaps(pushop.repo, allowpushback=pushback, role='client')
  1279         bundle2.getrepocaps(pushop.repo, allowpushback=pushback, role=b'client')
  1271     )
  1280     )
  1272     bundler.newpart('replycaps', data=capsblob)
  1281     bundler.newpart(b'replycaps', data=capsblob)
  1273     replyhandlers = []
  1282     replyhandlers = []
  1274     for partgenname in b2partsgenorder:
  1283     for partgenname in b2partsgenorder:
  1275         partgen = b2partsgenmapping[partgenname]
  1284         partgen = b2partsgenmapping[partgenname]
  1276         ret = partgen(pushop, bundler)
  1285         ret = partgen(pushop, bundler)
  1277         if callable(ret):
  1286         if callable(ret):
  1282     stream = util.chunkbuffer(bundler.getchunks())
  1291     stream = util.chunkbuffer(bundler.getchunks())
  1283     try:
  1292     try:
  1284         try:
  1293         try:
  1285             with pushop.remote.commandexecutor() as e:
  1294             with pushop.remote.commandexecutor() as e:
  1286                 reply = e.callcommand(
  1295                 reply = e.callcommand(
  1287                     'unbundle',
  1296                     b'unbundle',
  1288                     {
  1297                     {
  1289                         'bundle': stream,
  1298                         b'bundle': stream,
  1290                         'heads': ['force'],
  1299                         b'heads': [b'force'],
  1291                         'url': pushop.remote.url(),
  1300                         b'url': pushop.remote.url(),
  1292                     },
  1301                     },
  1293                 ).result()
  1302                 ).result()
  1294         except error.BundleValueError as exc:
  1303         except error.BundleValueError as exc:
  1295             raise error.Abort(_('missing support for %s') % exc)
  1304             raise error.Abort(_(b'missing support for %s') % exc)
  1296         try:
  1305         try:
  1297             trgetter = None
  1306             trgetter = None
  1298             if pushback:
  1307             if pushback:
  1299                 trgetter = pushop.trmanager.transaction
  1308                 trgetter = pushop.trmanager.transaction
  1300             op = bundle2.processbundle(pushop.repo, reply, trgetter)
  1309             op = bundle2.processbundle(pushop.repo, reply, trgetter)
  1301         except error.BundleValueError as exc:
  1310         except error.BundleValueError as exc:
  1302             raise error.Abort(_('missing support for %s') % exc)
  1311             raise error.Abort(_(b'missing support for %s') % exc)
  1303         except bundle2.AbortFromPart as exc:
  1312         except bundle2.AbortFromPart as exc:
  1304             pushop.ui.status(_('remote: %s\n') % exc)
  1313             pushop.ui.status(_(b'remote: %s\n') % exc)
  1305             if exc.hint is not None:
  1314             if exc.hint is not None:
  1306                 pushop.ui.status(_('remote: %s\n') % ('(%s)' % exc.hint))
  1315                 pushop.ui.status(_(b'remote: %s\n') % (b'(%s)' % exc.hint))
  1307             raise error.Abort(_('push failed on remote'))
  1316             raise error.Abort(_(b'push failed on remote'))
  1308     except error.PushkeyFailed as exc:
  1317     except error.PushkeyFailed as exc:
  1309         partid = int(exc.partid)
  1318         partid = int(exc.partid)
  1310         if partid not in pushop.pkfailcb:
  1319         if partid not in pushop.pkfailcb:
  1311             raise
  1320             raise
  1312         pushop.pkfailcb[partid](pushop, exc)
  1321         pushop.pkfailcb[partid](pushop, exc)
  1314         rephand(op)
  1323         rephand(op)
  1315 
  1324 
  1316 
  1325 
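An illustrative, stand-alone sketch of the reply-handler pattern used by _pushbundle2 above: each registered parts generator may return a callable, and the collected callables are invoked with the reply once the server has processed the bundle. All names here are stand-ins, not Mercurial API.

def gen_changegroup(log):
    log.append('changegroup part added to the bundle')
    def handlereply(reply):
        log.append('changegroup result: %r' % (reply.get('changegroup'),))
    return handlereply

def gen_nothing(log):
    return None                     # generators with nothing to send return no handler

log = []
replyhandlers = []
for partgen in (gen_changegroup, gen_nothing):
    ret = partgen(log)
    if callable(ret):
        replyhandlers.append(ret)

server_reply = {'changegroup': 1}   # stand-in for the decoded reply bundle
for rephand in replyhandlers:
    rephand(server_reply)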
  1317 def _pushchangeset(pushop):
  1326 def _pushchangeset(pushop):
  1318     """Make the actual push of changeset bundle to remote repo"""
  1327     """Make the actual push of changeset bundle to remote repo"""
  1319     if 'changesets' in pushop.stepsdone:
  1328     if b'changesets' in pushop.stepsdone:
  1320         return
  1329         return
  1321     pushop.stepsdone.add('changesets')
  1330     pushop.stepsdone.add(b'changesets')
  1322     if not _pushcheckoutgoing(pushop):
  1331     if not _pushcheckoutgoing(pushop):
  1323         return
  1332         return
  1324 
  1333 
  1325     # Should have verified this in push().
  1334     # Should have verified this in push().
  1326     assert pushop.remote.capable('unbundle')
  1335     assert pushop.remote.capable(b'unbundle')
  1327 
  1336 
  1328     pushop.repo.prepushoutgoinghooks(pushop)
  1337     pushop.repo.prepushoutgoinghooks(pushop)
  1329     outgoing = pushop.outgoing
  1338     outgoing = pushop.outgoing
  1330     # TODO: get bundlecaps from remote
  1339     # TODO: get bundlecaps from remote
  1331     bundlecaps = None
  1340     bundlecaps = None
  1336         # push everything,
  1345         # push everything,
  1337         # use the fast path, no race possible on push
  1346         # use the fast path, no race possible on push
  1338         cg = changegroup.makechangegroup(
  1347         cg = changegroup.makechangegroup(
  1339             pushop.repo,
  1348             pushop.repo,
  1340             outgoing,
  1349             outgoing,
  1341             '01',
  1350             b'01',
  1342             'push',
  1351             b'push',
  1343             fastpath=True,
  1352             fastpath=True,
  1344             bundlecaps=bundlecaps,
  1353             bundlecaps=bundlecaps,
  1345         )
  1354         )
  1346     else:
  1355     else:
  1347         cg = changegroup.makechangegroup(
  1356         cg = changegroup.makechangegroup(
  1348             pushop.repo, outgoing, '01', 'push', bundlecaps=bundlecaps
  1357             pushop.repo, outgoing, b'01', b'push', bundlecaps=bundlecaps
  1349         )
  1358         )
  1350 
  1359 
  1351     # apply changegroup to remote
  1360     # apply changegroup to remote
  1352     # local repo finds heads on server, finds out what
  1361     # local repo finds heads on server, finds out what
  1353     # revs it must push. once revs transferred, if server
  1362     # revs it must push. once revs transferred, if server
  1354     # finds it has different heads (someone else won
  1363     # finds it has different heads (someone else won
  1355     # commit/push race), server aborts.
  1364     # commit/push race), server aborts.
  1356     if pushop.force:
  1365     if pushop.force:
  1357         remoteheads = ['force']
  1366         remoteheads = [b'force']
  1358     else:
  1367     else:
  1359         remoteheads = pushop.remoteheads
  1368         remoteheads = pushop.remoteheads
  1360     # ssh: return remote's addchangegroup()
  1369     # ssh: return remote's addchangegroup()
  1361     # http: return remote's addchangegroup() or 0 for error
  1370     # http: return remote's addchangegroup() or 0 for error
  1362     pushop.cgresult = pushop.remote.unbundle(cg, remoteheads, pushop.repo.url())
  1371     pushop.cgresult = pushop.remote.unbundle(cg, remoteheads, pushop.repo.url())
  1364 
  1373 
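A small sketch of the heads argument computed above for remote.unbundle(): b'force' tells the server to skip its race check, otherwise the heads observed during discovery are sent so the server can abort if they changed in the meantime. heads_argument is hypothetical.

def heads_argument(force, observed_remoteheads):
    return [b'force'] if force else observed_remoteheads

assert heads_argument(True, []) == [b'force']
assert heads_argument(False, [b'\x00' * 20]) == [b'\x00' * 20]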
  1365 def _pushsyncphase(pushop):
  1374 def _pushsyncphase(pushop):
  1366     """synchronise phase information locally and remotely"""
  1375     """synchronise phase information locally and remotely"""
  1367     cheads = pushop.commonheads
  1376     cheads = pushop.commonheads
  1368     # even when we don't push, exchanging phase data is useful
  1377     # even when we don't push, exchanging phase data is useful
  1369     remotephases = listkeys(pushop.remote, 'phases')
  1378     remotephases = listkeys(pushop.remote, b'phases')
  1370     if (
  1379     if (
  1371         pushop.ui.configbool('ui', '_usedassubrepo')
  1380         pushop.ui.configbool(b'ui', b'_usedassubrepo')
  1372         and remotephases  # server supports phases
  1381         and remotephases  # server supports phases
  1373         and pushop.cgresult is None  # nothing was pushed
  1382         and pushop.cgresult is None  # nothing was pushed
  1374         and remotephases.get('publishing', False)
  1383         and remotephases.get(b'publishing', False)
  1375     ):
  1384     ):
  1376         # When:
  1385         # When:
  1377         # - this is a subrepo push
  1386         # - this is a subrepo push
  1378         # - and the remote supports phases
  1387         # - and the remote supports phases
  1379         # - and no changeset was pushed
  1388         # - and no changeset was pushed
  1380         # - and remote is publishing
  1389         # - and remote is publishing
  1381         # We may be in issue 3871 case!
  1390         # We may be in issue 3871 case!
  1382         # We drop the detailed phase data obtained as a courtesy and keep
  1391         # We drop the detailed phase data obtained as a courtesy and keep
  1383         # only the publishing flag, so that changesets which are possibly
  1392         # only the publishing flag, so that changesets which are possibly
  1384         # still draft locally get published.
  1393         # still draft locally get published.
  1385         remotephases = {'publishing': 'True'}
  1394         remotephases = {b'publishing': b'True'}
  1386     if not remotephases:  # old server or public only reply from non-publishing
  1395     if not remotephases:  # old server or public only reply from non-publishing
  1387         _localphasemove(pushop, cheads)
  1396         _localphasemove(pushop, cheads)
  1388         # don't push any phase data as there is nothing to push
  1397         # don't push any phase data as there is nothing to push
  1389     else:
  1398     else:
  1390         ana = phases.analyzeremotephases(pushop.repo, cheads, remotephases)
  1399         ana = phases.analyzeremotephases(pushop.repo, cheads, remotephases)
  1391         pheads, droots = ana
  1400         pheads, droots = ana
  1392         ### Apply remote phase on local
  1401         ### Apply remote phase on local
  1393         if remotephases.get('publishing', False):
  1402         if remotephases.get(b'publishing', False):
  1394             _localphasemove(pushop, cheads)
  1403             _localphasemove(pushop, cheads)
  1395         else:  # publish = False
  1404         else:  # publish = False
  1396             _localphasemove(pushop, pheads)
  1405             _localphasemove(pushop, pheads)
  1397             _localphasemove(pushop, cheads, phases.draft)
  1406             _localphasemove(pushop, cheads, phases.draft)
  1398         ### Apply local phase on remote
  1407         ### Apply local phase on remote
  1399 
  1408 
  1400         if pushop.cgresult:
  1409         if pushop.cgresult:
  1401             if 'phases' in pushop.stepsdone:
  1410             if b'phases' in pushop.stepsdone:
  1402                 # phases already pushed through bundle2
  1411                 # phases already pushed through bundle2
  1403                 return
  1412                 return
  1404             outdated = pushop.outdatedphases
  1413             outdated = pushop.outdatedphases
  1405         else:
  1414         else:
  1406             outdated = pushop.fallbackoutdatedphases
  1415             outdated = pushop.fallbackoutdatedphases
  1407 
  1416 
  1408         pushop.stepsdone.add('phases')
  1417         pushop.stepsdone.add(b'phases')
  1409 
  1418 
  1410         # filter heads already turned public by the push
  1419         # filter heads already turned public by the push
  1411         outdated = [c for c in outdated if c.node() not in pheads]
  1420         outdated = [c for c in outdated if c.node() not in pheads]
  1412         # fallback to independent pushkey command
  1421         # fallback to independent pushkey command
  1413         for newremotehead in outdated:
  1422         for newremotehead in outdated:
  1414             with pushop.remote.commandexecutor() as e:
  1423             with pushop.remote.commandexecutor() as e:
  1415                 r = e.callcommand(
  1424                 r = e.callcommand(
  1416                     'pushkey',
  1425                     b'pushkey',
  1417                     {
  1426                     {
  1418                         'namespace': 'phases',
  1427                         b'namespace': b'phases',
  1419                         'key': newremotehead.hex(),
  1428                         b'key': newremotehead.hex(),
  1420                         'old': '%d' % phases.draft,
  1429                         b'old': b'%d' % phases.draft,
  1421                         'new': '%d' % phases.public,
  1430                         b'new': b'%d' % phases.public,
  1422                     },
  1431                     },
  1423                 ).result()
  1432                 ).result()
  1424 
  1433 
  1425             if not r:
  1434             if not r:
  1426                 pushop.ui.warn(
  1435                 pushop.ui.warn(
  1427                     _('updating %s to public failed!\n') % newremotehead
  1436                     _(b'updating %s to public failed!\n') % newremotehead
  1428                 )
  1437                 )
  1429 
  1438 
  1430 
  1439 
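A schematic summary of the local phase moves chosen above, depending on whether the remote reported itself as publishing. The helper and its return format are purely illustrative.

def local_phase_moves(publishing, cheads, pheads):
    if publishing:
        return [(cheads, 'public')]
    # non-publishing remote: heads it reports as public become public here,
    # the rest of the common set is moved to (at most) draft
    return [(pheads, 'public'), (cheads, 'draft')]

assert local_phase_moves(True, ['h1', 'h2'], []) == [(['h1', 'h2'], 'public')]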
  1431 def _localphasemove(pushop, nodes, phase=phases.public):
  1440 def _localphasemove(pushop, nodes, phase=phases.public):
  1432     """move <nodes> to <phase> in the local source repo"""
  1441     """move <nodes> to <phase> in the local source repo"""
  1441         actualmoves = [n for n in nodes if phase < pushop.repo[n].phase()]
  1450         actualmoves = [n for n in nodes if phase < pushop.repo[n].phase()]
  1442         phasestr = phases.phasenames[phase]
  1451         phasestr = phases.phasenames[phase]
  1443         if actualmoves:
  1452         if actualmoves:
  1444             pushop.ui.status(
  1453             pushop.ui.status(
  1445                 _(
  1454                 _(
  1446                     'cannot lock source repo, skipping '
  1455                     b'cannot lock source repo, skipping '
  1447                     'local %s phase update\n'
  1456                     b'local %s phase update\n'
  1448                 )
  1457                 )
  1449                 % phasestr
  1458                 % phasestr
  1450             )
  1459             )
  1451 
  1460 
  1452 
  1461 
  1453 def _pushobsolete(pushop):
  1462 def _pushobsolete(pushop):
  1454     """utility function to push obsolete markers to a remote"""
  1463     """utility function to push obsolete markers to a remote"""
  1455     if 'obsmarkers' in pushop.stepsdone:
  1464     if b'obsmarkers' in pushop.stepsdone:
  1456         return
  1465         return
  1457     repo = pushop.repo
  1466     repo = pushop.repo
  1458     remote = pushop.remote
  1467     remote = pushop.remote
  1459     pushop.stepsdone.add('obsmarkers')
  1468     pushop.stepsdone.add(b'obsmarkers')
  1460     if pushop.outobsmarkers:
  1469     if pushop.outobsmarkers:
  1461         pushop.ui.debug('try to push obsolete markers to remote\n')
  1470         pushop.ui.debug(b'try to push obsolete markers to remote\n')
  1462         rslts = []
  1471         rslts = []
  1463         remotedata = obsolete._pushkeyescape(sorted(pushop.outobsmarkers))
  1472         remotedata = obsolete._pushkeyescape(sorted(pushop.outobsmarkers))
  1464         for key in sorted(remotedata, reverse=True):
  1473         for key in sorted(remotedata, reverse=True):
  1465             # reverse sort to ensure we end with dump0
  1474             # reverse sort to ensure we end with dump0
  1466             data = remotedata[key]
  1475             data = remotedata[key]
  1467             rslts.append(remote.pushkey('obsolete', key, '', data))
  1476             rslts.append(remote.pushkey(b'obsolete', key, b'', data))
  1468         if [r for r in rslts if not r]:
  1477         if [r for r in rslts if not r]:
  1469             msg = _('failed to push some obsolete markers!\n')
  1478             msg = _(b'failed to push some obsolete markers!\n')
  1470             repo.ui.warn(msg)
  1479             repo.ui.warn(msg)
  1471 
  1480 
  1472 
  1481 
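A sketch of the reverse-sorted key ordering mentioned in the comment above: the escaped marker payload is split over keys named like dump0, dump1, ..., and pushing in reverse lexical order guarantees the sequence ends with dump0. The payloads below are made up.

remotedata = {b'dump0': b'<markers chunk 0>', b'dump1': b'<markers chunk 1>'}
push_order = sorted(remotedata, reverse=True)
assert push_order == [b'dump1', b'dump0']   # dump0 is pushed last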
  1473 def _pushbookmark(pushop):
  1482 def _pushbookmark(pushop):
  1474     """Update bookmark position on remote"""
  1483     """Update bookmark position on remote"""
  1475     if pushop.cgresult == 0 or 'bookmarks' in pushop.stepsdone:
  1484     if pushop.cgresult == 0 or b'bookmarks' in pushop.stepsdone:
  1476         return
  1485         return
  1477     pushop.stepsdone.add('bookmarks')
  1486     pushop.stepsdone.add(b'bookmarks')
  1478     ui = pushop.ui
  1487     ui = pushop.ui
  1479     remote = pushop.remote
  1488     remote = pushop.remote
  1480 
  1489 
  1481     for b, old, new in pushop.outbookmarks:
  1490     for b, old, new in pushop.outbookmarks:
  1482         action = 'update'
  1491         action = b'update'
  1483         if not old:
  1492         if not old:
  1484             action = 'export'
  1493             action = b'export'
  1485         elif not new:
  1494         elif not new:
  1486             action = 'delete'
  1495             action = b'delete'
  1487 
  1496 
  1488         with remote.commandexecutor() as e:
  1497         with remote.commandexecutor() as e:
  1489             r = e.callcommand(
  1498             r = e.callcommand(
  1490                 'pushkey',
  1499                 b'pushkey',
  1491                 {
  1500                 {
  1492                     'namespace': 'bookmarks',
  1501                     b'namespace': b'bookmarks',
  1493                     'key': b,
  1502                     b'key': b,
  1494                     'old': hex(old),
  1503                     b'old': hex(old),
  1495                     'new': hex(new),
  1504                     b'new': hex(new),
  1496                 },
  1505                 },
  1497             ).result()
  1506             ).result()
  1498 
  1507 
  1499         if r:
  1508         if r:
  1500             ui.status(bookmsgmap[action][0] % b)
  1509             ui.status(bookmsgmap[action][0] % b)
  1608         self._tr = None
  1617         self._tr = None
  1609 
  1618 
  1610     def transaction(self):
  1619     def transaction(self):
  1611         """Return an open transaction object, constructing if necessary"""
  1620         """Return an open transaction object, constructing if necessary"""
  1612         if not self._tr:
  1621         if not self._tr:
  1613             trname = '%s\n%s' % (self.source, util.hidepassword(self.url))
  1622             trname = b'%s\n%s' % (self.source, util.hidepassword(self.url))
  1614             self._tr = self.repo.transaction(trname)
  1623             self._tr = self.repo.transaction(trname)
  1615             self._tr.hookargs['source'] = self.source
  1624             self._tr.hookargs[b'source'] = self.source
  1616             self._tr.hookargs['url'] = self.url
  1625             self._tr.hookargs[b'url'] = self.url
  1617         return self._tr
  1626         return self._tr
  1618 
  1627 
  1619     def close(self):
  1628     def close(self):
  1620         """close transaction if created"""
  1629         """close transaction if created"""
  1621         if self._tr is not None:
  1630         if self._tr is not None:
  1627             self._tr.release()
  1636             self._tr.release()
  1628 
  1637 
  1629 
  1638 
  1630 def listkeys(remote, namespace):
  1639 def listkeys(remote, namespace):
  1631     with remote.commandexecutor() as e:
  1640     with remote.commandexecutor() as e:
  1632         return e.callcommand('listkeys', {'namespace': namespace}).result()
  1641         return e.callcommand(b'listkeys', {b'namespace': namespace}).result()
  1633 
  1642 
  1634 
  1643 
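An illustrative shape of a 'phases' listkeys reply as consumed by the surrounding push/pull code; the 'publishing' flag is taken from the code above, while the commented-out node entry is an assumption about non-publishing servers.

remotephases = {
    b'publishing': b'True',
    # b'<40-hex changeset id>': b'1',   # assumed draft-boundary entries (non-publishing)
}
publishing = bool(remotephases.get(b'publishing', False))
assert publishing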
  1635 def _fullpullbundle2(repo, pullop):
  1644 def _fullpullbundle2(repo, pullop):
  1636     # The server may send a partial reply, i.e. when inlining
  1645     # The server may send a partial reply, i.e. when inlining
  1637     # pre-computed bundles. In that case, update the common
  1646     # pre-computed bundles. In that case, update the common
  1645     unfi = repo.unfiltered()
  1654     unfi = repo.unfiltered()
  1646     unficl = unfi.changelog
  1655     unficl = unfi.changelog
  1647 
  1656 
  1648     def headsofdiff(h1, h2):
  1657     def headsofdiff(h1, h2):
  1649         """Returns heads(h1 % h2)"""
  1658         """Returns heads(h1 % h2)"""
  1650         res = unfi.set('heads(%ln %% %ln)', h1, h2)
  1659         res = unfi.set(b'heads(%ln %% %ln)', h1, h2)
  1651         return set(ctx.node() for ctx in res)
  1660         return set(ctx.node() for ctx in res)
  1652 
  1661 
  1653     def headsofunion(h1, h2):
  1662     def headsofunion(h1, h2):
  1654         """Returns heads((h1 + h2) - null)"""
  1663         """Returns heads((h1 + h2) - null)"""
  1655         res = unfi.set('heads((%ln + %ln - null))', h1, h2)
  1664         res = unfi.set(b'heads((%ln + %ln - null))', h1, h2)
  1656         return set(ctx.node() for ctx in res)
  1665         return set(ctx.node() for ctx in res)
  1657 
  1666 
  1658     while True:
  1667     while True:
  1659         old_heads = unficl.heads()
  1668         old_heads = unficl.heads()
  1660         clstart = len(unficl)
  1669         clstart = len(unficl)
  1742     peerlocal = pullop.remote.local()
  1751     peerlocal = pullop.remote.local()
  1743     if peerlocal:
  1752     if peerlocal:
  1744         missing = set(peerlocal.requirements) - pullop.repo.supported
  1753         missing = set(peerlocal.requirements) - pullop.repo.supported
  1745         if missing:
  1754         if missing:
  1746             msg = _(
  1755             msg = _(
  1747                 "required features are not"
  1756                 b"required features are not"
  1748                 " supported in the destination:"
  1757                 b" supported in the destination:"
  1749                 " %s"
  1758                 b" %s"
  1750             ) % (', '.join(sorted(missing)))
  1759             ) % (b', '.join(sorted(missing)))
  1751             raise error.Abort(msg)
  1760             raise error.Abort(msg)
  1752 
  1761 
  1753     pullop.trmanager = transactionmanager(repo, 'pull', remote.url())
  1762     pullop.trmanager = transactionmanager(repo, b'pull', remote.url())
  1754     wlock = util.nullcontextmanager()
  1763     wlock = util.nullcontextmanager()
  1755     if not bookmod.bookmarksinstore(repo):
  1764     if not bookmod.bookmarksinstore(repo):
  1756         wlock = repo.wlock()
  1765         wlock = repo.wlock()
  1757     with wlock, repo.lock(), pullop.trmanager:
  1766     with wlock, repo.lock(), pullop.trmanager:
  1758         # Use the modern wire protocol, if available.
  1767         # Use the modern wire protocol, if available.
  1759         if remote.capable('command-changesetdata'):
  1768         if remote.capable(b'command-changesetdata'):
  1760             exchangev2.pull(pullop)
  1769             exchangev2.pull(pullop)
  1761         else:
  1770         else:
  1762             # This should ideally be in _pullbundle2(). However, it needs to run
  1771             # This should ideally be in _pullbundle2(). However, it needs to run
  1763             # before discovery to avoid extra work.
  1772             # before discovery to avoid extra work.
  1764             _maybeapplyclonebundle(pullop)
  1773             _maybeapplyclonebundle(pullop)
  1770             _pullphase(pullop)
  1779             _pullphase(pullop)
  1771             _pullbookmarks(pullop)
  1780             _pullbookmarks(pullop)
  1772             _pullobsolete(pullop)
  1781             _pullobsolete(pullop)
  1773 
  1782 
  1774     # storing remotenames
  1783     # storing remotenames
  1775     if repo.ui.configbool('experimental', 'remotenames'):
  1784     if repo.ui.configbool(b'experimental', b'remotenames'):
  1776         logexchange.pullremotenames(repo, remote)
  1785         logexchange.pullremotenames(repo, remote)
  1777 
  1786 
  1778     return pullop
  1787     return pullop
  1779 
  1788 
  1780 
  1789 
  1811     for stepname in pulldiscoveryorder:
  1820     for stepname in pulldiscoveryorder:
  1812         step = pulldiscoverymapping[stepname]
  1821         step = pulldiscoverymapping[stepname]
  1813         step(pullop)
  1822         step(pullop)
  1814 
  1823 
  1815 
  1824 
  1816 @pulldiscovery('b1:bookmarks')
  1825 @pulldiscovery(b'b1:bookmarks')
  1817 def _pullbookmarkbundle1(pullop):
  1826 def _pullbookmarkbundle1(pullop):
  1818     """fetch bookmark data in bundle1 case
  1827     """fetch bookmark data in bundle1 case
  1819 
  1828 
  1820     If not using bundle2, we have to fetch bookmarks before changeset
  1829     If not using bundle2, we have to fetch bookmarks before changeset
  1821     discovery to reduce the chance and impact of race conditions."""
  1830     discovery to reduce the chance and impact of race conditions."""
  1822     if pullop.remotebookmarks is not None:
  1831     if pullop.remotebookmarks is not None:
  1823         return
  1832         return
  1824     if pullop.canusebundle2 and 'listkeys' in pullop.remotebundle2caps:
  1833     if pullop.canusebundle2 and b'listkeys' in pullop.remotebundle2caps:
  1825         # all known bundle2 servers now support listkeys, but let's be nice
  1834         # all known bundle2 servers now support listkeys, but let's be nice
  1826         # with new implementations.
  1835         # with new implementations.
  1827         return
  1836         return
  1828     books = listkeys(pullop.remote, 'bookmarks')
  1837     books = listkeys(pullop.remote, b'bookmarks')
  1829     pullop.remotebookmarks = bookmod.unhexlifybookmarks(books)
  1838     pullop.remotebookmarks = bookmod.unhexlifybookmarks(books)
  1830 
  1839 
  1831 
  1840 
  1832 @pulldiscovery('changegroup')
  1841 @pulldiscovery(b'changegroup')
  1833 def _pulldiscoverychangegroup(pullop):
  1842 def _pulldiscoverychangegroup(pullop):
  1834     """discovery phase for the pull
  1843     """discovery phase for the pull
  1835 
  1844 
  1836     Currently handles changeset discovery only; will handle all discovery
  1845     Currently handles changeset discovery only; will handle all discovery
  1837     at some point."""
  1846     at some point."""
  1864 
  1873 
  1865 def _pullbundle2(pullop):
  1874 def _pullbundle2(pullop):
  1866     """pull data using bundle2
  1875     """pull data using bundle2
  1867 
  1876 
  1868     For now, the only supported data is the changegroup."""
  1877     For now, the only supported data is the changegroup."""
  1869     kwargs = {'bundlecaps': caps20to10(pullop.repo, role='client')}
  1878     kwargs = {b'bundlecaps': caps20to10(pullop.repo, role=b'client')}
  1870 
  1879 
  1871     # make ui easier to access
  1880     # make ui easier to access
  1872     ui = pullop.repo.ui
  1881     ui = pullop.repo.ui
  1873 
  1882 
  1874     # At the moment we don't do stream clones over bundle2. If that is
  1883     # At the moment we don't do stream clones over bundle2. If that is
  1875     # implemented then here's where the check for that will go.
  1884     # implemented then here's where the check for that will go.
  1876     streaming = streamclone.canperformstreamclone(pullop, bundle2=True)[0]
  1885     streaming = streamclone.canperformstreamclone(pullop, bundle2=True)[0]
  1877 
  1886 
  1878     # declare pull perimeters
  1887     # declare pull perimeters
  1879     kwargs['common'] = pullop.common
  1888     kwargs[b'common'] = pullop.common
  1880     kwargs['heads'] = pullop.heads or pullop.rheads
  1889     kwargs[b'heads'] = pullop.heads or pullop.rheads
  1881 
  1890 
  1882     # check server supports narrow and then adding includepats and excludepats
  1891     # check server supports narrow and then adding includepats and excludepats
  1883     servernarrow = pullop.remote.capable(wireprototypes.NARROWCAP)
  1892     servernarrow = pullop.remote.capable(wireprototypes.NARROWCAP)
  1884     if servernarrow and pullop.includepats:
  1893     if servernarrow and pullop.includepats:
  1885         kwargs['includepats'] = pullop.includepats
  1894         kwargs[b'includepats'] = pullop.includepats
  1886     if servernarrow and pullop.excludepats:
  1895     if servernarrow and pullop.excludepats:
  1887         kwargs['excludepats'] = pullop.excludepats
  1896         kwargs[b'excludepats'] = pullop.excludepats
  1888 
  1897 
  1889     if streaming:
  1898     if streaming:
  1890         kwargs['cg'] = False
  1899         kwargs[b'cg'] = False
  1891         kwargs['stream'] = True
  1900         kwargs[b'stream'] = True
  1892         pullop.stepsdone.add('changegroup')
  1901         pullop.stepsdone.add(b'changegroup')
  1893         pullop.stepsdone.add('phases')
  1902         pullop.stepsdone.add(b'phases')
  1894 
  1903 
  1895     else:
  1904     else:
  1896         # pulling changegroup
  1905         # pulling changegroup
  1897         pullop.stepsdone.add('changegroup')
  1906         pullop.stepsdone.add(b'changegroup')
  1898 
  1907 
  1899         kwargs['cg'] = pullop.fetch
  1908         kwargs[b'cg'] = pullop.fetch
  1900 
  1909 
  1901         legacyphase = 'phases' in ui.configlist('devel', 'legacy.exchange')
  1910         legacyphase = b'phases' in ui.configlist(b'devel', b'legacy.exchange')
  1902         hasbinaryphase = 'heads' in pullop.remotebundle2caps.get('phases', ())
  1911         hasbinaryphase = b'heads' in pullop.remotebundle2caps.get(b'phases', ())
  1903         if not legacyphase and hasbinaryphase:
  1912         if not legacyphase and hasbinaryphase:
  1904             kwargs['phases'] = True
  1913             kwargs[b'phases'] = True
  1905             pullop.stepsdone.add('phases')
  1914             pullop.stepsdone.add(b'phases')
  1906 
  1915 
  1907         if 'listkeys' in pullop.remotebundle2caps:
  1916         if b'listkeys' in pullop.remotebundle2caps:
  1908             if 'phases' not in pullop.stepsdone:
  1917             if b'phases' not in pullop.stepsdone:
  1909                 kwargs['listkeys'] = ['phases']
  1918                 kwargs[b'listkeys'] = [b'phases']
  1910 
  1919 
  1911     bookmarksrequested = False
  1920     bookmarksrequested = False
  1912     legacybookmark = 'bookmarks' in ui.configlist('devel', 'legacy.exchange')
  1921     legacybookmark = b'bookmarks' in ui.configlist(b'devel', b'legacy.exchange')
  1913     hasbinarybook = 'bookmarks' in pullop.remotebundle2caps
  1922     hasbinarybook = b'bookmarks' in pullop.remotebundle2caps
  1914 
  1923 
  1915     if pullop.remotebookmarks is not None:
  1924     if pullop.remotebookmarks is not None:
  1916         pullop.stepsdone.add('request-bookmarks')
  1925         pullop.stepsdone.add(b'request-bookmarks')
  1917 
  1926 
  1918     if (
  1927     if (
  1919         'request-bookmarks' not in pullop.stepsdone
  1928         b'request-bookmarks' not in pullop.stepsdone
  1920         and pullop.remotebookmarks is None
  1929         and pullop.remotebookmarks is None
  1921         and not legacybookmark
  1930         and not legacybookmark
  1922         and hasbinarybook
  1931         and hasbinarybook
  1923     ):
  1932     ):
  1924         kwargs['bookmarks'] = True
  1933         kwargs[b'bookmarks'] = True
  1925         bookmarksrequested = True
  1934         bookmarksrequested = True
  1926 
  1935 
  1927     if 'listkeys' in pullop.remotebundle2caps:
  1936     if b'listkeys' in pullop.remotebundle2caps:
  1928         if 'request-bookmarks' not in pullop.stepsdone:
  1937         if b'request-bookmarks' not in pullop.stepsdone:
  1929             # make sure to always include bookmark data when migrating
  1938             # make sure to always include bookmark data when migrating
  1930             # `hg incoming --bundle` to use this function.
  1939             # `hg incoming --bundle` to use this function.
  1931             pullop.stepsdone.add('request-bookmarks')
  1940             pullop.stepsdone.add(b'request-bookmarks')
  1932             kwargs.setdefault('listkeys', []).append('bookmarks')
  1941             kwargs.setdefault(b'listkeys', []).append(b'bookmarks')
  1933 
  1942 
  1934     # If this is a full pull / clone and the server supports the clone bundles
  1943     # If this is a full pull / clone and the server supports the clone bundles
  1935     # feature, tell the server whether we attempted a clone bundle. The
  1944     # feature, tell the server whether we attempted a clone bundle. The
  1936     # presence of this flag indicates the client supports clone bundles. This
  1945     # presence of this flag indicates the client supports clone bundles. This
  1937     # will enable the server to treat clients that support clone bundles
  1946     # will enable the server to treat clients that support clone bundles
  1938     # differently from those that don't.
  1947     # differently from those that don't.
  1939     if (
  1948     if (
  1940         pullop.remote.capable('clonebundles')
  1949         pullop.remote.capable(b'clonebundles')
  1941         and pullop.heads is None
  1950         and pullop.heads is None
  1942         and list(pullop.common) == [nullid]
  1951         and list(pullop.common) == [nullid]
  1943     ):
  1952     ):
  1944         kwargs['cbattempted'] = pullop.clonebundleattempted
  1953         kwargs[b'cbattempted'] = pullop.clonebundleattempted
  1945 
  1954 
  1946     if streaming:
  1955     if streaming:
  1947         pullop.repo.ui.status(_('streaming all changes\n'))
  1956         pullop.repo.ui.status(_(b'streaming all changes\n'))
  1948     elif not pullop.fetch:
  1957     elif not pullop.fetch:
  1949         pullop.repo.ui.status(_("no changes found\n"))
  1958         pullop.repo.ui.status(_(b"no changes found\n"))
  1950         pullop.cgresult = 0
  1959         pullop.cgresult = 0
  1951     else:
  1960     else:
  1952         if pullop.heads is None and list(pullop.common) == [nullid]:
  1961         if pullop.heads is None and list(pullop.common) == [nullid]:
  1953             pullop.repo.ui.status(_("requesting all changes\n"))
  1962             pullop.repo.ui.status(_(b"requesting all changes\n"))
  1954     if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
  1963     if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
  1955         remoteversions = bundle2.obsmarkersversion(pullop.remotebundle2caps)
  1964         remoteversions = bundle2.obsmarkersversion(pullop.remotebundle2caps)
  1956         if obsolete.commonversion(remoteversions) is not None:
  1965         if obsolete.commonversion(remoteversions) is not None:
  1957             kwargs['obsmarkers'] = True
  1966             kwargs[b'obsmarkers'] = True
  1958             pullop.stepsdone.add('obsmarkers')
  1967             pullop.stepsdone.add(b'obsmarkers')
  1959     _pullbundle2extraprepare(pullop, kwargs)
  1968     _pullbundle2extraprepare(pullop, kwargs)
  1960 
  1969 
  1961     with pullop.remote.commandexecutor() as e:
  1970     with pullop.remote.commandexecutor() as e:
  1962         args = dict(kwargs)
  1971         args = dict(kwargs)
  1963         args['source'] = 'pull'
  1972         args[b'source'] = b'pull'
  1964         bundle = e.callcommand('getbundle', args).result()
  1973         bundle = e.callcommand(b'getbundle', args).result()
  1965 
  1974 
  1966         try:
  1975         try:
  1967             op = bundle2.bundleoperation(
  1976             op = bundle2.bundleoperation(
  1968                 pullop.repo, pullop.gettransaction, source='pull'
  1977                 pullop.repo, pullop.gettransaction, source=b'pull'
  1969             )
  1978             )
  1970             op.modes['bookmarks'] = 'records'
  1979             op.modes[b'bookmarks'] = b'records'
  1971             bundle2.processbundle(pullop.repo, bundle, op=op)
  1980             bundle2.processbundle(pullop.repo, bundle, op=op)
  1972         except bundle2.AbortFromPart as exc:
  1981         except bundle2.AbortFromPart as exc:
  1973             pullop.repo.ui.status(_('remote: abort: %s\n') % exc)
  1982             pullop.repo.ui.status(_(b'remote: abort: %s\n') % exc)
  1974             raise error.Abort(_('pull failed on remote'), hint=exc.hint)
  1983             raise error.Abort(_(b'pull failed on remote'), hint=exc.hint)
  1975         except error.BundleValueError as exc:
  1984         except error.BundleValueError as exc:
  1976             raise error.Abort(_('missing support for %s') % exc)
  1985             raise error.Abort(_(b'missing support for %s') % exc)
  1977 
  1986 
  1978     if pullop.fetch:
  1987     if pullop.fetch:
  1979         pullop.cgresult = bundle2.combinechangegroupresults(op)
  1988         pullop.cgresult = bundle2.combinechangegroupresults(op)
  1980 
  1989 
  1981     # processing phases change
  1990     # processing phases change
  1982     for namespace, value in op.records['listkeys']:
  1991     for namespace, value in op.records[b'listkeys']:
  1983         if namespace == 'phases':
  1992         if namespace == b'phases':
  1984             _pullapplyphases(pullop, value)
  1993             _pullapplyphases(pullop, value)
  1985 
  1994 
  1986     # processing bookmark update
  1995     # processing bookmark update
  1987     if bookmarksrequested:
  1996     if bookmarksrequested:
  1988         books = {}
  1997         books = {}
  1989         for record in op.records['bookmarks']:
  1998         for record in op.records[b'bookmarks']:
  1990             books[record['bookmark']] = record["node"]
  1999             books[record[b'bookmark']] = record[b"node"]
  1991         pullop.remotebookmarks = books
  2000         pullop.remotebookmarks = books
  1992     else:
  2001     else:
  1993         for namespace, value in op.records['listkeys']:
  2002         for namespace, value in op.records[b'listkeys']:
  1994             if namespace == 'bookmarks':
  2003             if namespace == b'bookmarks':
  1995                 pullop.remotebookmarks = bookmod.unhexlifybookmarks(value)
  2004                 pullop.remotebookmarks = bookmod.unhexlifybookmarks(value)
  1996 
  2005 
  1997     # bookmark data were either already there or pulled in the bundle
  2006     # bookmark data were either already there or pulled in the bundle
  1998     if pullop.remotebookmarks is not None:
  2007     if pullop.remotebookmarks is not None:
  1999         _pullbookmarks(pullop)
  2008         _pullbookmarks(pullop)
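A sketch of what the kwargs assembled by _pullbundle2 above can look like for an ordinary (non-streaming) pull from a server with binary phase and bookmark support; every value is a placeholder.

getbundle_args = {
    b'source': b'pull',
    b'bundlecaps': set(),            # from caps20to10(repo, role=b'client')
    b'common': [b'\x00' * 20],       # discovery results (placeholder nodes)
    b'heads': [b'\xff' * 20],
    b'cg': True,                     # request a changegroup
    b'phases': True,                 # binary phase-heads part requested
    b'bookmarks': True,              # binary bookmarks part requested
}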
  2006 def _pullchangeset(pullop):
  2015 def _pullchangeset(pullop):
  2007     """pull changeset from unbundle into the local repo"""
  2016     """pull changeset from unbundle into the local repo"""
  2008     # We delay opening the transaction as late as possible so we do not
  2017     # We delay opening the transaction as late as possible so we do not
  2009     # open a transaction for nothing, which would break a later useful
  2018     # open a transaction for nothing, which would break a later useful
  2010     # rollback call.
  2019     # rollback call.
  2011     if 'changegroup' in pullop.stepsdone:
  2020     if b'changegroup' in pullop.stepsdone:
  2012         return
  2021         return
  2013     pullop.stepsdone.add('changegroup')
  2022     pullop.stepsdone.add(b'changegroup')
  2014     if not pullop.fetch:
  2023     if not pullop.fetch:
  2015         pullop.repo.ui.status(_("no changes found\n"))
  2024         pullop.repo.ui.status(_(b"no changes found\n"))
  2016         pullop.cgresult = 0
  2025         pullop.cgresult = 0
  2017         return
  2026         return
  2018     tr = pullop.gettransaction()
  2027     tr = pullop.gettransaction()
  2019     if pullop.heads is None and list(pullop.common) == [nullid]:
  2028     if pullop.heads is None and list(pullop.common) == [nullid]:
  2020         pullop.repo.ui.status(_("requesting all changes\n"))
  2029         pullop.repo.ui.status(_(b"requesting all changes\n"))
  2021     elif pullop.heads is None and pullop.remote.capable('changegroupsubset'):
  2030     elif pullop.heads is None and pullop.remote.capable(b'changegroupsubset'):
  2022         # issue1320, avoid a race if remote changed after discovery
  2031         # issue1320, avoid a race if remote changed after discovery
  2023         pullop.heads = pullop.rheads
  2032         pullop.heads = pullop.rheads
  2024 
  2033 
  2025     if pullop.remote.capable('getbundle'):
  2034     if pullop.remote.capable(b'getbundle'):
  2026         # TODO: get bundlecaps from remote
  2035         # TODO: get bundlecaps from remote
  2027         cg = pullop.remote.getbundle(
  2036         cg = pullop.remote.getbundle(
  2028             'pull', common=pullop.common, heads=pullop.heads or pullop.rheads
  2037             b'pull', common=pullop.common, heads=pullop.heads or pullop.rheads
  2029         )
  2038         )
  2030     elif pullop.heads is None:
  2039     elif pullop.heads is None:
  2031         with pullop.remote.commandexecutor() as e:
  2040         with pullop.remote.commandexecutor() as e:
  2032             cg = e.callcommand(
  2041             cg = e.callcommand(
  2033                 'changegroup', {'nodes': pullop.fetch, 'source': 'pull',}
  2042                 b'changegroup', {b'nodes': pullop.fetch, b'source': b'pull',}
  2034             ).result()
  2043             ).result()
  2035 
  2044 
  2036     elif not pullop.remote.capable('changegroupsubset'):
  2045     elif not pullop.remote.capable(b'changegroupsubset'):
  2037         raise error.Abort(
  2046         raise error.Abort(
  2038             _(
  2047             _(
  2039                 "partial pull cannot be done because "
  2048                 b"partial pull cannot be done because "
  2040                 "other repository doesn't support "
  2049                 b"other repository doesn't support "
  2041                 "changegroupsubset."
  2050                 b"changegroupsubset."
  2042             )
  2051             )
  2043         )
  2052         )
  2044     else:
  2053     else:
  2045         with pullop.remote.commandexecutor() as e:
  2054         with pullop.remote.commandexecutor() as e:
  2046             cg = e.callcommand(
  2055             cg = e.callcommand(
  2047                 'changegroupsubset',
  2056                 b'changegroupsubset',
  2048                 {
  2057                 {
  2049                     'bases': pullop.fetch,
  2058                     b'bases': pullop.fetch,
  2050                     'heads': pullop.heads,
  2059                     b'heads': pullop.heads,
  2051                     'source': 'pull',
  2060                     b'source': b'pull',
  2052                 },
  2061                 },
  2053             ).result()
  2062             ).result()
  2054 
  2063 
  2055     bundleop = bundle2.applybundle(
  2064     bundleop = bundle2.applybundle(
  2056         pullop.repo, cg, tr, 'pull', pullop.remote.url()
  2065         pullop.repo, cg, tr, b'pull', pullop.remote.url()
  2057     )
  2066     )
  2058     pullop.cgresult = bundle2.combinechangegroupresults(bundleop)
  2067     pullop.cgresult = bundle2.combinechangegroupresults(bundleop)
  2059 
  2068 
  2060 
  2069 
  2061 def _pullphase(pullop):
  2070 def _pullphase(pullop):
  2062     # Get remote phases data from remote
  2071     # Get remote phases data from remote
  2063     if 'phases' in pullop.stepsdone:
  2072     if b'phases' in pullop.stepsdone:
  2064         return
  2073         return
  2065     remotephases = listkeys(pullop.remote, 'phases')
  2074     remotephases = listkeys(pullop.remote, b'phases')
  2066     _pullapplyphases(pullop, remotephases)
  2075     _pullapplyphases(pullop, remotephases)
  2067 
  2076 
  2068 
  2077 
  2069 def _pullapplyphases(pullop, remotephases):
  2078 def _pullapplyphases(pullop, remotephases):
  2070     """apply phase movement from observed remote state"""
  2079     """apply phase movement from observed remote state"""
  2071     if 'phases' in pullop.stepsdone:
  2080     if b'phases' in pullop.stepsdone:
  2072         return
  2081         return
  2073     pullop.stepsdone.add('phases')
  2082     pullop.stepsdone.add(b'phases')
  2074     publishing = bool(remotephases.get('publishing', False))
  2083     publishing = bool(remotephases.get(b'publishing', False))
  2075     if remotephases and not publishing:
  2084     if remotephases and not publishing:
  2076         # remote is new and non-publishing
  2085         # remote is new and non-publishing
  2077         pheads, _dr = phases.analyzeremotephases(
  2086         pheads, _dr = phases.analyzeremotephases(
  2078             pullop.repo, pullop.pulledsubset, remotephases
  2087             pullop.repo, pullop.pulledsubset, remotephases
  2079         )
  2088         )
  2102         phases.advanceboundary(pullop.repo, tr, draft, dheads)
  2111         phases.advanceboundary(pullop.repo, tr, draft, dheads)
  2103 
  2112 
  2104 
  2113 
  2105 def _pullbookmarks(pullop):
  2114 def _pullbookmarks(pullop):
  2106     """process the remote bookmark information to update the local one"""
  2115     """process the remote bookmark information to update the local one"""
  2107     if 'bookmarks' in pullop.stepsdone:
  2116     if b'bookmarks' in pullop.stepsdone:
  2108         return
  2117         return
  2109     pullop.stepsdone.add('bookmarks')
  2118     pullop.stepsdone.add(b'bookmarks')
  2110     repo = pullop.repo
  2119     repo = pullop.repo
  2111     remotebookmarks = pullop.remotebookmarks
  2120     remotebookmarks = pullop.remotebookmarks
  2112     bookmod.updatefromremote(
  2121     bookmod.updatefromremote(
  2113         repo.ui,
  2122         repo.ui,
  2114         repo,
  2123         repo,
  2125     The `gettransaction` is a function that returns the pull transaction,
  2134     The `gettransaction` is a function that returns the pull transaction,
  2126     creating one if necessary. We return the transaction to inform the calling
  2135     creating one if necessary. We return the transaction to inform the calling
  2127     code that a new transaction has been created (when applicable).
  2136     code that a new transaction has been created (when applicable).
  2128 
  2137 
  2129     Exists mostly to allow overriding for experimentation purposes"""
  2138     Exists mostly to allow overriding for experimentation purposes"""
  2130     if 'obsmarkers' in pullop.stepsdone:
  2139     if b'obsmarkers' in pullop.stepsdone:
  2131         return
  2140         return
  2132     pullop.stepsdone.add('obsmarkers')
  2141     pullop.stepsdone.add(b'obsmarkers')
  2133     tr = None
  2142     tr = None
  2134     if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
  2143     if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
  2135         pullop.repo.ui.debug('fetching remote obsolete markers\n')
  2144         pullop.repo.ui.debug(b'fetching remote obsolete markers\n')
  2136         remoteobs = listkeys(pullop.remote, 'obsolete')
  2145         remoteobs = listkeys(pullop.remote, b'obsolete')
  2137         if 'dump0' in remoteobs:
  2146         if b'dump0' in remoteobs:
  2138             tr = pullop.gettransaction()
  2147             tr = pullop.gettransaction()
  2139             markers = []
  2148             markers = []
  2140             for key in sorted(remoteobs, reverse=True):
  2149             for key in sorted(remoteobs, reverse=True):
  2141                 if key.startswith('dump'):
  2150                 if key.startswith(b'dump'):
  2142                     data = util.b85decode(remoteobs[key])
  2151                     data = util.b85decode(remoteobs[key])
  2143                     version, newmarks = obsolete._readmarkers(data)
  2152                     version, newmarks = obsolete._readmarkers(data)
  2144                     markers += newmarks
  2153                     markers += newmarks
  2145             if markers:
  2154             if markers:
  2146                 pullop.repo.obsstore.add(tr, markers)
  2155                 pullop.repo.obsstore.add(tr, markers)
  2154     This massages the named arguments for getbundle wire protocol commands
  2163     This massages the named arguments for getbundle wire protocol commands
  2155     so requested data is filtered through access control rules.
  2164     so requested data is filtered through access control rules.
  2156     """
  2165     """
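    # A minimal sketch of the [narrowacl] section read below (the section and
    # key names come from this module; the user name and paths are only
    # illustrative assumptions):
    #
    #   [narrowacl]
    #   default.includes = *
    #   alice.includes = src/frontend, docs
    #   alice.excludes = docs/internal
    #
    # Values are plain paths (or '*'); the list comprehensions further down
    # turn each entry into a 'path:' matcher pattern.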
  2157     ui = repo.ui
  2166     ui = repo.ui
  2158     # TODO this assumes existence of HTTP and is a layering violation.
  2167     # TODO this assumes existence of HTTP and is a layering violation.
  2159     username = ui.shortuser(ui.environ.get('REMOTE_USER') or ui.username())
  2168     username = ui.shortuser(ui.environ.get(b'REMOTE_USER') or ui.username())
  2160     user_includes = ui.configlist(
  2169     user_includes = ui.configlist(
  2161         _NARROWACL_SECTION,
  2170         _NARROWACL_SECTION,
  2162         username + '.includes',
  2171         username + b'.includes',
  2163         ui.configlist(_NARROWACL_SECTION, 'default.includes'),
  2172         ui.configlist(_NARROWACL_SECTION, b'default.includes'),
  2164     )
  2173     )
  2165     user_excludes = ui.configlist(
  2174     user_excludes = ui.configlist(
  2166         _NARROWACL_SECTION,
  2175         _NARROWACL_SECTION,
  2167         username + '.excludes',
  2176         username + b'.excludes',
  2168         ui.configlist(_NARROWACL_SECTION, 'default.excludes'),
  2177         ui.configlist(_NARROWACL_SECTION, b'default.excludes'),
  2169     )
  2178     )
  2170     if not user_includes:
  2179     if not user_includes:
  2171         raise error.Abort(
  2180         raise error.Abort(
  2172             _("{} configuration for user {} is empty").format(
  2181             _(b"{} configuration for user {} is empty").format(
  2173                 _NARROWACL_SECTION, username
  2182                 _NARROWACL_SECTION, username
  2174             )
  2183             )
  2175         )
  2184         )
  2176 
  2185 
  2177     user_includes = [
  2186     user_includes = [
  2178         'path:.' if p == '*' else 'path:' + p for p in user_includes
  2187         b'path:.' if p == b'*' else b'path:' + p for p in user_includes
  2179     ]
  2188     ]
  2180     user_excludes = [
  2189     user_excludes = [
  2181         'path:.' if p == '*' else 'path:' + p for p in user_excludes
  2190         b'path:.' if p == b'*' else b'path:' + p for p in user_excludes
  2182     ]
  2191     ]
  2183 
  2192 
  2184     req_includes = set(kwargs.get(r'includepats', []))
  2193     req_includes = set(kwargs.get(r'includepats', []))
  2185     req_excludes = set(kwargs.get(r'excludepats', []))
  2194     req_excludes = set(kwargs.get(r'excludepats', []))
  2186 
  2195 
  2188         req_includes, req_excludes, user_includes, user_excludes
  2197         req_includes, req_excludes, user_includes, user_excludes
  2189     )
  2198     )
  2190 
  2199 
  2191     if invalid_includes:
  2200     if invalid_includes:
  2192         raise error.Abort(
  2201         raise error.Abort(
  2193             _("The following includes are not accessible for {}: {}").format(
  2202             _(b"The following includes are not accessible for {}: {}").format(
  2194                 username, invalid_includes
  2203                 username, invalid_includes
  2195             )
  2204             )
  2196         )
  2205         )
  2197 
  2206 
  2198     new_args = {}
  2207     new_args = {}
  2263 
  2272 
  2264     def splithead(head):
  2273     def splithead(head):
  2265         r1, r2, r3 = sorted(ellipsisroots[head])
  2274         r1, r2, r3 = sorted(ellipsisroots[head])
  2266         for nr1, nr2 in ((r2, r3), (r1, r3), (r1, r2)):
  2275         for nr1, nr2 in ((r2, r3), (r1, r3), (r1, r2)):
  2267             mid = repo.revs(
  2276             mid = repo.revs(
  2268                 'sort(merge() & %d::%d & %d::%d, -rev)', nr1, head, nr2, head
  2277                 b'sort(merge() & %d::%d & %d::%d, -rev)', nr1, head, nr2, head
  2269             )
  2278             )
  2270             for j in mid:
  2279             for j in mid:
  2271                 if j == nr2:
  2280                 if j == nr2:
  2272                     return nr2, (nr1, nr2)
  2281                     return nr2, (nr1, nr2)
  2273                 if j not in ellipsisroots or len(ellipsisroots[j]) < 2:
  2282                 if j not in ellipsisroots or len(ellipsisroots[j]) < 2:
  2274                     return j, (nr1, nr2)
  2283                     return j, (nr1, nr2)
  2275         raise error.Abort(
  2284         raise error.Abort(
  2276             _('Failed to split up ellipsis node! head: %d, ' 'roots: %d %d %d')
  2285             _(
       
  2286                 b'Failed to split up ellipsis node! head: %d, '
       
  2287                 b'roots: %d %d %d'
       
  2288             )
  2277             % (head, r1, r2, r3)
  2289             % (head, r1, r2, r3)
  2278         )
  2290         )
  2279 
  2291 
  2280     missing = list(cl.findmissingrevs(common=commonrevs, heads=headsrevs))
  2292     missing = list(cl.findmissingrevs(common=commonrevs, heads=headsrevs))
  2281     visit = reversed(missing)
  2293     visit = reversed(missing)
  2336     return visitnodes, relevant_nodes, ellipsisroots
  2348     return visitnodes, relevant_nodes, ellipsisroots
  2337 
  2349 
  2338 
  2350 
  2339 def caps20to10(repo, role):
  2351 def caps20to10(repo, role):
  2340     """return a set with appropriate options to use bundle20 during getbundle"""
  2352     """return a set with appropriate options to use bundle20 during getbundle"""
  2341     caps = {'HG20'}
  2353     caps = {b'HG20'}
  2342     capsblob = bundle2.encodecaps(bundle2.getrepocaps(repo, role=role))
  2354     capsblob = bundle2.encodecaps(bundle2.getrepocaps(repo, role=role))
  2343     caps.add('bundle2=' + urlreq.quote(capsblob))
  2355     caps.add(b'bundle2=' + urlreq.quote(capsblob))
  2344     return caps
  2356     return caps
  2345 
  2357 
  2346 
  2358 
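# Roundtrip sketch (illustrative only, not part of this change): the
# b'bundle2=' entry produced by caps20to10() is the same quoted blob that
# getbundlechunks() later strips and decodes:
#
#   caps = caps20to10(repo, role=b'client')
#   for cap in caps:
#       if cap.startswith(b'bundle2='):
#           b2caps = bundle2.decodecaps(urlreq.unquote(cap[len(b'bundle2='):]))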
  2347 # List of names of steps to perform for a bundle2 for getbundle, order matters.
  2359 # List of names of steps to perform for a bundle2 for getbundle, order matters.
  2348 getbundle2partsorder = []
  2360 getbundle2partsorder = []
  2375     return dec
  2387     return dec
  2376 
  2388 
  2377 
  2389 
  2378 def bundle2requested(bundlecaps):
  2390 def bundle2requested(bundlecaps):
  2379     if bundlecaps is not None:
  2391     if bundlecaps is not None:
  2380         return any(cap.startswith('HG2') for cap in bundlecaps)
  2392         return any(cap.startswith(b'HG2') for cap in bundlecaps)
  2381     return False
  2393     return False
  2382 
  2394 
  2383 
  2395 
  2384 def getbundlechunks(
  2396 def getbundlechunks(
  2385     repo, source, heads=None, common=None, bundlecaps=None, **kwargs
  2397     repo, source, heads=None, common=None, bundlecaps=None, **kwargs
  2395     kwargs = pycompat.byteskwargs(kwargs)
  2407     kwargs = pycompat.byteskwargs(kwargs)
  2396     info = {}
  2408     info = {}
  2397     usebundle2 = bundle2requested(bundlecaps)
  2409     usebundle2 = bundle2requested(bundlecaps)
  2398     # bundle10 case
  2410     # bundle10 case
  2399     if not usebundle2:
  2411     if not usebundle2:
  2400         if bundlecaps and not kwargs.get('cg', True):
  2412         if bundlecaps and not kwargs.get(b'cg', True):
  2401             raise ValueError(_('request for bundle10 must include changegroup'))
  2413             raise ValueError(
       
  2414                 _(b'request for bundle10 must include changegroup')
       
  2415             )
  2402 
  2416 
  2403         if kwargs:
  2417         if kwargs:
  2404             raise ValueError(
  2418             raise ValueError(
  2405                 _('unsupported getbundle arguments: %s')
  2419                 _(b'unsupported getbundle arguments: %s')
  2406                 % ', '.join(sorted(kwargs.keys()))
  2420                 % b', '.join(sorted(kwargs.keys()))
  2407             )
  2421             )
  2408         outgoing = _computeoutgoing(repo, heads, common)
  2422         outgoing = _computeoutgoing(repo, heads, common)
  2409         info['bundleversion'] = 1
  2423         info[b'bundleversion'] = 1
  2410         return (
  2424         return (
  2411             info,
  2425             info,
  2412             changegroup.makestream(
  2426             changegroup.makestream(
  2413                 repo, outgoing, '01', source, bundlecaps=bundlecaps
  2427                 repo, outgoing, b'01', source, bundlecaps=bundlecaps
  2414             ),
  2428             ),
  2415         )
  2429         )
  2416 
  2430 
  2417     # bundle20 case
  2431     # bundle20 case
  2418     info['bundleversion'] = 2
  2432     info[b'bundleversion'] = 2
  2419     b2caps = {}
  2433     b2caps = {}
  2420     for bcaps in bundlecaps:
  2434     for bcaps in bundlecaps:
  2421         if bcaps.startswith('bundle2='):
  2435         if bcaps.startswith(b'bundle2='):
  2422             blob = urlreq.unquote(bcaps[len('bundle2=') :])
  2436             blob = urlreq.unquote(bcaps[len(b'bundle2=') :])
  2423             b2caps.update(bundle2.decodecaps(blob))
  2437             b2caps.update(bundle2.decodecaps(blob))
  2424     bundler = bundle2.bundle20(repo.ui, b2caps)
  2438     bundler = bundle2.bundle20(repo.ui, b2caps)
  2425 
  2439 
  2426     kwargs['heads'] = heads
  2440     kwargs[b'heads'] = heads
  2427     kwargs['common'] = common
  2441     kwargs[b'common'] = common
  2428 
  2442 
  2429     for name in getbundle2partsorder:
  2443     for name in getbundle2partsorder:
  2430         func = getbundle2partsmapping[name]
  2444         func = getbundle2partsmapping[name]
  2431         func(
  2445         func(
  2432             bundler,
  2446             bundler,
  2435             bundlecaps=bundlecaps,
  2449             bundlecaps=bundlecaps,
  2436             b2caps=b2caps,
  2450             b2caps=b2caps,
  2437             **pycompat.strkwargs(kwargs)
  2451             **pycompat.strkwargs(kwargs)
  2438         )
  2452         )
  2439 
  2453 
  2440     info['prefercompressed'] = bundler.prefercompressed
  2454     info[b'prefercompressed'] = bundler.prefercompressed
  2441 
  2455 
  2442     return info, bundler.getchunks()
  2456     return info, bundler.getchunks()
  2443 
  2457 
  2444 
  2458 
  2445 @getbundle2partsgenerator('stream2')
  2459 @getbundle2partsgenerator(b'stream2')
  2446 def _getbundlestream2(bundler, repo, *args, **kwargs):
  2460 def _getbundlestream2(bundler, repo, *args, **kwargs):
  2447     return bundle2.addpartbundlestream2(bundler, repo, **kwargs)
  2461     return bundle2.addpartbundlestream2(bundler, repo, **kwargs)
  2448 
  2462 
  2449 
  2463 
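# Registration sketch (hypothetical part name, not part of this change):
# additional bundle2 part generators plug in exactly like the ones below,
# via the decorator that records them in getbundle2partsorder and
# getbundle2partsmapping:
#
#   @getbundle2partsgenerator(b'example-part')
#   def _getbundleexamplepart(
#       bundler, repo, source, bundlecaps=None, b2caps=None, **kwargs
#   ):
#       """add a hypothetical example part to the requested bundle"""
#       bundler.newpart(b'example-part', data=b'example payload')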
  2450 @getbundle2partsgenerator('changegroup')
  2464 @getbundle2partsgenerator(b'changegroup')
  2451 def _getbundlechangegrouppart(
  2465 def _getbundlechangegrouppart(
  2452     bundler,
  2466     bundler,
  2453     repo,
  2467     repo,
  2454     source,
  2468     source,
  2455     bundlecaps=None,
  2469     bundlecaps=None,
  2460 ):
  2474 ):
  2461     """add a changegroup part to the requested bundle"""
  2475     """add a changegroup part to the requested bundle"""
  2462     if not kwargs.get(r'cg', True):
  2476     if not kwargs.get(r'cg', True):
  2463         return
  2477         return
  2464 
  2478 
  2465     version = '01'
  2479     version = b'01'
  2466     cgversions = b2caps.get('changegroup')
  2480     cgversions = b2caps.get(b'changegroup')
  2467     if cgversions:  # 3.1 and 3.2 ship with an empty value
  2481     if cgversions:  # 3.1 and 3.2 ship with an empty value
  2468         cgversions = [
  2482         cgversions = [
  2469             v
  2483             v
  2470             for v in cgversions
  2484             for v in cgversions
  2471             if v in changegroup.supportedoutgoingversions(repo)
  2485             if v in changegroup.supportedoutgoingversions(repo)
  2472         ]
  2486         ]
  2473         if not cgversions:
  2487         if not cgversions:
  2474             raise error.Abort(_('no common changegroup version'))
  2488             raise error.Abort(_(b'no common changegroup version'))
  2475         version = max(cgversions)
  2489         version = max(cgversions)
  2476 
  2490 
  2477     outgoing = _computeoutgoing(repo, heads, common)
  2491     outgoing = _computeoutgoing(repo, heads, common)
  2478     if not outgoing.missing:
  2492     if not outgoing.missing:
  2479         return
  2493         return
  2487 
  2501 
  2488     cgstream = changegroup.makestream(
  2502     cgstream = changegroup.makestream(
  2489         repo, outgoing, version, source, bundlecaps=bundlecaps, matcher=matcher
  2503         repo, outgoing, version, source, bundlecaps=bundlecaps, matcher=matcher
  2490     )
  2504     )
  2491 
  2505 
  2492     part = bundler.newpart('changegroup', data=cgstream)
  2506     part = bundler.newpart(b'changegroup', data=cgstream)
  2493     if cgversions:
  2507     if cgversions:
  2494         part.addparam('version', version)
  2508         part.addparam(b'version', version)
  2495 
  2509 
  2496     part.addparam('nbchanges', '%d' % len(outgoing.missing), mandatory=False)
  2510     part.addparam(b'nbchanges', b'%d' % len(outgoing.missing), mandatory=False)
  2497 
  2511 
  2498     if 'treemanifest' in repo.requirements:
  2512     if b'treemanifest' in repo.requirements:
  2499         part.addparam('treemanifest', '1')
  2513         part.addparam(b'treemanifest', b'1')
  2500 
  2514 
  2501     if (
  2515     if (
  2502         kwargs.get(r'narrow', False)
  2516         kwargs.get(r'narrow', False)
  2503         and kwargs.get(r'narrow_acl', False)
  2517         and kwargs.get(r'narrow_acl', False)
  2504         and (include or exclude)
  2518         and (include or exclude)
  2505     ):
  2519     ):
  2506         # this is mandatory because otherwise ACL clients won't work
  2520         # this is mandatory because otherwise ACL clients won't work
  2507         narrowspecpart = bundler.newpart('Narrow:responsespec')
  2521         narrowspecpart = bundler.newpart(b'Narrow:responsespec')
  2508         narrowspecpart.data = '%s\0%s' % (
  2522         narrowspecpart.data = b'%s\0%s' % (
  2509             '\n'.join(include),
  2523             b'\n'.join(include),
  2510             '\n'.join(exclude),
  2524             b'\n'.join(exclude),
  2511         )
  2525         )
  2512 
  2526 
  2513 
  2527 
  2514 @getbundle2partsgenerator('bookmarks')
  2528 @getbundle2partsgenerator(b'bookmarks')
  2515 def _getbundlebookmarkpart(
  2529 def _getbundlebookmarkpart(
  2516     bundler, repo, source, bundlecaps=None, b2caps=None, **kwargs
  2530     bundler, repo, source, bundlecaps=None, b2caps=None, **kwargs
  2517 ):
  2531 ):
  2518     """add a bookmark part to the requested bundle"""
  2532     """add a bookmark part to the requested bundle"""
  2519     if not kwargs.get(r'bookmarks', False):
  2533     if not kwargs.get(r'bookmarks', False):
  2520         return
  2534         return
  2521     if 'bookmarks' not in b2caps:
  2535     if b'bookmarks' not in b2caps:
  2522         raise error.Abort(_('no common bookmarks exchange method'))
  2536         raise error.Abort(_(b'no common bookmarks exchange method'))
  2523     books = bookmod.listbinbookmarks(repo)
  2537     books = bookmod.listbinbookmarks(repo)
  2524     data = bookmod.binaryencode(books)
  2538     data = bookmod.binaryencode(books)
  2525     if data:
  2539     if data:
  2526         bundler.newpart('bookmarks', data=data)
  2540         bundler.newpart(b'bookmarks', data=data)
  2527 
  2541 
  2528 
  2542 
  2529 @getbundle2partsgenerator('listkeys')
  2543 @getbundle2partsgenerator(b'listkeys')
  2530 def _getbundlelistkeysparts(
  2544 def _getbundlelistkeysparts(
  2531     bundler, repo, source, bundlecaps=None, b2caps=None, **kwargs
  2545     bundler, repo, source, bundlecaps=None, b2caps=None, **kwargs
  2532 ):
  2546 ):
  2533     """add parts containing listkeys namespaces to the requested bundle"""
  2547     """add parts containing listkeys namespaces to the requested bundle"""
  2534     listkeys = kwargs.get(r'listkeys', ())
  2548     listkeys = kwargs.get(r'listkeys', ())
  2535     for namespace in listkeys:
  2549     for namespace in listkeys:
  2536         part = bundler.newpart('listkeys')
  2550         part = bundler.newpart(b'listkeys')
  2537         part.addparam('namespace', namespace)
  2551         part.addparam(b'namespace', namespace)
  2538         keys = repo.listkeys(namespace).items()
  2552         keys = repo.listkeys(namespace).items()
  2539         part.data = pushkey.encodekeys(keys)
  2553         part.data = pushkey.encodekeys(keys)
  2540 
  2554 
  2541 
  2555 
  2542 @getbundle2partsgenerator('obsmarkers')
  2556 @getbundle2partsgenerator(b'obsmarkers')
  2543 def _getbundleobsmarkerpart(
  2557 def _getbundleobsmarkerpart(
  2544     bundler, repo, source, bundlecaps=None, b2caps=None, heads=None, **kwargs
  2558     bundler, repo, source, bundlecaps=None, b2caps=None, heads=None, **kwargs
  2545 ):
  2559 ):
  2546     """add an obsolescence markers part to the requested bundle"""
  2560     """add an obsolescence markers part to the requested bundle"""
  2547     if kwargs.get(r'obsmarkers', False):
  2561     if kwargs.get(r'obsmarkers', False):
  2548         if heads is None:
  2562         if heads is None:
  2549             heads = repo.heads()
  2563             heads = repo.heads()
  2550         subset = [c.node() for c in repo.set('::%ln', heads)]
  2564         subset = [c.node() for c in repo.set(b'::%ln', heads)]
  2551         markers = repo.obsstore.relevantmarkers(subset)
  2565         markers = repo.obsstore.relevantmarkers(subset)
  2552         markers = sorted(markers)
  2566         markers = sorted(markers)
  2553         bundle2.buildobsmarkerspart(bundler, markers)
  2567         bundle2.buildobsmarkerspart(bundler, markers)
  2554 
  2568 
  2555 
  2569 
  2556 @getbundle2partsgenerator('phases')
  2570 @getbundle2partsgenerator(b'phases')
  2557 def _getbundlephasespart(
  2571 def _getbundlephasespart(
  2558     bundler, repo, source, bundlecaps=None, b2caps=None, heads=None, **kwargs
  2572     bundler, repo, source, bundlecaps=None, b2caps=None, heads=None, **kwargs
  2559 ):
  2573 ):
  2560     """add phase heads part to the requested bundle"""
  2574     """add phase heads part to the requested bundle"""
  2561     if kwargs.get(r'phases', False):
  2575     if kwargs.get(r'phases', False):
  2562         if not 'heads' in b2caps.get('phases'):
  2576         if not b'heads' in b2caps.get(b'phases'):
  2563             raise error.Abort(_('no common phases exchange method'))
  2577             raise error.Abort(_(b'no common phases exchange method'))
  2564         if heads is None:
  2578         if heads is None:
  2565             heads = repo.heads()
  2579             heads = repo.heads()
  2566 
  2580 
  2567         headsbyphase = collections.defaultdict(set)
  2581         headsbyphase = collections.defaultdict(set)
  2568         if repo.publishing():
  2582         if repo.publishing():
  2585             # intermediate public heads.
  2599             # intermediate public heads.
  2586             draftheads = headsbyphase.get(phases.draft, set())
  2600             draftheads = headsbyphase.get(phases.draft, set())
  2587             if draftheads:
  2601             if draftheads:
  2588                 publicheads = headsbyphase.get(phases.public, set())
  2602                 publicheads = headsbyphase.get(phases.public, set())
  2589 
  2603 
  2590                 revset = 'heads(only(%ln, %ln) and public())'
  2604                 revset = b'heads(only(%ln, %ln) and public())'
  2591                 extraheads = repo.revs(revset, draftheads, publicheads)
  2605                 extraheads = repo.revs(revset, draftheads, publicheads)
  2592                 for r in extraheads:
  2606                 for r in extraheads:
  2593                     headsbyphase[phases.public].add(node(r))
  2607                     headsbyphase[phases.public].add(node(r))
  2594 
  2608 
  2595         # transform data in a format used by the encoding function
  2609         # transform data in a format used by the encoding function
  2597         for phase in phases.allphases:
  2611         for phase in phases.allphases:
  2598             phasemapping.append(sorted(headsbyphase[phase]))
  2612             phasemapping.append(sorted(headsbyphase[phase]))
  2599 
  2613 
  2600         # generate the actual part
  2614         # generate the actual part
  2601         phasedata = phases.binaryencode(phasemapping)
  2615         phasedata = phases.binaryencode(phasemapping)
  2602         bundler.newpart('phase-heads', data=phasedata)
  2616         bundler.newpart(b'phase-heads', data=phasedata)
  2603 
  2617 
  2604 
  2618 
  2605 @getbundle2partsgenerator('hgtagsfnodes')
  2619 @getbundle2partsgenerator(b'hgtagsfnodes')
  2606 def _getbundletagsfnodes(
  2620 def _getbundletagsfnodes(
  2607     bundler,
  2621     bundler,
  2608     repo,
  2622     repo,
  2609     source,
  2623     source,
  2610     bundlecaps=None,
  2624     bundlecaps=None,
  2621     filenodes raw values.
  2635     filenodes raw values.
  2622     """
  2636     """
  2623     # Don't send unless:
  2637     # Don't send unless:
  2624     # - changesets are being exchanged,
  2638     # - changesets are being exchanged,
  2625     # - the client supports it.
  2639     # - the client supports it.
  2626     if not (kwargs.get(r'cg', True) and 'hgtagsfnodes' in b2caps):
  2640     if not (kwargs.get(r'cg', True) and b'hgtagsfnodes' in b2caps):
  2627         return
  2641         return
  2628 
  2642 
  2629     outgoing = _computeoutgoing(repo, heads, common)
  2643     outgoing = _computeoutgoing(repo, heads, common)
  2630     bundle2.addparttagsfnodescache(repo, bundler, outgoing)
  2644     bundle2.addparttagsfnodescache(repo, bundler, outgoing)
  2631 
  2645 
  2632 
  2646 
  2633 @getbundle2partsgenerator('cache:rev-branch-cache')
  2647 @getbundle2partsgenerator(b'cache:rev-branch-cache')
  2634 def _getbundlerevbranchcache(
  2648 def _getbundlerevbranchcache(
  2635     bundler,
  2649     bundler,
  2636     repo,
  2650     repo,
  2637     source,
  2651     source,
  2638     bundlecaps=None,
  2652     bundlecaps=None,
  2655     # - changesets are being exchanged,
  2669     # - changesets are being exchanged,
  2656     # - the client supports it.
  2670     # - the client supports it.
  2657     # - narrow bundle isn't in play (not currently compatible).
  2671     # - narrow bundle isn't in play (not currently compatible).
  2658     if (
  2672     if (
  2659         not kwargs.get(r'cg', True)
  2673         not kwargs.get(r'cg', True)
  2660         or 'rev-branch-cache' not in b2caps
  2674         or b'rev-branch-cache' not in b2caps
  2661         or kwargs.get(r'narrow', False)
  2675         or kwargs.get(r'narrow', False)
  2662         or repo.ui.has_section(_NARROWACL_SECTION)
  2676         or repo.ui.has_section(_NARROWACL_SECTION)
  2663     ):
  2677     ):
  2664         return
  2678         return
  2665 
  2679 
  2671     """check if the heads of a repo have been modified
  2685     """check if the heads of a repo have been modified
  2672 
  2686 
  2673     Used by peer for unbundling.
  2687     Used by peer for unbundling.
  2674     """
  2688     """
  2675     heads = repo.heads()
  2689     heads = repo.heads()
  2676     heads_hash = hashlib.sha1(''.join(sorted(heads))).digest()
  2690     heads_hash = hashlib.sha1(b''.join(sorted(heads))).digest()
  2677     if not (
  2691     if not (
  2678         their_heads == ['force']
  2692         their_heads == [b'force']
  2679         or their_heads == heads
  2693         or their_heads == heads
  2680         or their_heads == ['hashed', heads_hash]
  2694         or their_heads == [b'hashed', heads_hash]
  2681     ):
  2695     ):
  2682         # someone else committed/pushed/unbundled while we
  2696         # someone else committed/pushed/unbundled while we
  2683         # were transferring data
  2697         # were transferring data
  2684         raise error.PushRaced(
  2698         raise error.PushRaced(
  2685             'repository changed while %s - ' 'please try again' % context
  2699             b'repository changed while %s - ' b'please try again' % context
  2686         )
  2700         )
  2687 
  2701 
  2688 
  2702 
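# A minimal sketch (not part of this change) of how a caller could build the
# [b'hashed', <digest>] form accepted by check_heads() above: hash the sorted
# binary heads observed during discovery instead of sending the full list.
#
#   import hashlib
#
#   def hashedheads(observed_heads):
#       # observed_heads: iterable of 20-byte binary node ids
#       digest = hashlib.sha1(b''.join(sorted(observed_heads))).digest()
#       return [b'hashed', digest]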
  2689 def unbundle(repo, cg, heads, source, url):
  2703 def unbundle(repo, cg, heads, source, url):
  2690     """Apply a bundle to a repo.
  2704     """Apply a bundle to a repo.
  2698     # need a transaction when processing a bundle2 stream
  2712     # need a transaction when processing a bundle2 stream
  2699     # [wlock, lock, tr] - needs to be an array so nested functions can modify it
  2713     # [wlock, lock, tr] - needs to be an array so nested functions can modify it
  2700     lockandtr = [None, None, None]
  2714     lockandtr = [None, None, None]
  2701     recordout = None
  2715     recordout = None
  2702     # quick fix for output mismatch with bundle2 in 3.4
  2716     # quick fix for output mismatch with bundle2 in 3.4
  2703     captureoutput = repo.ui.configbool('experimental', 'bundle2-output-capture')
  2717     captureoutput = repo.ui.configbool(
  2704     if url.startswith('remote:http:') or url.startswith('remote:https:'):
  2718         b'experimental', b'bundle2-output-capture'
       
  2719     )
       
  2720     if url.startswith(b'remote:http:') or url.startswith(b'remote:https:'):
  2705         captureoutput = True
  2721         captureoutput = True
  2706     try:
  2722     try:
  2707         # note: outside bundle1, 'heads' is expected to be empty and this
  2723         # note: outside bundle1, 'heads' is expected to be empty and this
  2708         # 'check_heads' call will be a no-op
  2724         # 'check_heads' call will be a no-op
  2709         check_heads(repo, heads, 'uploading changes')
  2725         check_heads(repo, heads, b'uploading changes')
  2710         # push can proceed
  2726         # push can proceed
  2711         if not isinstance(cg, bundle2.unbundle20):
  2727         if not isinstance(cg, bundle2.unbundle20):
  2712             # legacy case: bundle1 (changegroup 01)
  2728             # legacy case: bundle1 (changegroup 01)
  2713             txnname = "\n".join([source, util.hidepassword(url)])
  2729             txnname = b"\n".join([source, util.hidepassword(url)])
  2714             with repo.lock(), repo.transaction(txnname) as tr:
  2730             with repo.lock(), repo.transaction(txnname) as tr:
  2715                 op = bundle2.applybundle(repo, cg, tr, source, url)
  2731                 op = bundle2.applybundle(repo, cg, tr, source, url)
  2716                 r = bundle2.combinechangegroupresults(op)
  2732                 r = bundle2.combinechangegroupresults(op)
  2717         else:
  2733         else:
  2718             r = None
  2734             r = None
  2722                     if not lockandtr[2]:
  2738                     if not lockandtr[2]:
  2723                         if not bookmod.bookmarksinstore(repo):
  2739                         if not bookmod.bookmarksinstore(repo):
  2724                             lockandtr[0] = repo.wlock()
  2740                             lockandtr[0] = repo.wlock()
  2725                         lockandtr[1] = repo.lock()
  2741                         lockandtr[1] = repo.lock()
  2726                         lockandtr[2] = repo.transaction(source)
  2742                         lockandtr[2] = repo.transaction(source)
  2727                         lockandtr[2].hookargs['source'] = source
  2743                         lockandtr[2].hookargs[b'source'] = source
  2728                         lockandtr[2].hookargs['url'] = url
  2744                         lockandtr[2].hookargs[b'url'] = url
  2729                         lockandtr[2].hookargs['bundle2'] = '1'
  2745                         lockandtr[2].hookargs[b'bundle2'] = b'1'
  2730                     return lockandtr[2]
  2746                     return lockandtr[2]
  2731 
  2747 
  2732                 # Do greedy locking by default until we're satisfied with lazy
  2748                 # Do greedy locking by default until we're satisfied with lazy
  2733                 # locking.
  2749                 # locking.
  2734                 if not repo.ui.configbool('experimental', 'bundle2lazylocking'):
  2750                 if not repo.ui.configbool(
       
  2751                     b'experimental', b'bundle2lazylocking'
       
  2752                 ):
  2735                     gettransaction()
  2753                     gettransaction()
  2736 
  2754 
  2737                 op = bundle2.bundleoperation(
  2755                 op = bundle2.bundleoperation(
  2738                     repo,
  2756                     repo,
  2739                     gettransaction,
  2757                     gettransaction,
  2740                     captureoutput=captureoutput,
  2758                     captureoutput=captureoutput,
  2741                     source='push',
  2759                     source=b'push',
  2742                 )
  2760                 )
  2743                 try:
  2761                 try:
  2744                     op = bundle2.processbundle(repo, cg, op=op)
  2762                     op = bundle2.processbundle(repo, cg, op=op)
  2745                 finally:
  2763                 finally:
  2746                     r = op.reply
  2764                     r = op.reply
  2747                     if captureoutput and r is not None:
  2765                     if captureoutput and r is not None:
  2748                         repo.ui.pushbuffer(error=True, subproc=True)
  2766                         repo.ui.pushbuffer(error=True, subproc=True)
  2749 
  2767 
  2750                         def recordout(output):
  2768                         def recordout(output):
  2751                             r.newpart('output', data=output, mandatory=False)
  2769                             r.newpart(b'output', data=output, mandatory=False)
  2752 
  2770 
  2753                 if lockandtr[2] is not None:
  2771                 if lockandtr[2] is not None:
  2754                     lockandtr[2].close()
  2772                     lockandtr[2].close()
  2755             except BaseException as exc:
  2773             except BaseException as exc:
  2756                 exc.duringunbundle2 = True
  2774                 exc.duringunbundle2 = True
  2757                 if captureoutput and r is not None:
  2775                 if captureoutput and r is not None:
  2758                     parts = exc._bundle2salvagedoutput = r.salvageoutput()
  2776                     parts = exc._bundle2salvagedoutput = r.salvageoutput()
  2759 
  2777 
  2760                     def recordout(output):
  2778                     def recordout(output):
  2761                         part = bundle2.bundlepart(
  2779                         part = bundle2.bundlepart(
  2762                             'output', data=output, mandatory=False
  2780                             b'output', data=output, mandatory=False
  2763                         )
  2781                         )
  2764                         parts.append(part)
  2782                         parts.append(part)
  2765 
  2783 
  2766                 raise
  2784                 raise
  2767     finally:
  2785     finally:
  2775     """Apply a clone bundle from a remote, if possible."""
  2793     """Apply a clone bundle from a remote, if possible."""
  2776 
  2794 
  2777     repo = pullop.repo
  2795     repo = pullop.repo
  2778     remote = pullop.remote
  2796     remote = pullop.remote
  2779 
  2797 
  2780     if not repo.ui.configbool('ui', 'clonebundles'):
  2798     if not repo.ui.configbool(b'ui', b'clonebundles'):
  2781         return
  2799         return
  2782 
  2800 
  2783     # Only run if local repo is empty.
  2801     # Only run if local repo is empty.
  2784     if len(repo):
  2802     if len(repo):
  2785         return
  2803         return
  2786 
  2804 
  2787     if pullop.heads:
  2805     if pullop.heads:
  2788         return
  2806         return
  2789 
  2807 
  2790     if not remote.capable('clonebundles'):
  2808     if not remote.capable(b'clonebundles'):
  2791         return
  2809         return
  2792 
  2810 
  2793     with remote.commandexecutor() as e:
  2811     with remote.commandexecutor() as e:
  2794         res = e.callcommand('clonebundles', {}).result()
  2812         res = e.callcommand(b'clonebundles', {}).result()
  2795 
  2813 
  2796     # If we call the wire protocol command, that's good enough to record the
  2814     # If we call the wire protocol command, that's good enough to record the
  2797     # attempt.
  2815     # attempt.
  2798     pullop.clonebundleattempted = True
  2816     pullop.clonebundleattempted = True
  2799 
  2817 
  2800     entries = parseclonebundlesmanifest(repo, res)
  2818     entries = parseclonebundlesmanifest(repo, res)
  2801     if not entries:
  2819     if not entries:
  2802         repo.ui.note(
  2820         repo.ui.note(
  2803             _(
  2821             _(
  2804                 'no clone bundles available on remote; '
  2822                 b'no clone bundles available on remote; '
  2805                 'falling back to regular clone\n'
  2823                 b'falling back to regular clone\n'
  2806             )
  2824             )
  2807         )
  2825         )
  2808         return
  2826         return
  2809 
  2827 
  2810     entries = filterclonebundleentries(
  2828     entries = filterclonebundleentries(
  2817         # they deserve what's coming. Furthermore, from a client's
  2835         # they deserve what's coming. Furthermore, from a client's
  2818         # perspective, no automatic fallback would mean not being able to
  2836         # perspective, no automatic fallback would mean not being able to
  2819         # clone!
  2837         # clone!
  2820         repo.ui.warn(
  2838         repo.ui.warn(
  2821             _(
  2839             _(
  2822                 'no compatible clone bundles available on server; '
  2840                 b'no compatible clone bundles available on server; '
  2823                 'falling back to regular clone\n'
  2841                 b'falling back to regular clone\n'
  2824             )
  2842             )
  2825         )
  2843         )
  2826         repo.ui.warn(
  2844         repo.ui.warn(
  2827             _('(you may want to report this to the server ' 'operator)\n')
  2845             _(b'(you may want to report this to the server ' b'operator)\n')
  2828         )
  2846         )
  2829         return
  2847         return
  2830 
  2848 
  2831     entries = sortclonebundleentries(repo.ui, entries)
  2849     entries = sortclonebundleentries(repo.ui, entries)
  2832 
  2850 
  2833     url = entries[0]['URL']
  2851     url = entries[0][b'URL']
  2834     repo.ui.status(_('applying clone bundle from %s\n') % url)
  2852     repo.ui.status(_(b'applying clone bundle from %s\n') % url)
  2835     if trypullbundlefromurl(repo.ui, repo, url):
  2853     if trypullbundlefromurl(repo.ui, repo, url):
  2836         repo.ui.status(_('finished applying clone bundle\n'))
  2854         repo.ui.status(_(b'finished applying clone bundle\n'))
  2837     # Bundle failed.
  2855     # Bundle failed.
  2838     #
  2856     #
  2839     # We abort by default to avoid the thundering herd of
  2857     # We abort by default to avoid the thundering herd of
  2840     # clients flooding a server that was expecting expensive
  2858     # clients flooding a server that was expecting expensive
  2841     # clone load to be offloaded.
  2859     # clone load to be offloaded.
  2842     elif repo.ui.configbool('ui', 'clonebundlefallback'):
  2860     elif repo.ui.configbool(b'ui', b'clonebundlefallback'):
  2843         repo.ui.warn(_('falling back to normal clone\n'))
  2861         repo.ui.warn(_(b'falling back to normal clone\n'))
  2844     else:
  2862     else:
  2845         raise error.Abort(
  2863         raise error.Abort(
  2846             _('error applying bundle'),
  2864             _(b'error applying bundle'),
  2847             hint=_(
  2865             hint=_(
  2848                 'if this error persists, consider contacting '
  2866                 b'if this error persists, consider contacting '
  2849                 'the server operator or disable clone '
  2867                 b'the server operator or disable clone '
  2850                 'bundles via '
  2868                 b'bundles via '
  2851                 '"--config ui.clonebundles=false"'
  2869                 b'"--config ui.clonebundles=false"'
  2852             ),
  2870             ),
  2853         )
  2871         )
  2854 
  2872 
  2855 
  2873 
  2856 def parseclonebundlesmanifest(repo, s):
  2874 def parseclonebundlesmanifest(repo, s):
  2862     m = []
  2880     m = []
  2863     for line in s.splitlines():
  2881     for line in s.splitlines():
  2864         fields = line.split()
  2882         fields = line.split()
  2865         if not fields:
  2883         if not fields:
  2866             continue
  2884             continue
  2867         attrs = {'URL': fields[0]}
  2885         attrs = {b'URL': fields[0]}
  2868         for rawattr in fields[1:]:
  2886         for rawattr in fields[1:]:
  2869             key, value = rawattr.split('=', 1)
  2887             key, value = rawattr.split(b'=', 1)
  2870             key = urlreq.unquote(key)
  2888             key = urlreq.unquote(key)
  2871             value = urlreq.unquote(value)
  2889             value = urlreq.unquote(value)
  2872             attrs[key] = value
  2890             attrs[key] = value
  2873 
  2891 
  2874             # Parse BUNDLESPEC into components. This makes client-side
  2892             # Parse BUNDLESPEC into components. This makes client-side
  2875             # preferences easier to specify since you can prefer a single
  2893             # preferences easier to specify since you can prefer a single
  2876             # component of the BUNDLESPEC.
  2894             # component of the BUNDLESPEC.
  2877             if key == 'BUNDLESPEC':
  2895             if key == b'BUNDLESPEC':
  2878                 try:
  2896                 try:
  2879                     bundlespec = parsebundlespec(repo, value)
  2897                     bundlespec = parsebundlespec(repo, value)
  2880                     attrs['COMPRESSION'] = bundlespec.compression
  2898                     attrs[b'COMPRESSION'] = bundlespec.compression
  2881                     attrs['VERSION'] = bundlespec.version
  2899                     attrs[b'VERSION'] = bundlespec.version
  2882                 except error.InvalidBundleSpecification:
  2900                 except error.InvalidBundleSpecification:
  2883                     pass
  2901                     pass
  2884                 except error.UnsupportedBundleSpecification:
  2902                 except error.UnsupportedBundleSpecification:
  2885                     pass
  2903                     pass
  2886 
  2904 
  2889     return m
  2907     return m
  2890 
  2908 
  2891 
  2909 
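# Illustrative only (the URL and attribute values are made up): a clone
# bundles manifest line such as
#
#   https://example.com/full.hg BUNDLESPEC=gzip-v2 REQUIRESNI=true
#
# is parsed by parseclonebundlesmanifest() above into roughly
#
#   {b'URL': b'https://example.com/full.hg',
#    b'BUNDLESPEC': b'gzip-v2',
#    b'COMPRESSION': b'gzip',
#    b'VERSION': b'v2',
#    b'REQUIRESNI': b'true'}
#
# so the filtering and sorting helpers below can work on the individual
# BUNDLESPEC components.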
  2892 def isstreamclonespec(bundlespec):
  2910 def isstreamclonespec(bundlespec):
  2893     # Stream clone v1
  2911     # Stream clone v1
  2894     if bundlespec.wirecompression == 'UN' and bundlespec.wireversion == 's1':
  2912     if bundlespec.wirecompression == b'UN' and bundlespec.wireversion == b's1':
  2895         return True
  2913         return True
  2896 
  2914 
  2897     # Stream clone v2
  2915     # Stream clone v2
  2898     if (
  2916     if (
  2899         bundlespec.wirecompression == 'UN'
  2917         bundlespec.wirecompression == b'UN'
  2900         and bundlespec.wireversion == '02'
  2918         and bundlespec.wireversion == b'02'
  2901         and bundlespec.contentopts.get('streamv2')
  2919         and bundlespec.contentopts.get(b'streamv2')
  2902     ):
  2920     ):
  2903         return True
  2921         return True
  2904 
  2922 
  2905     return False
  2923     return False
  2906 
  2924 
  2915     There is no guarantee we'll be able to apply all returned entries because
  2933     There is no guarantee we'll be able to apply all returned entries because
  2916     the metadata we use to filter on may be missing or wrong.
  2934     the metadata we use to filter on may be missing or wrong.
  2917     """
  2935     """
  2918     newentries = []
  2936     newentries = []
  2919     for entry in entries:
  2937     for entry in entries:
  2920         spec = entry.get('BUNDLESPEC')
  2938         spec = entry.get(b'BUNDLESPEC')
  2921         if spec:
  2939         if spec:
  2922             try:
  2940             try:
  2923                 bundlespec = parsebundlespec(repo, spec, strict=True)
  2941                 bundlespec = parsebundlespec(repo, spec, strict=True)
  2924 
  2942 
  2925                 # If a stream clone was requested, filter out non-streamclone
  2943                 # If a stream clone was requested, filter out non-streamclone
  2926                 # entries.
  2944                 # entries.
  2927                 if streamclonerequested and not isstreamclonespec(bundlespec):
  2945                 if streamclonerequested and not isstreamclonespec(bundlespec):
  2928                     repo.ui.debug(
  2946                     repo.ui.debug(
  2929                         'filtering %s because not a stream clone\n'
  2947                         b'filtering %s because not a stream clone\n'
  2930                         % entry['URL']
  2948                         % entry[b'URL']
  2931                     )
  2949                     )
  2932                     continue
  2950                     continue
  2933 
  2951 
  2934             except error.InvalidBundleSpecification as e:
  2952             except error.InvalidBundleSpecification as e:
  2935                 repo.ui.debug(stringutil.forcebytestr(e) + '\n')
  2953                 repo.ui.debug(stringutil.forcebytestr(e) + b'\n')
  2936                 continue
  2954                 continue
  2937             except error.UnsupportedBundleSpecification as e:
  2955             except error.UnsupportedBundleSpecification as e:
  2938                 repo.ui.debug(
  2956                 repo.ui.debug(
  2939                     'filtering %s because unsupported bundle '
  2957                     b'filtering %s because unsupported bundle '
  2940                     'spec: %s\n' % (entry['URL'], stringutil.forcebytestr(e))
  2958                     b'spec: %s\n' % (entry[b'URL'], stringutil.forcebytestr(e))
  2941                 )
  2959                 )
  2942                 continue
  2960                 continue
  2943         # If we don't have a spec and requested a stream clone, we don't know
  2961         # If we don't have a spec and requested a stream clone, we don't know
  2944         # what the entry is so don't attempt to apply it.
  2962         # what the entry is so don't attempt to apply it.
  2945         elif streamclonerequested:
  2963         elif streamclonerequested:
  2946             repo.ui.debug(
  2964             repo.ui.debug(
  2947                 'filtering %s because cannot determine if a stream '
  2965                 b'filtering %s because cannot determine if a stream '
  2948                 'clone bundle\n' % entry['URL']
  2966                 b'clone bundle\n' % entry[b'URL']
  2949             )
  2967             )
  2950             continue
  2968             continue
  2951 
  2969 
  2952         if 'REQUIRESNI' in entry and not sslutil.hassni:
  2970         if b'REQUIRESNI' in entry and not sslutil.hassni:
  2953             repo.ui.debug(
  2971             repo.ui.debug(
  2954                 'filtering %s because SNI not supported\n' % entry['URL']
  2972                 b'filtering %s because SNI not supported\n' % entry[b'URL']
  2955             )
  2973             )
  2956             continue
  2974             continue
  2957 
  2975 
  2958         newentries.append(entry)
  2976         newentries.append(entry)
  2959 
  2977 
  3024     def __ne__(self, other):
  3042     def __ne__(self, other):
  3025         return self._cmp(other) != 0
  3043         return self._cmp(other) != 0
  3026 
  3044 
  3027 
  3045 
  3028 def sortclonebundleentries(ui, entries):
  3046 def sortclonebundleentries(ui, entries):
  3029     prefers = ui.configlist('ui', 'clonebundleprefers')
  3047     prefers = ui.configlist(b'ui', b'clonebundleprefers')
  3030     if not prefers:
  3048     if not prefers:
  3031         return list(entries)
  3049         return list(entries)
  3032 
  3050 
  3033     prefers = [p.split('=', 1) for p in prefers]
  3051     prefers = [p.split(b'=', 1) for p in prefers]
  3034 
  3052 
  3035     items = sorted(clonebundleentry(v, prefers) for v in entries)
  3053     items = sorted(clonebundleentry(v, prefers) for v in entries)
  3036     return [i.value for i in items]
  3054     return [i.value for i in items]
  3037 
  3055 
  3038 
  3056 
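# A minimal sketch (attribute values are assumptions, not from this change)
# of the ui.clonebundleprefers setting consumed by sortclonebundleentries()
# above; each item is split on '=' and matched against manifest attributes,
# with earlier entries taking priority:
#
#   [ui]
#   clonebundleprefers = COMPRESSION=zstd, COMPRESSION=gzip, VERSION=v2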
  3039 def trypullbundlefromurl(ui, repo, url):
  3057 def trypullbundlefromurl(ui, repo, url):
  3040     """Attempt to apply a bundle from a URL."""
  3058     """Attempt to apply a bundle from a URL."""
  3041     with repo.lock(), repo.transaction('bundleurl') as tr:
  3059     with repo.lock(), repo.transaction(b'bundleurl') as tr:
  3042         try:
  3060         try:
  3043             fh = urlmod.open(ui, url)
  3061             fh = urlmod.open(ui, url)
  3044             cg = readbundle(ui, fh, 'stream')
  3062             cg = readbundle(ui, fh, b'stream')
  3045 
  3063 
  3046             if isinstance(cg, streamclone.streamcloneapplier):
  3064             if isinstance(cg, streamclone.streamcloneapplier):
  3047                 cg.apply(repo)
  3065                 cg.apply(repo)
  3048             else:
  3066             else:
  3049                 bundle2.applybundle(repo, cg, tr, 'clonebundles', url)
  3067                 bundle2.applybundle(repo, cg, tr, b'clonebundles', url)
  3050             return True
  3068             return True
  3051         except urlerr.httperror as e:
  3069         except urlerr.httperror as e:
  3052             ui.warn(
  3070             ui.warn(
  3053                 _('HTTP error fetching bundle: %s\n')
  3071                 _(b'HTTP error fetching bundle: %s\n')
  3054                 % stringutil.forcebytestr(e)
  3072                 % stringutil.forcebytestr(e)
  3055             )
  3073             )
  3056         except urlerr.urlerror as e:
  3074         except urlerr.urlerror as e:
  3057             ui.warn(
  3075             ui.warn(
  3058                 _('error fetching bundle: %s\n')
  3076                 _(b'error fetching bundle: %s\n')
  3059                 % stringutil.forcebytestr(e.reason)
  3077                 % stringutil.forcebytestr(e.reason)
  3060             )
  3078             )
  3061 
  3079 
  3062         return False
  3080         return False