formatting: byteify all mercurial/ and hgext/ string literals
author Augie Fackler <augie@google.com>
date Sun, 06 Oct 2019 09:48:39 -0400
changeset 43077 687b865b95ad
parent 43076 2372284d9457
child 43078 88eba7103660
formatting: byteify all mercurial/ and hgext/ string literals

Done with
python3.7 contrib/byteify-strings.py -i $(hg files 'set:mercurial/**.py - mercurial/thirdparty/** + hgext/**.py - hgext/fsmonitor/pywatchman/** - mercurial/__init__.py')
black -l 80 -t py33 -S $(hg files 'set:**.py - mercurial/thirdparty/** - "contrib/python-zstandard/**" - hgext/fsmonitor/pywatchman/**')

# skip-blame mass-reformatting only

Differential Revision: https://phab.mercurial-scm.org/D6972
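For illustration, a minimal sketch of the kind of token-level rewrite such a script performs. This is an editor's simplified example, not the actual contrib/byteify-strings.py, which also handles r'' prefixes, docstrings, and keyword-argument names specially:

# Minimal sketch, for illustration only: prefix every bare (unprefixed)
# string literal with b''. Assumes Python 3's tokenize module.
import io
import tokenize

def byteify(source):
    # Walk the token stream and rewrite each unprefixed STRING token.
    out = []
    for toktype, tok, _, _, _ in tokenize.generate_tokens(
        io.StringIO(source).readline
    ):
        # A literal with no prefix starts directly with a quote character.
        if toktype == tokenize.STRING and tok[0] in '\'"':
            tok = 'b' + tok
        out.append((toktype, tok))
    # untokenize() accepts (type, string) pairs and re-spaces the result.
    return tokenize.untokenize(out)

print(byteify("testedwith = 'ships-with-hg-core'\n"))
# prints: testedwith = b'ships-with-hg-core' (modulo whitespace)

The diff below is then the result of running the real script over the listed files, followed by black to normalize the reflowed lines.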
hgext/absorb.py
hgext/acl.py
hgext/amend.py
hgext/automv.py
hgext/beautifygraph.py
hgext/blackbox.py
hgext/bookflow.py
hgext/bugzilla.py
hgext/censor.py
hgext/children.py
hgext/churn.py
hgext/clonebundles.py
hgext/closehead.py
hgext/commitextras.py
hgext/convert/__init__.py
hgext/convert/bzr.py
hgext/convert/common.py
hgext/convert/convcmd.py
hgext/convert/cvs.py
hgext/convert/cvsps.py
hgext/convert/darcs.py
hgext/convert/filemap.py
hgext/convert/git.py
hgext/convert/gnuarch.py
hgext/convert/hg.py
hgext/convert/monotone.py
hgext/convert/p4.py
hgext/convert/subversion.py
hgext/convert/transport.py
hgext/eol.py
hgext/extdiff.py
hgext/factotum.py
hgext/fastannotate/__init__.py
hgext/fastannotate/commands.py
hgext/fastannotate/context.py
hgext/fastannotate/formatter.py
hgext/fastannotate/protocol.py
hgext/fastannotate/revmap.py
hgext/fastannotate/support.py
hgext/fetch.py
hgext/fix.py
hgext/fsmonitor/__init__.py
hgext/fsmonitor/state.py
hgext/fsmonitor/watchmanclient.py
hgext/githelp.py
hgext/gpg.py
hgext/graphlog.py
hgext/hgk.py
hgext/highlight/__init__.py
hgext/highlight/highlight.py
hgext/histedit.py
hgext/infinitepush/__init__.py
hgext/infinitepush/bundleparts.py
hgext/infinitepush/common.py
hgext/infinitepush/fileindexapi.py
hgext/infinitepush/sqlindexapi.py
hgext/infinitepush/store.py
hgext/journal.py
hgext/keyword.py
hgext/largefiles/__init__.py
hgext/largefiles/basestore.py
hgext/largefiles/lfcommands.py
hgext/largefiles/lfutil.py
hgext/largefiles/localstore.py
hgext/largefiles/overrides.py
hgext/largefiles/proto.py
hgext/largefiles/remotestore.py
hgext/largefiles/reposetup.py
hgext/largefiles/storefactory.py
hgext/largefiles/wirestore.py
hgext/lfs/__init__.py
hgext/lfs/blobstore.py
hgext/lfs/pointer.py
hgext/lfs/wireprotolfsserver.py
hgext/lfs/wrapper.py
hgext/logtoprocess.py
hgext/mq.py
hgext/narrow/__init__.py
hgext/narrow/narrowbundle2.py
hgext/narrow/narrowcommands.py
hgext/narrow/narrowdirstate.py
hgext/narrow/narrowtemplates.py
hgext/narrow/narrowwirepeer.py
hgext/notify.py
hgext/pager.py
hgext/patchbomb.py
hgext/phabricator.py
hgext/purge.py
hgext/rebase.py
hgext/record.py
hgext/releasenotes.py
hgext/relink.py
hgext/remotefilelog/__init__.py
hgext/remotefilelog/basepack.py
hgext/remotefilelog/basestore.py
hgext/remotefilelog/connectionpool.py
hgext/remotefilelog/constants.py
hgext/remotefilelog/contentstore.py
hgext/remotefilelog/datapack.py
hgext/remotefilelog/debugcommands.py
hgext/remotefilelog/fileserverclient.py
hgext/remotefilelog/historypack.py
hgext/remotefilelog/metadatastore.py
hgext/remotefilelog/remotefilectx.py
hgext/remotefilelog/remotefilelog.py
hgext/remotefilelog/remotefilelogserver.py
hgext/remotefilelog/repack.py
hgext/remotefilelog/shallowbundle.py
hgext/remotefilelog/shallowrepo.py
hgext/remotefilelog/shallowutil.py
hgext/remotefilelog/shallowverifier.py
hgext/remotenames.py
hgext/schemes.py
hgext/share.py
hgext/show.py
hgext/sparse.py
hgext/split.py
hgext/sqlitestore.py
hgext/strip.py
hgext/transplant.py
hgext/uncommit.py
hgext/win32mbcs.py
hgext/win32text.py
hgext/zeroconf/Zeroconf.py
hgext/zeroconf/__init__.py
mercurial/archival.py
mercurial/bookmarks.py
mercurial/branchmap.py
mercurial/bundle2.py
mercurial/bundlerepo.py
mercurial/cacheutil.py
mercurial/cffi/bdiff.py
mercurial/cffi/bdiffbuild.py
mercurial/cffi/mpatch.py
mercurial/cffi/mpatchbuild.py
mercurial/cffi/osutil.py
mercurial/cffi/osutilbuild.py
mercurial/changegroup.py
mercurial/changelog.py
mercurial/chgserver.py
mercurial/cmdutil.py
mercurial/color.py
mercurial/commands.py
mercurial/commandserver.py
mercurial/config.py
mercurial/configitems.py
mercurial/context.py
mercurial/copies.py
mercurial/crecord.py
mercurial/dagop.py
mercurial/dagparser.py
mercurial/debugcommands.py
mercurial/destutil.py
mercurial/diffhelper.py
mercurial/diffutil.py
mercurial/dirstate.py
mercurial/dirstateguard.py
mercurial/discovery.py
mercurial/dispatch.py
mercurial/encoding.py
mercurial/error.py
mercurial/exchange.py
mercurial/exchangev2.py
mercurial/extensions.py
mercurial/exthelper.py
mercurial/fancyopts.py
mercurial/filelog.py
mercurial/filemerge.py
mercurial/fileset.py
mercurial/filesetlang.py
mercurial/formatter.py
mercurial/graphmod.py
mercurial/hbisect.py
mercurial/help.py
mercurial/hg.py
mercurial/hgweb/__init__.py
mercurial/hgweb/common.py
mercurial/hgweb/hgweb_mod.py
mercurial/hgweb/hgwebdir_mod.py
mercurial/hgweb/request.py
mercurial/hgweb/server.py
mercurial/hgweb/webcommands.py
mercurial/hgweb/webutil.py
mercurial/hgweb/wsgicgi.py
mercurial/hgweb/wsgiheaders.py
mercurial/hook.py
mercurial/httpconnection.py
mercurial/httppeer.py
mercurial/i18n.py
mercurial/interfaces/dirstate.py
mercurial/interfaces/repository.py
mercurial/interfaces/util.py
mercurial/keepalive.py
mercurial/linelog.py
mercurial/localrepo.py
mercurial/lock.py
mercurial/logcmdutil.py
mercurial/logexchange.py
mercurial/loggingutil.py
mercurial/lsprof.py
mercurial/lsprofcalltree.py
mercurial/mail.py
mercurial/manifest.py
mercurial/match.py
mercurial/mdiff.py
mercurial/merge.py
mercurial/mergeutil.py
mercurial/minifileset.py
mercurial/minirst.py
mercurial/namespaces.py
mercurial/narrowspec.py
mercurial/node.py
mercurial/obsolete.py
mercurial/obsutil.py
mercurial/parser.py
mercurial/patch.py
mercurial/pathutil.py
mercurial/phases.py
mercurial/posix.py
mercurial/profiling.py
mercurial/progress.py
mercurial/pure/base85.py
mercurial/pure/bdiff.py
mercurial/pure/charencode.py
mercurial/pure/mpatch.py
mercurial/pure/osutil.py
mercurial/pure/parsers.py
mercurial/pushkey.py
mercurial/pvec.py
mercurial/pycompat.py
mercurial/rcutil.py
mercurial/registrar.py
mercurial/repair.py
mercurial/repocache.py
mercurial/repoview.py
mercurial/revlog.py
mercurial/revlogutils/deltas.py
mercurial/revlogutils/flagutil.py
mercurial/revlogutils/sidedata.py
mercurial/revset.py
mercurial/revsetlang.py
mercurial/rewriteutil.py
mercurial/scmposix.py
mercurial/scmutil.py
mercurial/scmwindows.py
mercurial/server.py
mercurial/setdiscovery.py
mercurial/shelve.py
mercurial/similar.py
mercurial/simplemerge.py
mercurial/smartset.py
mercurial/sparse.py
mercurial/sshpeer.py
mercurial/sslutil.py
mercurial/stack.py
mercurial/state.py
mercurial/statichttprepo.py
mercurial/statprof.py
mercurial/store.py
mercurial/streamclone.py
mercurial/subrepo.py
mercurial/subrepoutil.py
mercurial/tagmerge.py
mercurial/tags.py
mercurial/templatefilters.py
mercurial/templatefuncs.py
mercurial/templatekw.py
mercurial/templater.py
mercurial/templateutil.py
mercurial/testing/storage.py
mercurial/transaction.py
mercurial/treediscovery.py
mercurial/txnutil.py
mercurial/ui.py
mercurial/unionrepo.py
mercurial/upgrade.py
mercurial/url.py
mercurial/urllibcompat.py
mercurial/util.py
mercurial/utils/cborutil.py
mercurial/utils/compression.py
mercurial/utils/dateutil.py
mercurial/utils/procutil.py
mercurial/utils/repoviewutil.py
mercurial/utils/storageutil.py
mercurial/utils/stringutil.py
mercurial/verify.py
mercurial/vfs.py
mercurial/win32.py
mercurial/windows.py
mercurial/wireprotoframing.py
mercurial/wireprotoserver.py
mercurial/wireprototypes.py
mercurial/wireprotov1peer.py
mercurial/wireprotov1server.py
mercurial/wireprotov2peer.py
mercurial/wireprotov2server.py
mercurial/worker.py
tests/test-hook.t
tests/test-subrepo.t
--- a/hgext/absorb.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/absorb.py	Sun Oct 06 09:48:39 2019 -0400
@@ -59,7 +59,7 @@
 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
 # be specifying the version(s) of Mercurial they are tested with, or
 # leave the attribute unspecified.
-testedwith = 'ships-with-hg-core'
+testedwith = b'ships-with-hg-core'
 
 cmdtable = {}
 command = registrar.command(cmdtable)
@@ -67,14 +67,14 @@
 configtable = {}
 configitem = registrar.configitem(configtable)
 
-configitem('absorb', 'add-noise', default=True)
-configitem('absorb', 'amend-flag', default=None)
-configitem('absorb', 'max-stack-size', default=50)
+configitem(b'absorb', b'add-noise', default=True)
+configitem(b'absorb', b'amend-flag', default=None)
+configitem(b'absorb', b'max-stack-size', default=50)
 
 colortable = {
-    'absorb.description': 'yellow',
-    'absorb.node': 'blue bold',
-    'absorb.path': 'bold',
+    b'absorb.description': b'yellow',
+    b'absorb.node': b'blue bold',
+    b'absorb.path': b'bold',
 }
 
 defaultdict = collections.defaultdict
@@ -98,7 +98,7 @@
     """minimal filecontext representing an empty file"""
 
     def data(self):
-        return ''
+        return b''
 
     def node(self):
         return node.nullid
@@ -364,11 +364,11 @@
             if self.ui.debugflag:
                 idx = (max(rev - 1, 0)) // 2
                 self.ui.write(
-                    _('%s: chunk %d:%d -> %d lines\n')
+                    _(b'%s: chunk %d:%d -> %d lines\n')
                     % (node.short(self.fctxs[idx].node()), a1, a2, len(blines))
                 )
             self.linelog.replacelines(rev, a1, a2, b1, b2)
-        if self.opts.get('edit_lines', False):
+        if self.opts.get(b'edit_lines', False):
             self.finalcontents = self._checkoutlinelogwithedits()
         else:
             self.finalcontents = self._checkoutlinelog()
@@ -434,7 +434,7 @@
         """like mdiff.allblocks, but only care about differences"""
         blocks = mdiff.allblocks(a, b, lines1=alines, lines2=blines)
         for chunk, btype in blocks:
-            if btype != '!':
+            if btype != b'!':
                 continue
             yield chunk
 
@@ -443,7 +443,7 @@
         this is similar to running a partial "annotate".
         """
         llog = linelog.linelog()
-        a, alines = '', []
+        a, alines = b'', []
         for i in pycompat.xrange(len(self.contents)):
             b, blines = self.contents[i], self.contentlines[i]
             llrev = i * 2 + 1
@@ -459,7 +459,7 @@
         for i in pycompat.xrange(len(self.contents)):
             rev = (i + 1) * 2
             self.linelog.annotate(rev)
-            content = ''.join(map(self._getline, self.linelog.annotateresult))
+            content = b''.join(map(self._getline, self.linelog.annotateresult))
             contents.append(content)
         return contents
 
@@ -469,8 +469,8 @@
         # header
         editortext = (
             _(
-                'HG: editing %s\nHG: "y" means the line to the right '
-                'exists in the changeset to the top\nHG:\n'
+                b'HG: editing %s\nHG: "y" means the line to the right '
+                b'exists in the changeset to the top\nHG:\n'
             )
             % self.fctxs[-1].path()
         )
@@ -481,13 +481,13 @@
             if not isinstance(f, emptyfilecontext)
         ]
         for i, (j, f) in enumerate(visiblefctxs):
-            editortext += _('HG: %s/%s %s %s\n') % (
-                '|' * i,
-                '-' * (len(visiblefctxs) - i + 1),
+            editortext += _(b'HG: %s/%s %s %s\n') % (
+                b'|' * i,
+                b'-' * (len(visiblefctxs) - i + 1),
                 node.short(f.node()),
-                f.description().split('\n', 1)[0],
+                f.description().split(b'\n', 1)[0],
             )
-        editortext += _('HG: %s\n') % ('|' * len(visiblefctxs))
+        editortext += _(b'HG: %s\n') % (b'|' * len(visiblefctxs))
         # figure out the lifetime of a line, this is relatively inefficient,
         # but probably fine
         lineset = defaultdict(lambda: set())  # {(llrev, linenum): {llrev}}
@@ -497,33 +497,33 @@
                 lineset[l].add(i)
         # append lines
         for l in alllines:
-            editortext += '    %s : %s' % (
-                ''.join(
+            editortext += b'    %s : %s' % (
+                b''.join(
                     [
-                        ('y' if i in lineset[l] else ' ')
+                        (b'y' if i in lineset[l] else b' ')
                         for i, _f in visiblefctxs
                     ]
                 ),
                 self._getline(l),
             )
         # run editor
-        editedtext = self.ui.edit(editortext, '', action='absorb')
+        editedtext = self.ui.edit(editortext, b'', action=b'absorb')
         if not editedtext:
-            raise error.Abort(_('empty editor text'))
+            raise error.Abort(_(b'empty editor text'))
         # parse edited result
-        contents = ['' for i in self.fctxs]
+        contents = [b'' for i in self.fctxs]
         leftpadpos = 4
         colonpos = leftpadpos + len(visiblefctxs) + 1
         for l in mdiff.splitnewlines(editedtext):
-            if l.startswith('HG:'):
+            if l.startswith(b'HG:'):
                 continue
-            if l[colonpos - 1 : colonpos + 2] != ' : ':
-                raise error.Abort(_('malformed line: %s') % l)
+            if l[colonpos - 1 : colonpos + 2] != b' : ':
+                raise error.Abort(_(b'malformed line: %s') % l)
             linecontent = l[colonpos + 2 :]
             for i, ch in enumerate(
                 pycompat.bytestr(l[leftpadpos : colonpos - 1])
             ):
-                if ch == 'y':
+                if ch == b'y':
                     contents[visiblefctxs[i][0]] += linecontent
         # chunkstats is hard to calculate if anything changes, therefore
         # set them to just a simple value (1, 1).
@@ -589,7 +589,7 @@
 
     def _showchanges(self, fm, alines, blines, chunk, fixups):
         def trim(line):
-            if line.endswith('\n'):
+            if line.endswith(b'\n'):
                 line = line[:-1]
             return line
 
@@ -605,25 +605,25 @@
 
         fm.startitem()
         fm.write(
-            'hunk',
-            '        %s\n',
-            '@@ -%d,%d +%d,%d @@' % (a1, a2 - a1, b1, b2 - b1),
-            label='diff.hunk',
+            b'hunk',
+            b'        %s\n',
+            b'@@ -%d,%d +%d,%d @@' % (a1, a2 - a1, b1, b2 - b1),
+            label=b'diff.hunk',
         )
-        fm.data(path=self.path, linetype='hunk')
+        fm.data(path=self.path, linetype=b'hunk')
 
         def writeline(idx, diffchar, line, linetype, linelabel):
             fm.startitem()
-            node = ''
+            node = b''
             if idx:
                 ctx = self.fctxs[idx]
                 fm.context(fctx=ctx)
                 node = ctx.hex()
                 self.ctxaffected.add(ctx.changectx())
-            fm.write('node', '%-7.7s ', node, label='absorb.node')
+            fm.write(b'node', b'%-7.7s ', node, label=b'absorb.node')
             fm.write(
-                'diffchar ' + linetype,
-                '%s%s\n',
+                b'diffchar ' + linetype,
+                b'%s%s\n',
                 diffchar,
                 line,
                 label=linelabel,
@@ -632,11 +632,19 @@
 
         for i in pycompat.xrange(a1, a2):
             writeline(
-                aidxs[i - a1], '-', trim(alines[i]), 'deleted', 'diff.deleted'
+                aidxs[i - a1],
+                b'-',
+                trim(alines[i]),
+                b'deleted',
+                b'diff.deleted',
             )
         for i in pycompat.xrange(b1, b2):
             writeline(
-                bidxs[i - b1], '+', trim(blines[i]), 'inserted', 'diff.inserted'
+                bidxs[i - b1],
+                b'+',
+                trim(blines[i]),
+                b'inserted',
+                b'diff.inserted',
             )
 
 
@@ -681,7 +689,7 @@
         self.paths = []
         # but if --edit-lines is used, the user may want to edit files
         # even if they are not modified
-        editopt = self.opts.get('edit_lines')
+        editopt = self.opts.get(b'edit_lines')
         if not self.status.modified and editopt and match:
             interestingpaths = match.files()
         else:
@@ -691,7 +699,7 @@
         # sorting is necessary to eliminate ambiguity for the "double move"
         # case: "hg cp A B; hg cp A C; hg rm A", then only "B" can affect "A".
         for path in sorted(interestingpaths):
-            self.ui.debug('calculating fixups for %s\n' % path)
+            self.ui.debug(b'calculating fixups for %s\n' % path)
             targetfctx = targetctx[path]
             fctxs, ctx2fctx = getfilestack(self.stack, path, seenfctxs)
             # ignore symbolic links or binary, or unchanged files
@@ -708,9 +716,9 @@
             fstate = filefixupstate(fctxs, path, ui=self.ui, opts=self.opts)
             if fm is not None:
                 fm.startitem()
-                fm.plain('showing changes for ')
-                fm.write('path', '%s\n', path, label='absorb.path')
-                fm.data(linetype='path')
+                fm.plain(b'showing changes for ')
+                fm.write(b'path', b'%s\n', path, label=b'absorb.path')
+                fm.data(linetype=b'path')
             fstate.diffwith(targetfctx, fm)
             self.fixupmap[path] = fstate
             self.paths.append(path)
@@ -720,7 +728,7 @@
         """apply fixups to individual filefixupstates"""
         for path, state in self.fixupmap.iteritems():
             if self.ui.debugflag:
-                self.ui.write(_('applying fixups to %s\n') % path)
+                self.ui.write(_(b'applying fixups to %s\n') % path)
             state.apply()
 
     @property
@@ -733,10 +741,10 @@
 
     def commit(self):
         """commit changes. update self.finalnode, self.replacemap"""
-        with self.repo.transaction('absorb') as tr:
+        with self.repo.transaction(b'absorb') as tr:
             self._commitstack()
             self._movebookmarks(tr)
-            if self.repo['.'].node() in self.replacemap:
+            if self.repo[b'.'].node() in self.replacemap:
                 self._moveworkingdirectoryparent()
             self._cleanupoldcommits()
         return self.finalnode
@@ -750,14 +758,14 @@
             for path, stat in chunkstats.iteritems():
                 if stat[0]:
                     ui.write(
-                        _('%s: %d of %d chunk(s) applied\n')
+                        _(b'%s: %d of %d chunk(s) applied\n')
                         % (path, stat[0], stat[1])
                     )
         elif not ui.quiet:
             # a summary for all files
             stats = chunkstats.values()
             applied, total = (sum(s[i] for s in stats) for i in (0, 1))
-            ui.write(_('%d of %d chunk(s) applied\n') % (applied, total))
+            ui.write(_(b'%d of %d chunk(s) applied\n') % (applied, total))
 
     def _commitstack(self):
         """make new commits. update self.finalnode, self.replacemap.
@@ -777,7 +785,7 @@
             if self._willbecomenoop(memworkingcopy, ctx, nextp1):
                 # changeset is no longer necessary
                 self.replacemap[ctx.node()] = None
-                msg = _('became empty and was dropped')
+                msg = _(b'became empty and was dropped')
             else:
                 # changeset needs re-commit
                 nodestr = self._commitsingle(memworkingcopy, ctx, p1=nextp1)
@@ -785,21 +793,21 @@
                 nextp1 = lastcommitted
                 self.replacemap[ctx.node()] = lastcommitted.node()
                 if memworkingcopy:
-                    msg = _('%d file(s) changed, became %s') % (
+                    msg = _(b'%d file(s) changed, became %s') % (
                         len(memworkingcopy),
                         self._ctx2str(lastcommitted),
                     )
                 else:
-                    msg = _('became %s') % self._ctx2str(lastcommitted)
+                    msg = _(b'became %s') % self._ctx2str(lastcommitted)
             if self.ui.verbose and msg:
-                self.ui.write(_('%s: %s\n') % (self._ctx2str(ctx), msg))
+                self.ui.write(_(b'%s: %s\n') % (self._ctx2str(ctx), msg))
         self.finalnode = lastcommitted and lastcommitted.node()
 
     def _ctx2str(self, ctx):
         if self.ui.debugflag:
-            return '%d:%s' % (ctx.rev(), ctx.hex())
+            return b'%d:%s' % (ctx.rev(), ctx.hex())
         else:
-            return '%d:%s' % (ctx.rev(), node.short(ctx.node()))
+            return b'%d:%s' % (ctx.rev(), node.short(ctx.node()))
 
     def _getnewfilecontents(self, ctx):
         """(ctx) -> {path: str}
@@ -832,18 +840,18 @@
                 changes.append((name, hsh))
                 if self.ui.verbose:
                     self.ui.write(
-                        _('moving bookmark %s to %s\n') % (name, node.hex(hsh))
+                        _(b'moving bookmark %s to %s\n') % (name, node.hex(hsh))
                     )
             else:
                 changes.append((name, None))
                 if self.ui.verbose:
-                    self.ui.write(_('deleting bookmark %s\n') % name)
+                    self.ui.write(_(b'deleting bookmark %s\n') % name)
         repo._bookmarks.applychanges(repo, tr, changes)
 
     def _moveworkingdirectoryparent(self):
         if not self.finalnode:
             # Find the latest not-{obsoleted,stripped} parent.
-            revs = self.repo.revs('max(::. - %ln)', self.replacemap.keys())
+            revs = self.repo.revs(b'max(::. - %ln)', self.replacemap.keys())
             ctx = self.repo[revs.first()]
             self.finalnode = ctx.node()
         else:
@@ -854,7 +862,7 @@
         # be slow. in absorb's case, no need to invalidate fsmonitorstate.
         noop = lambda: 0
         restore = noop
-        if util.safehasattr(dirstate, '_fsmonitorstate'):
+        if util.safehasattr(dirstate, b'_fsmonitorstate'):
             bak = dirstate._fsmonitorstate.invalidate
 
             def restore():
@@ -901,8 +909,8 @@
         """
         parents = p1 and (p1, node.nullid)
         extra = ctx.extra()
-        if self._useobsolete and self.ui.configbool('absorb', 'add-noise'):
-            extra['absorb_source'] = ctx.hex()
+        if self._useobsolete and self.ui.configbool(b'absorb', b'add-noise'):
+            extra[b'absorb_source'] = ctx.hex()
         mctx = overlaycontext(memworkingcopy, ctx, parents, extra=extra)
         return mctx.commit()
 
@@ -918,7 +926,7 @@
         }
         if replacements:
             scmutil.cleanupnodes(
-                self.repo, replacements, operation='absorb', fixphase=True
+                self.repo, replacements, operation=b'absorb', fixphase=True
             )
 
 
@@ -935,7 +943,7 @@
     patchlines = mdiff.splitnewlines(buf.getvalue())
     # hunk.prettystr() will update hunk.removed
     a2 = a1 + hunk.removed
-    blines = [l[1:] for l in patchlines[1:] if not l.startswith('-')]
+    blines = [l[1:] for l in patchlines[1:] if not l.startswith(b'-')]
     return path, (a1, a2, blines)
 
 
@@ -967,7 +975,7 @@
         lines = mdiff.splitnewlines(ctx[path].data())
         for a1, a2, blines in patches:
             lines[a1:a2] = blines
-        memworkingcopy[path] = ''.join(lines)
+        memworkingcopy[path] = b''.join(lines)
     return overlaycontext(memworkingcopy, ctx)
 
 
@@ -979,18 +987,21 @@
     return fixupstate.
     """
     if stack is None:
-        limit = ui.configint('absorb', 'max-stack-size')
-        headctx = repo['.']
+        limit = ui.configint(b'absorb', b'max-stack-size')
+        headctx = repo[b'.']
         if len(headctx.parents()) > 1:
-            raise error.Abort(_('cannot absorb into a merge'))
+            raise error.Abort(_(b'cannot absorb into a merge'))
         stack = getdraftstack(headctx, limit)
         if limit and len(stack) >= limit:
             ui.warn(
-                _('absorb: only the recent %d changesets will ' 'be analysed\n')
+                _(
+                    b'absorb: only the recent %d changesets will '
+                    b'be analysed\n'
+                )
                 % limit
             )
     if not stack:
-        raise error.Abort(_('no mutable changeset to change'))
+        raise error.Abort(_(b'no mutable changeset to change'))
     if targetctx is None:  # default to working copy
         targetctx = repo[None]
     if pats is None:
@@ -999,85 +1010,89 @@
         opts = {}
     state = fixupstate(stack, ui=ui, opts=opts)
     matcher = scmutil.match(targetctx, pats, opts)
-    if opts.get('interactive'):
+    if opts.get(b'interactive'):
         diff = patch.diff(repo, stack[-1].node(), targetctx.node(), matcher)
         origchunks = patch.parsepatch(diff)
         chunks = cmdutil.recordfilter(ui, origchunks, matcher)[0]
         targetctx = overlaydiffcontext(stack[-1], chunks)
     fm = None
-    if opts.get('print_changes') or not opts.get('apply_changes'):
-        fm = ui.formatter('absorb', opts)
+    if opts.get(b'print_changes') or not opts.get(b'apply_changes'):
+        fm = ui.formatter(b'absorb', opts)
     state.diffwith(targetctx, matcher, fm)
     if fm is not None:
         fm.startitem()
-        fm.write("count", "\n%d changesets affected\n", len(state.ctxaffected))
-        fm.data(linetype='summary')
+        fm.write(
+            b"count", b"\n%d changesets affected\n", len(state.ctxaffected)
+        )
+        fm.data(linetype=b'summary')
         for ctx in reversed(stack):
             if ctx not in state.ctxaffected:
                 continue
             fm.startitem()
             fm.context(ctx=ctx)
-            fm.data(linetype='changeset')
-            fm.write('node', '%-7.7s ', ctx.hex(), label='absorb.node')
+            fm.data(linetype=b'changeset')
+            fm.write(b'node', b'%-7.7s ', ctx.hex(), label=b'absorb.node')
             descfirstline = ctx.description().splitlines()[0]
             fm.write(
-                'descfirstline',
-                '%s\n',
+                b'descfirstline',
+                b'%s\n',
                 descfirstline,
-                label='absorb.description',
+                label=b'absorb.description',
             )
         fm.end()
-    if not opts.get('dry_run'):
+    if not opts.get(b'dry_run'):
         if (
-            not opts.get('apply_changes')
+            not opts.get(b'apply_changes')
             and state.ctxaffected
-            and ui.promptchoice("apply changes (yn)? $$ &Yes $$ &No", default=1)
+            and ui.promptchoice(
+                b"apply changes (yn)? $$ &Yes $$ &No", default=1
+            )
         ):
-            raise error.Abort(_('absorb cancelled\n'))
+            raise error.Abort(_(b'absorb cancelled\n'))
 
         state.apply()
         if state.commit():
             state.printchunkstats()
         elif not ui.quiet:
-            ui.write(_('nothing applied\n'))
+            ui.write(_(b'nothing applied\n'))
     return state
 
 
 @command(
-    'absorb',
+    b'absorb',
     [
         (
-            'a',
-            'apply-changes',
+            b'a',
+            b'apply-changes',
             None,
-            _('apply changes without prompting for confirmation'),
+            _(b'apply changes without prompting for confirmation'),
         ),
         (
-            'p',
-            'print-changes',
+            b'p',
+            b'print-changes',
             None,
-            _('always print which changesets are modified by which changes'),
+            _(b'always print which changesets are modified by which changes'),
         ),
         (
-            'i',
-            'interactive',
+            b'i',
+            b'interactive',
             None,
-            _('interactively select which chunks to apply (EXPERIMENTAL)'),
+            _(b'interactively select which chunks to apply (EXPERIMENTAL)'),
         ),
         (
-            'e',
-            'edit-lines',
+            b'e',
+            b'edit-lines',
             None,
             _(
-                'edit what lines belong to which changesets before commit '
-                '(EXPERIMENTAL)'
+                b'edit what lines belong to which changesets before commit '
+                b'(EXPERIMENTAL)'
             ),
         ),
     ]
     + commands.dryrunopts
     + commands.templateopts
     + commands.walkopts,
-    _('hg absorb [OPTION] [FILE]...'),
+    _(b'hg absorb [OPTION] [FILE]...'),
     helpcategory=command.CATEGORY_COMMITTING,
     helpbasic=True,
 )
@@ -1108,7 +1123,7 @@
     opts = pycompat.byteskwargs(opts)
 
     with repo.wlock(), repo.lock():
-        if not opts['dry_run']:
+        if not opts[b'dry_run']:
             cmdutil.checkunfinished(repo)
 
         state = absorb(ui, repo, pats=pats, opts=opts)
--- a/hgext/acl.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/acl.py	Sun Oct 06 09:48:39 2019 -0400
@@ -232,66 +232,66 @@
 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
 # be specifying the version(s) of Mercurial they are tested with, or
 # leave the attribute unspecified.
-testedwith = 'ships-with-hg-core'
+testedwith = b'ships-with-hg-core'
 
 configtable = {}
 configitem = registrar.configitem(configtable)
 
 # deprecated config: acl.config
 configitem(
-    'acl', 'config', default=None,
+    b'acl', b'config', default=None,
 )
 configitem(
-    'acl.groups', '.*', default=None, generic=True,
+    b'acl.groups', b'.*', default=None, generic=True,
 )
 configitem(
-    'acl.deny.branches', '.*', default=None, generic=True,
+    b'acl.deny.branches', b'.*', default=None, generic=True,
 )
 configitem(
-    'acl.allow.branches', '.*', default=None, generic=True,
+    b'acl.allow.branches', b'.*', default=None, generic=True,
 )
 configitem(
-    'acl.deny', '.*', default=None, generic=True,
+    b'acl.deny', b'.*', default=None, generic=True,
 )
 configitem(
-    'acl.allow', '.*', default=None, generic=True,
+    b'acl.allow', b'.*', default=None, generic=True,
 )
 configitem(
-    'acl', 'sources', default=lambda: ['serve'],
+    b'acl', b'sources', default=lambda: [b'serve'],
 )
 
 
 def _getusers(ui, group):
 
     # First, try to use group definition from section [acl.groups]
-    hgrcusers = ui.configlist('acl.groups', group)
+    hgrcusers = ui.configlist(b'acl.groups', group)
     if hgrcusers:
         return hgrcusers
 
-    ui.debug('acl: "%s" not defined in [acl.groups]\n' % group)
+    ui.debug(b'acl: "%s" not defined in [acl.groups]\n' % group)
     # If no users found in group definition, get users from OS-level group
     try:
         return util.groupmembers(group)
     except KeyError:
-        raise error.Abort(_("group '%s' is undefined") % group)
+        raise error.Abort(_(b"group '%s' is undefined") % group)
 
 
 def _usermatch(ui, user, usersorgroups):
 
-    if usersorgroups == '*':
+    if usersorgroups == b'*':
         return True
 
-    for ug in usersorgroups.replace(',', ' ').split():
+    for ug in usersorgroups.replace(b',', b' ').split():
 
-        if ug.startswith('!'):
+        if ug.startswith(b'!'):
             # Test for excluded user or group. Format:
             # if ug is a user  name: !username
             # if ug is a group name: !@groupname
             ug = ug[1:]
             if (
-                not ug.startswith('@')
+                not ug.startswith(b'@')
                 and user != ug
-                or ug.startswith('@')
+                or ug.startswith(b'@')
                 and user not in _getusers(ui, ug[1:])
             ):
                 return True
@@ -299,7 +299,9 @@
         # Test for user or group. Format:
         # if ug is a user  name: username
         # if ug is a group name: @groupname
-        elif user == ug or ug.startswith('@') and user in _getusers(ui, ug[1:]):
+        elif (
+            user == ug or ug.startswith(b'@') and user in _getusers(ui, ug[1:])
+        ):
             return True
 
     return False
@@ -308,14 +310,14 @@
 def buildmatch(ui, repo, user, key):
     '''return tuple of (match function, list enabled).'''
     if not ui.has_section(key):
-        ui.debug('acl: %s not enabled\n' % key)
+        ui.debug(b'acl: %s not enabled\n' % key)
         return None
 
     pats = [
         pat for pat, users in ui.configitems(key) if _usermatch(ui, user, users)
     ]
     ui.debug(
-        'acl: %s enabled, %d entries for user %s\n' % (key, len(pats), user)
+        b'acl: %s enabled, %d entries for user %s\n' % (key, len(pats), user)
     )
 
     # Branch-based ACL
@@ -323,14 +325,14 @@
         if pats:
             # If there's an asterisk (meaning "any branch"), always return True;
             # Otherwise, test if b is in pats
-            if '*' in pats:
+            if b'*' in pats:
                 return util.always
             return lambda b: b in pats
         return util.never
 
     # Path-based ACL
     if pats:
-        return match.match(repo.root, '', pats)
+        return match.match(repo.root, b'', pats)
     return util.never
 
 
@@ -342,122 +344,128 @@
     never loaded. This function ensure the extension is enabled when running
     hooks.
     """
-    if 'acl' in ui._knownconfig:
+    if b'acl' in ui._knownconfig:
         return
-    ui.setconfig('extensions', 'acl', '', source='internal')
-    extensions.loadall(ui, ['acl'])
+    ui.setconfig(b'extensions', b'acl', b'', source=b'internal')
+    extensions.loadall(ui, [b'acl'])
 
 
 def hook(ui, repo, hooktype, node=None, source=None, **kwargs):
 
     ensureenabled(ui)
 
-    if hooktype not in ['pretxnchangegroup', 'pretxncommit', 'prepushkey']:
+    if hooktype not in [b'pretxnchangegroup', b'pretxncommit', b'prepushkey']:
         raise error.Abort(
             _(
-                'config error - hook type "%s" cannot stop '
-                'incoming changesets, commits, nor bookmarks'
+                b'config error - hook type "%s" cannot stop '
+                b'incoming changesets, commits, nor bookmarks'
             )
             % hooktype
         )
-    if hooktype == 'pretxnchangegroup' and source not in ui.configlist(
-        'acl', 'sources'
+    if hooktype == b'pretxnchangegroup' and source not in ui.configlist(
+        b'acl', b'sources'
     ):
-        ui.debug('acl: changes have source "%s" - skipping\n' % source)
+        ui.debug(b'acl: changes have source "%s" - skipping\n' % source)
         return
 
     user = None
-    if source == 'serve' and r'url' in kwargs:
-        url = kwargs[r'url'].split(':')
-        if url[0] == 'remote' and url[1].startswith('http'):
+    if source == b'serve' and r'url' in kwargs:
+        url = kwargs[r'url'].split(b':')
+        if url[0] == b'remote' and url[1].startswith(b'http'):
             user = urlreq.unquote(url[3])
 
     if user is None:
         user = procutil.getuser()
 
-    ui.debug('acl: checking access for user "%s"\n' % user)
+    ui.debug(b'acl: checking access for user "%s"\n' % user)
 
-    if hooktype == 'prepushkey':
+    if hooktype == b'prepushkey':
         _pkhook(ui, repo, hooktype, node, source, user, **kwargs)
     else:
         _txnhook(ui, repo, hooktype, node, source, user, **kwargs)
 
 
 def _pkhook(ui, repo, hooktype, node, source, user, **kwargs):
-    if kwargs[r'namespace'] == 'bookmarks':
+    if kwargs[r'namespace'] == b'bookmarks':
         bookmark = kwargs[r'key']
         ctx = kwargs[r'new']
-        allowbookmarks = buildmatch(ui, None, user, 'acl.allow.bookmarks')
-        denybookmarks = buildmatch(ui, None, user, 'acl.deny.bookmarks')
+        allowbookmarks = buildmatch(ui, None, user, b'acl.allow.bookmarks')
+        denybookmarks = buildmatch(ui, None, user, b'acl.deny.bookmarks')
 
         if denybookmarks and denybookmarks(bookmark):
             raise error.Abort(
-                _('acl: user "%s" denied on bookmark "%s"' ' (changeset "%s")')
+                _(
+                    b'acl: user "%s" denied on bookmark "%s"'
+                    b' (changeset "%s")'
+                )
                 % (user, bookmark, ctx)
             )
         if allowbookmarks and not allowbookmarks(bookmark):
             raise error.Abort(
                 _(
-                    'acl: user "%s" not allowed on bookmark "%s"'
-                    ' (changeset "%s")'
+                    b'acl: user "%s" not allowed on bookmark "%s"'
+                    b' (changeset "%s")'
                 )
                 % (user, bookmark, ctx)
             )
         ui.debug(
-            'acl: bookmark access granted: "%s" on bookmark "%s"\n'
+            b'acl: bookmark access granted: "%s" on bookmark "%s"\n'
             % (ctx, bookmark)
         )
 
 
 def _txnhook(ui, repo, hooktype, node, source, user, **kwargs):
     # deprecated config: acl.config
-    cfg = ui.config('acl', 'config')
+    cfg = ui.config(b'acl', b'config')
     if cfg:
         ui.readconfig(
             cfg,
             sections=[
-                'acl.groups',
-                'acl.allow.branches',
-                'acl.deny.branches',
-                'acl.allow',
-                'acl.deny',
+                b'acl.groups',
+                b'acl.allow.branches',
+                b'acl.deny.branches',
+                b'acl.allow',
+                b'acl.deny',
             ],
         )
 
-    allowbranches = buildmatch(ui, None, user, 'acl.allow.branches')
-    denybranches = buildmatch(ui, None, user, 'acl.deny.branches')
-    allow = buildmatch(ui, repo, user, 'acl.allow')
-    deny = buildmatch(ui, repo, user, 'acl.deny')
+    allowbranches = buildmatch(ui, None, user, b'acl.allow.branches')
+    denybranches = buildmatch(ui, None, user, b'acl.deny.branches')
+    allow = buildmatch(ui, repo, user, b'acl.allow')
+    deny = buildmatch(ui, repo, user, b'acl.deny')
 
     for rev in pycompat.xrange(repo[node].rev(), len(repo)):
         ctx = repo[rev]
         branch = ctx.branch()
         if denybranches and denybranches(branch):
             raise error.Abort(
-                _('acl: user "%s" denied on branch "%s"' ' (changeset "%s")')
+                _(b'acl: user "%s" denied on branch "%s"' b' (changeset "%s")')
                 % (user, branch, ctx)
             )
         if allowbranches and not allowbranches(branch):
             raise error.Abort(
                 _(
-                    'acl: user "%s" not allowed on branch "%s"'
-                    ' (changeset "%s")'
+                    b'acl: user "%s" not allowed on branch "%s"'
+                    b' (changeset "%s")'
                 )
                 % (user, branch, ctx)
             )
         ui.debug(
-            'acl: branch access granted: "%s" on branch "%s"\n' % (ctx, branch)
+            b'acl: branch access granted: "%s" on branch "%s"\n' % (ctx, branch)
         )
 
         for f in ctx.files():
             if deny and deny(f):
                 raise error.Abort(
-                    _('acl: user "%s" denied on "%s"' ' (changeset "%s")')
+                    _(b'acl: user "%s" denied on "%s"' b' (changeset "%s")')
                     % (user, f, ctx)
                 )
             if allow and not allow(f):
                 raise error.Abort(
-                    _('acl: user "%s" not allowed on "%s"' ' (changeset "%s")')
+                    _(
+                        b'acl: user "%s" not allowed on "%s"'
+                        b' (changeset "%s")'
+                    )
                     % (user, f, ctx)
                 )
-        ui.debug('acl: path access granted: "%s"\n' % ctx)
+        ui.debug(b'acl: path access granted: "%s"\n' % ctx)
--- a/hgext/amend.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/amend.py	Sun Oct 06 09:48:39 2019 -0400
@@ -24,23 +24,23 @@
 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
 # be specifying the version(s) of Mercurial they are tested with, or
 # leave the attribute unspecified.
-testedwith = 'ships-with-hg-core'
+testedwith = b'ships-with-hg-core'
 
 cmdtable = {}
 command = registrar.command(cmdtable)
 
 
 @command(
-    'amend',
+    b'amend',
     [
         (
-            'A',
-            'addremove',
+            b'A',
+            b'addremove',
             None,
-            _('mark new/missing files as added/removed before committing'),
+            _(b'mark new/missing files as added/removed before committing'),
         ),
-        ('e', 'edit', None, _('invoke editor on commit messages')),
-        ('i', 'interactive', None, _('use interactive mode')),
+        (b'e', b'edit', None, _(b'invoke editor on commit messages')),
+        (b'i', b'interactive', None, _(b'use interactive mode')),
         (
             b'',
             b'close-branch',
@@ -48,13 +48,13 @@
             _(b'mark a branch as closed, hiding it from the branch list'),
         ),
         (b's', b'secret', None, _(b'use the secret phase for committing')),
-        ('n', 'note', '', _('store a note on the amend')),
+        (b'n', b'note', b'', _(b'store a note on the amend')),
     ]
     + cmdutil.walkopts
     + cmdutil.commitopts
     + cmdutil.commitopts2
     + cmdutil.commitopts3,
-    _('[OPTION]... [FILE]...'),
+    _(b'[OPTION]... [FILE]...'),
     helpcategory=command.CATEGORY_COMMITTING,
     inferrepo=True,
 )
@@ -70,7 +70,7 @@
     cmdutil.checknotesize(ui, opts)
 
     with repo.wlock(), repo.lock():
-        if not opts.get('logfile'):
-            opts['message'] = opts.get('message') or repo['.'].description()
-        opts['amend'] = True
+        if not opts.get(b'logfile'):
+            opts[b'message'] = opts.get(b'message') or repo[b'.'].description()
+        opts[b'amend'] = True
         return commands._docommit(ui, repo, *pats, **pycompat.strkwargs(opts))
--- a/hgext/automv.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/automv.py	Sun Oct 06 09:48:39 2019 -0400
@@ -42,14 +42,14 @@
 configitem = registrar.configitem(configtable)
 
 configitem(
-    'automv', 'similarity', default=95,
+    b'automv', b'similarity', default=95,
 )
 
 
 def extsetup(ui):
-    entry = extensions.wrapcommand(commands.table, 'commit', mvcheck)
+    entry = extensions.wrapcommand(commands.table, b'commit', mvcheck)
     entry[1].append(
-        ('', 'no-automv', None, _('disable automatic file move detection'))
+        (b'', b'no-automv', None, _(b'disable automatic file move detection'))
     )
 
 
@@ -57,11 +57,11 @@
     """Hook to check for moves at commit time"""
     opts = pycompat.byteskwargs(opts)
     renames = None
-    disabled = opts.pop('no_automv', False)
+    disabled = opts.pop(b'no_automv', False)
     if not disabled:
-        threshold = ui.configint('automv', 'similarity')
+        threshold = ui.configint(b'automv', b'similarity')
         if not 0 <= threshold <= 100:
-            raise error.Abort(_('automv.similarity must be between 0 and 100'))
+            raise error.Abort(_(b'automv.similarity must be between 0 and 100'))
         if threshold > 0:
             match = scmutil.match(repo[None], pats, opts)
             added, removed = _interestingfiles(repo, match)
@@ -87,7 +87,7 @@
     added = stat.added
     removed = stat.removed
 
-    copy = copies.pathcopies(repo['.'], repo[None], matcher)
+    copy = copies.pathcopies(repo[b'.'], repo[None], matcher)
     # remove the copy files for which we already have copy info
     added = [f for f in added if f not in copy]
 
@@ -108,10 +108,10 @@
         ):
             if repo.ui.verbose:
                 repo.ui.status(
-                    _('detected move of %s as %s (%d%% similar)\n')
+                    _(b'detected move of %s as %s (%d%% similar)\n')
                     % (uipathfn(src), uipathfn(dst), score * 100)
                 )
             renames[dst] = src
     if renames:
-        repo.ui.status(_('detected move of %d files\n') % len(renames))
+        repo.ui.status(_(b'detected move of %d files\n') % len(renames))
     return renames
--- a/hgext/beautifygraph.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/beautifygraph.py	Sun Oct 06 09:48:39 2019 -0400
@@ -26,33 +26,33 @@
 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
 # be specifying the version(s) of Mercurial they are tested with, or
 # leave the attribute unspecified.
-testedwith = 'ships-with-hg-core'
+testedwith = b'ships-with-hg-core'
 
 
 def prettyedge(before, edge, after):
-    if edge == '~':
-        return '\xE2\x95\xA7'  # U+2567 ╧
-    if edge == '/':
-        return '\xE2\x95\xB1'  # U+2571 ╱
-    if edge == '-':
-        return '\xE2\x94\x80'  # U+2500 ─
-    if edge == '|':
-        return '\xE2\x94\x82'  # U+2502 │
-    if edge == ':':
-        return '\xE2\x94\x86'  # U+2506 ┆
-    if edge == '\\':
-        return '\xE2\x95\xB2'  # U+2572 ╲
-    if edge == '+':
-        if before == ' ' and not after == ' ':
-            return '\xE2\x94\x9C'  # U+251C ├
-        if after == ' ' and not before == ' ':
-            return '\xE2\x94\xA4'  # U+2524 ┤
-        return '\xE2\x94\xBC'  # U+253C ┼
+    if edge == b'~':
+        return b'\xE2\x95\xA7'  # U+2567 ╧
+    if edge == b'/':
+        return b'\xE2\x95\xB1'  # U+2571 ╱
+    if edge == b'-':
+        return b'\xE2\x94\x80'  # U+2500 ─
+    if edge == b'|':
+        return b'\xE2\x94\x82'  # U+2502 │
+    if edge == b':':
+        return b'\xE2\x94\x86'  # U+2506 ┆
+    if edge == b'\\':
+        return b'\xE2\x95\xB2'  # U+2572 ╲
+    if edge == b'+':
+        if before == b' ' and not after == b' ':
+            return b'\xE2\x94\x9C'  # U+251C ├
+        if after == b' ' and not before == b' ':
+            return b'\xE2\x94\xA4'  # U+2524 ┤
+        return b'\xE2\x94\xBC'  # U+253C ┼
     return edge
 
 
 def convertedges(line):
-    line = ' %s ' % line
+    line = b' %s ' % line
     pretty = []
     for idx in pycompat.xrange(len(line) - 2):
         pretty.append(
@@ -62,21 +62,21 @@
                 line[idx + 2 : idx + 3],
             )
         )
-    return ''.join(pretty)
+    return b''.join(pretty)
 
 
 def getprettygraphnode(orig, *args, **kwargs):
     node = orig(*args, **kwargs)
-    if node == 'o':
-        return '\xE2\x97\x8B'  # U+25CB ○
-    if node == '@':
-        return '\xE2\x97\x8D'  # U+25CD ◍
-    if node == '*':
-        return '\xE2\x88\x97'  # U+2217 ∗
-    if node == 'x':
-        return '\xE2\x97\x8C'  # U+25CC ◌
-    if node == '_':
-        return '\xE2\x95\xA4'  # U+2564 ╤
+    if node == b'o':
+        return b'\xE2\x97\x8B'  # U+25CB ○
+    if node == b'@':
+        return b'\xE2\x97\x8D'  # U+25CD ◍
+    if node == b'*':
+        return b'\xE2\x88\x97'  # U+2217 ∗
+    if node == b'x':
+        return b'\xE2\x97\x8C'  # U+25CC ◌
+    if node == b'_':
+        return b'\xE2\x95\xA4'  # U+2564 ╤
     return node
 
 
@@ -87,21 +87,21 @@
 
 
 def extsetup(ui):
-    if ui.plain('graph'):
+    if ui.plain(b'graph'):
         return
 
-    if encoding.encoding != 'UTF-8':
-        ui.warn(_('beautifygraph: unsupported encoding, UTF-8 required\n'))
+    if encoding.encoding != b'UTF-8':
+        ui.warn(_(b'beautifygraph: unsupported encoding, UTF-8 required\n'))
         return
 
     if r'A' in encoding._wide:
         ui.warn(
             _(
-                'beautifygraph: unsupported terminal settings, '
-                'monospace narrow text required\n'
+                b'beautifygraph: unsupported terminal settings, '
+                b'monospace narrow text required\n'
             )
         )
         return
 
-    extensions.wrapfunction(graphmod, 'outputgraph', outputprettygraph)
-    extensions.wrapfunction(templatekw, 'getgraphnode', getprettygraphnode)
+    extensions.wrapfunction(graphmod, b'outputgraph', outputprettygraph)
+    extensions.wrapfunction(templatekw, b'getgraphnode', getprettygraphnode)
--- a/hgext/blackbox.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/blackbox.py	Sun Oct 06 09:48:39 2019 -0400
@@ -63,7 +63,7 @@
 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
 # be specifying the version(s) of Mercurial they are tested with, or
 # leave the attribute unspecified.
-testedwith = 'ships-with-hg-core'
+testedwith = b'ships-with-hg-core'
 
 cmdtable = {}
 command = registrar.command(cmdtable)
@@ -72,27 +72,27 @@
 configitem = registrar.configitem(configtable)
 
 configitem(
-    'blackbox', 'dirty', default=False,
+    b'blackbox', b'dirty', default=False,
 )
 configitem(
-    'blackbox', 'maxsize', default='1 MB',
+    b'blackbox', b'maxsize', default=b'1 MB',
 )
 configitem(
-    'blackbox', 'logsource', default=False,
+    b'blackbox', b'logsource', default=False,
 )
 configitem(
-    'blackbox', 'maxfiles', default=7,
+    b'blackbox', b'maxfiles', default=7,
 )
 configitem(
-    'blackbox', 'track', default=lambda: ['*'],
+    b'blackbox', b'track', default=lambda: [b'*'],
 )
 configitem(
-    'blackbox',
-    'ignore',
-    default=lambda: ['chgserver', 'cmdserver', 'extension'],
+    b'blackbox',
+    b'ignore',
+    default=lambda: [b'chgserver', b'cmdserver', b'extension'],
 )
 configitem(
-    'blackbox', 'date-format', default='%Y/%m/%d %H:%M:%S',
+    b'blackbox', b'date-format', default=b'%Y/%m/%d %H:%M:%S',
 )
 
 _lastlogger = loggingutil.proxylogger()
@@ -101,10 +101,10 @@
 class blackboxlogger(object):
     def __init__(self, ui, repo):
         self._repo = repo
-        self._trackedevents = set(ui.configlist('blackbox', 'track'))
-        self._ignoredevents = set(ui.configlist('blackbox', 'ignore'))
-        self._maxfiles = ui.configint('blackbox', 'maxfiles')
-        self._maxsize = ui.configbytes('blackbox', 'maxsize')
+        self._trackedevents = set(ui.configlist(b'blackbox', b'track'))
+        self._ignoredevents = set(ui.configlist(b'blackbox', b'ignore'))
+        self._maxfiles = ui.configint(b'blackbox', b'maxfiles')
+        self._maxsize = ui.configbytes(b'blackbox', b'maxsize')
         self._inlog = False
 
     def tracked(self, event):
@@ -125,29 +125,29 @@
             self._inlog = False
 
     def _log(self, ui, event, msg, opts):
-        default = ui.configdate('devel', 'default-date')
-        date = dateutil.datestr(default, ui.config('blackbox', 'date-format'))
+        default = ui.configdate(b'devel', b'default-date')
+        date = dateutil.datestr(default, ui.config(b'blackbox', b'date-format'))
         user = procutil.getuser()
-        pid = '%d' % procutil.getpid()
-        changed = ''
+        pid = b'%d' % procutil.getpid()
+        changed = b''
         ctx = self._repo[None]
         parents = ctx.parents()
-        rev = '+'.join([hex(p.node()) for p in parents])
-        if ui.configbool('blackbox', 'dirty') and ctx.dirty(
+        rev = b'+'.join([hex(p.node()) for p in parents])
+        if ui.configbool(b'blackbox', b'dirty') and ctx.dirty(
             missing=True, merge=False, branch=False
         ):
-            changed = '+'
-        if ui.configbool('blackbox', 'logsource'):
-            src = ' [%s]' % event
+            changed = b'+'
+        if ui.configbool(b'blackbox', b'logsource'):
+            src = b' [%s]' % event
         else:
-            src = ''
+            src = b''
         try:
-            fmt = '%s %s @%s%s (%s)%s> %s'
+            fmt = b'%s %s @%s%s (%s)%s> %s'
             args = (date, user, rev, changed, pid, src, msg)
             with loggingutil.openlogfile(
                 ui,
                 self._repo.vfs,
-                name='blackbox.log',
+                name=b'blackbox.log',
                 maxfiles=self._maxfiles,
                 maxsize=self._maxsize,
             ) as fp:
@@ -156,7 +156,7 @@
             # deactivate this to avoid failed logging again
             self._trackedevents.clear()
             ui.debug(
-                'warning: cannot write to blackbox.log: %s\n'
+                b'warning: cannot write to blackbox.log: %s\n'
                 % encoding.strtolocal(err.strerror)
             )
             return
@@ -184,13 +184,13 @@
     if _lastlogger.logger is None:
         _lastlogger.logger = logger
 
-    repo._wlockfreeprefix.add('blackbox.log')
+    repo._wlockfreeprefix.add(b'blackbox.log')
 
 
 @command(
-    'blackbox',
-    [('l', 'limit', 10, _('the number of events to show')),],
-    _('hg blackbox [OPTION]...'),
+    b'blackbox',
+    [(b'l', b'limit', 10, _(b'the number of events to show')),],
+    _(b'hg blackbox [OPTION]...'),
     helpcategory=command.CATEGORY_MAINTENANCE,
     helpbasic=True,
 )
@@ -198,12 +198,12 @@
     '''view the recent repository events
     '''
 
-    if not repo.vfs.exists('blackbox.log'):
+    if not repo.vfs.exists(b'blackbox.log'):
         return
 
     limit = opts.get(r'limit')
-    fp = repo.vfs('blackbox.log', 'r')
-    lines = fp.read().split('\n')
+    fp = repo.vfs(b'blackbox.log', b'r')
+    lines = fp.read().split(b'\n')
 
     count = 0
     output = []
@@ -216,4 +216,4 @@
             count += 1
         output.append(line)
 
-    ui.status('\n'.join(reversed(output)))
+    ui.status(b'\n'.join(reversed(output)))
--- a/hgext/bookflow.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/bookflow.py	Sun Oct 06 09:48:39 2019 -0400
@@ -24,14 +24,14 @@
     registrar,
 )
 
-MY_NAME = 'bookflow'
+MY_NAME = b'bookflow'
 
 configtable = {}
 configitem = registrar.configitem(configtable)
 
-configitem(MY_NAME, 'protect', ['@'])
-configitem(MY_NAME, 'require-bookmark', True)
-configitem(MY_NAME, 'enable-branches', False)
+configitem(MY_NAME, b'protect', [b'@'])
+configitem(MY_NAME, b'require-bookmark', True)
+configitem(MY_NAME, b'enable-branches', False)
 
 cmdtable = {}
 command = registrar.command(cmdtable)
@@ -40,19 +40,19 @@
 def commit_hook(ui, repo, **kwargs):
     active = repo._bookmarks.active
     if active:
-        if active in ui.configlist(MY_NAME, 'protect'):
+        if active in ui.configlist(MY_NAME, b'protect'):
             raise error.Abort(
-                _('cannot commit, bookmark %s is protected') % active
+                _(b'cannot commit, bookmark %s is protected') % active
             )
         if not cwd_at_bookmark(repo, active):
             raise error.Abort(
                 _(
-                    'cannot commit, working directory out of sync with active bookmark'
+                    b'cannot commit, working directory out of sync with active bookmark'
                 ),
-                hint=_("run 'hg up %s'") % active,
+                hint=_(b"run 'hg up %s'") % active,
             )
-    elif ui.configbool(MY_NAME, 'require-bookmark', True):
-        raise error.Abort(_('cannot commit without an active bookmark'))
+    elif ui.configbool(MY_NAME, b'require-bookmark', True):
+        raise error.Abort(_(b'cannot commit without an active bookmark'))
     return 0
 
 
@@ -74,7 +74,7 @@
             if name in marks:
                 raise error.Abort(
                     _(
-                        "bookmark %s already exists, to move use the --rev option"
+                        b"bookmark %s already exists, to move use the --rev option"
                     )
                     % name
                 )
@@ -92,8 +92,8 @@
     if active and not cwd_at_bookmark(repo, active):
         ui.warn(
             _(
-                "working directory out of sync with active bookmark, run "
-                "'hg up %s'"
+                b"working directory out of sync with active bookmark, run "
+                b"'hg up %s'"
             )
             % active
         )
@@ -104,23 +104,23 @@
     if label and not opts.get(r'clean') and not opts.get(r'rev'):
         raise error.Abort(
             _(
-                "creating named branches is disabled and you should use bookmarks"
+                b"creating named branches is disabled and you should use bookmarks"
             ),
-            hint="see 'hg help bookflow'",
+            hint=b"see 'hg help bookflow'",
         )
     return orig(ui, repo, label, **opts)
 
 
 def cwd_at_bookmark(repo, mark):
     mark_id = repo._bookmarks[mark]
-    cur_id = repo.lookup('.')
+    cur_id = repo.lookup(b'.')
     return cur_id == mark_id
 
 
 def uisetup(ui):
-    extensions.wrapfunction(bookmarks, 'update', bookmarks_update)
-    extensions.wrapfunction(bookmarks, 'addbookmarks', bookmarks_addbookmarks)
-    extensions.wrapcommand(commands.table, 'commit', commands_commit)
-    extensions.wrapcommand(commands.table, 'pull', commands_pull)
-    if not ui.configbool(MY_NAME, 'enable-branches'):
-        extensions.wrapcommand(commands.table, 'branch', commands_branch)
+    extensions.wrapfunction(bookmarks, b'update', bookmarks_update)
+    extensions.wrapfunction(bookmarks, b'addbookmarks', bookmarks_addbookmarks)
+    extensions.wrapcommand(commands.table, b'commit', commands_commit)
+    extensions.wrapcommand(commands.table, b'pull', commands_pull)
+    if not ui.configbool(MY_NAME, b'enable-branches'):
+        extensions.wrapcommand(commands.table, b'branch', commands_branch)
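
The bookflow hunks above are mechanical, but the failure they prevent is
easy to show outside Mercurial. A minimal standalone sketch (plain Python,
none of it Mercurial API): on Python 3, bytes and str never compare equal,
so a table keyed with bytes must also be queried with bytes.

    # Sketch only: config stands in for a bytes-keyed config table.
    config = {(b'bookflow', b'protect'): [b'@']}

    assert (b'bookflow', b'protect') in config    # bytes key: found
    assert ('bookflow', 'protect') not in config  # str key: silently missing

Under Python 2 both lookups succeed, which is why the unprefixed literals
worked before this change.
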
--- a/hgext/bugzilla.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/bugzilla.py	Sun Oct 06 09:48:39 2019 -0400
@@ -319,32 +319,32 @@
 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
 # be specifying the version(s) of Mercurial they are tested with, or
 # leave the attribute unspecified.
-testedwith = 'ships-with-hg-core'
+testedwith = b'ships-with-hg-core'
 
 configtable = {}
 configitem = registrar.configitem(configtable)
 
 configitem(
-    'bugzilla', 'apikey', default='',
+    b'bugzilla', b'apikey', default=b'',
 )
 configitem(
-    'bugzilla', 'bzdir', default='/var/www/html/bugzilla',
+    b'bugzilla', b'bzdir', default=b'/var/www/html/bugzilla',
 )
 configitem(
-    'bugzilla', 'bzemail', default=None,
+    b'bugzilla', b'bzemail', default=None,
 )
 configitem(
-    'bugzilla', 'bzurl', default='http://localhost/bugzilla/',
+    b'bugzilla', b'bzurl', default=b'http://localhost/bugzilla/',
 )
 configitem(
-    'bugzilla', 'bzuser', default=None,
+    b'bugzilla', b'bzuser', default=None,
 )
 configitem(
-    'bugzilla', 'db', default='bugs',
+    b'bugzilla', b'db', default=b'bugs',
 )
 configitem(
-    'bugzilla',
-    'fixregexp',
+    b'bugzilla',
+    b'fixregexp',
     default=(
         br'fix(?:es)?\s*(?:bugs?\s*)?,?\s*'
         br'(?:nos?\.?|num(?:ber)?s?)?\s*'
@@ -353,23 +353,23 @@
     ),
 )
 configitem(
-    'bugzilla', 'fixresolution', default='FIXED',
+    b'bugzilla', b'fixresolution', default=b'FIXED',
 )
 configitem(
-    'bugzilla', 'fixstatus', default='RESOLVED',
+    b'bugzilla', b'fixstatus', default=b'RESOLVED',
 )
 configitem(
-    'bugzilla', 'host', default='localhost',
+    b'bugzilla', b'host', default=b'localhost',
 )
 configitem(
-    'bugzilla', 'notify', default=configitem.dynamicdefault,
+    b'bugzilla', b'notify', default=configitem.dynamicdefault,
 )
 configitem(
-    'bugzilla', 'password', default=None,
+    b'bugzilla', b'password', default=None,
 )
 configitem(
-    'bugzilla',
-    'regexp',
+    b'bugzilla',
+    b'regexp',
     default=(
         br'bugs?\s*,?\s*(?:#|nos?\.?|num(?:ber)?s?)?\s*'
         br'(?P<ids>(?:\d+\s*(?:,?\s*(?:and)?)?\s*)+)'
@@ -377,25 +377,25 @@
     ),
 )
 configitem(
-    'bugzilla', 'strip', default=0,
+    b'bugzilla', b'strip', default=0,
 )
 configitem(
-    'bugzilla', 'style', default=None,
+    b'bugzilla', b'style', default=None,
 )
 configitem(
-    'bugzilla', 'template', default=None,
+    b'bugzilla', b'template', default=None,
 )
 configitem(
-    'bugzilla', 'timeout', default=5,
+    b'bugzilla', b'timeout', default=5,
 )
 configitem(
-    'bugzilla', 'user', default='bugs',
+    b'bugzilla', b'user', default=b'bugs',
 )
 configitem(
-    'bugzilla', 'usermap', default=None,
+    b'bugzilla', b'usermap', default=None,
 )
 configitem(
-    'bugzilla', 'version', default=None,
+    b'bugzilla', b'version', default=None,
 )
 
 
@@ -404,13 +404,13 @@
 
     def __init__(self, ui):
         self.ui = ui
-        usermap = self.ui.config('bugzilla', 'usermap')
+        usermap = self.ui.config(b'bugzilla', b'usermap')
         if usermap:
-            self.ui.readconfig(usermap, sections=['usermap'])
+            self.ui.readconfig(usermap, sections=[b'usermap'])
 
     def map_committer(self, user):
         '''map name of committer to Bugzilla user name.'''
-        for committer, bzuser in self.ui.configitems('usermap'):
+        for committer, bzuser in self.ui.configitems(b'usermap'):
             if committer.lower() == user.lower():
                 return bzuser
         return user
@@ -457,7 +457,7 @@
     @staticmethod
     def sql_buglist(ids):
         '''return SQL-friendly list of bug ids'''
-        return '(' + ','.join(map(str, ids)) + ')'
+        return b'(' + b','.join(map(pycompat.bytestr, ids)) + b')'
 
     _MySQLdb = None
 
@@ -467,18 +467,20 @@
 
             bzmysql._MySQLdb = mysql
         except ImportError as err:
-            raise error.Abort(_('python mysql support not available: %s') % err)
+            raise error.Abort(
+                _(b'python mysql support not available: %s') % err
+            )
 
         bzaccess.__init__(self, ui)
 
-        host = self.ui.config('bugzilla', 'host')
-        user = self.ui.config('bugzilla', 'user')
-        passwd = self.ui.config('bugzilla', 'password')
-        db = self.ui.config('bugzilla', 'db')
-        timeout = int(self.ui.config('bugzilla', 'timeout'))
+        host = self.ui.config(b'bugzilla', b'host')
+        user = self.ui.config(b'bugzilla', b'user')
+        passwd = self.ui.config(b'bugzilla', b'password')
+        db = self.ui.config(b'bugzilla', b'db')
+        timeout = int(self.ui.config(b'bugzilla', b'timeout'))
         self.ui.note(
-            _('connecting to %s:%s as %s, password %s\n')
-            % (host, db, user, '*' * len(passwd))
+            _(b'connecting to %s:%s as %s, password %s\n')
+            % (host, db, user, b'*' * len(passwd))
         )
         self.conn = bzmysql._MySQLdb.connect(
             host=host, user=user, passwd=passwd, db=db, connect_timeout=timeout
@@ -486,35 +488,35 @@
         self.cursor = self.conn.cursor()
         self.longdesc_id = self.get_longdesc_id()
         self.user_ids = {}
-        self.default_notify = "cd %(bzdir)s && ./processmail %(id)s %(user)s"
+        self.default_notify = b"cd %(bzdir)s && ./processmail %(id)s %(user)s"
 
     def run(self, *args, **kwargs):
         '''run a query.'''
-        self.ui.note(_('query: %s %s\n') % (args, kwargs))
+        self.ui.note(_(b'query: %s %s\n') % (args, kwargs))
         try:
             self.cursor.execute(*args, **kwargs)
         except bzmysql._MySQLdb.MySQLError:
-            self.ui.note(_('failed query: %s %s\n') % (args, kwargs))
+            self.ui.note(_(b'failed query: %s %s\n') % (args, kwargs))
             raise
 
     def get_longdesc_id(self):
         '''get identity of longdesc field'''
-        self.run('select fieldid from fielddefs where name = "longdesc"')
+        self.run(b'select fieldid from fielddefs where name = "longdesc"')
         ids = self.cursor.fetchall()
         if len(ids) != 1:
-            raise error.Abort(_('unknown database schema'))
+            raise error.Abort(_(b'unknown database schema'))
         return ids[0][0]
 
     def filter_real_bug_ids(self, bugs):
         '''filter not-existing bugs from set.'''
         self.run(
-            'select bug_id from bugs where bug_id in %s'
+            b'select bug_id from bugs where bug_id in %s'
             % bzmysql.sql_buglist(bugs.keys())
         )
         existing = [id for (id,) in self.cursor.fetchall()]
         for id in bugs.keys():
             if id not in existing:
-                self.ui.status(_('bug %d does not exist\n') % id)
+                self.ui.status(_(b'bug %d does not exist\n') % id)
                 del bugs[id]
 
     def filter_cset_known_bug_ids(self, node, bugs):
@@ -526,36 +528,36 @@
         )
         for (id,) in self.cursor.fetchall():
             self.ui.status(
-                _('bug %d already knows about changeset %s\n')
+                _(b'bug %d already knows about changeset %s\n')
                 % (id, short(node))
             )
             del bugs[id]
 
     def notify(self, bugs, committer):
         '''tell bugzilla to send mail.'''
-        self.ui.status(_('telling bugzilla to send mail:\n'))
+        self.ui.status(_(b'telling bugzilla to send mail:\n'))
         (user, userid) = self.get_bugzilla_user(committer)
         for id in bugs.keys():
-            self.ui.status(_('  bug %s\n') % id)
-            cmdfmt = self.ui.config('bugzilla', 'notify', self.default_notify)
-            bzdir = self.ui.config('bugzilla', 'bzdir')
+            self.ui.status(_(b'  bug %s\n') % id)
+            cmdfmt = self.ui.config(b'bugzilla', b'notify', self.default_notify)
+            bzdir = self.ui.config(b'bugzilla', b'bzdir')
             try:
                 # Backwards-compatible with old notify string, which
                 # took one string. This will throw with a new format
                 # string.
                 cmd = cmdfmt % id
             except TypeError:
-                cmd = cmdfmt % {'bzdir': bzdir, 'id': id, 'user': user}
-            self.ui.note(_('running notify command %s\n') % cmd)
-            fp = procutil.popen('(%s) 2>&1' % cmd, 'rb')
+                cmd = cmdfmt % {b'bzdir': bzdir, b'id': id, b'user': user}
+            self.ui.note(_(b'running notify command %s\n') % cmd)
+            fp = procutil.popen(b'(%s) 2>&1' % cmd, b'rb')
             out = util.fromnativeeol(fp.read())
             ret = fp.close()
             if ret:
                 self.ui.warn(out)
                 raise error.Abort(
-                    _('bugzilla notify command %s') % procutil.explainexit(ret)
+                    _(b'bugzilla notify command %s') % procutil.explainexit(ret)
                 )
-        self.ui.status(_('done\n'))
+        self.ui.status(_(b'done\n'))
 
     def get_user_id(self, user):
         '''look up numeric bugzilla user id.'''
@@ -565,7 +567,7 @@
             try:
                 userid = int(user)
             except ValueError:
-                self.ui.note(_('looking up user %s\n') % user)
+                self.ui.note(_(b'looking up user %s\n') % user)
                 self.run(
                     '''select userid from profiles
                             where login_name like %s''',
@@ -587,16 +589,16 @@
             userid = self.get_user_id(user)
         except KeyError:
             try:
-                defaultuser = self.ui.config('bugzilla', 'bzuser')
+                defaultuser = self.ui.config(b'bugzilla', b'bzuser')
                 if not defaultuser:
                     raise error.Abort(
-                        _('cannot find bugzilla user id for %s') % user
+                        _(b'cannot find bugzilla user id for %s') % user
                     )
                 userid = self.get_user_id(defaultuser)
                 user = defaultuser
             except KeyError:
                 raise error.Abort(
-                    _('cannot find bugzilla user id for %s or %s')
+                    _(b'cannot find bugzilla user id for %s or %s')
                     % (user, defaultuser)
                 )
         return (user, userid)
@@ -607,7 +609,7 @@
         Try adding comment as committer of changeset, otherwise as
         default bugzilla user.'''
         if len(newstate) > 0:
-            self.ui.warn(_("Bugzilla/MySQL cannot update bug state\n"))
+            self.ui.warn(_(b"Bugzilla/MySQL cannot update bug state\n"))
 
         (user, userid) = self.get_bugzilla_user(committer)
         now = time.strftime(r'%Y-%m-%d %H:%M:%S')
@@ -631,7 +633,7 @@
     def __init__(self, ui):
         bzmysql.__init__(self, ui)
         self.default_notify = (
-            "cd %(bzdir)s && perl -T contrib/sendbugmail.pl %(id)s %(user)s"
+            b"cd %(bzdir)s && perl -T contrib/sendbugmail.pl %(id)s %(user)s"
         )
 
 
@@ -643,10 +645,10 @@
 
     def get_longdesc_id(self):
         '''get identity of longdesc field'''
-        self.run('select id from fielddefs where name = "longdesc"')
+        self.run(b'select id from fielddefs where name = "longdesc"')
         ids = self.cursor.fetchall()
         if len(ids) != 1:
-            raise error.Abort(_('unknown database schema'))
+            raise error.Abort(_(b'unknown database schema'))
         return ids[0][0]
 
 
@@ -674,7 +676,7 @@
     def send_cookies(self, connection):
         if self.cookies:
             for cookie in self.cookies:
-                connection.putheader("Cookie", cookie)
+                connection.putheader(b"Cookie", cookie)
 
     def request(self, host, handler, request_body, verbose=0):
         self.verbose = verbose
@@ -702,9 +704,9 @@
             response = h._conn.getresponse()
 
         # Add any cookie definitions to our list.
-        for header in response.msg.getallmatchingheaders("Set-Cookie"):
-            val = header.split(": ", 1)[1]
-            cookie = val.split(";", 1)[0]
+        for header in response.msg.getallmatchingheaders(b"Set-Cookie"):
+            val = header.split(b": ", 1)[1]
+            cookie = val.split(b";", 1)[0]
             self.cookies.append(cookie)
 
         if response.status != 200:
@@ -729,13 +731,13 @@
 # inheritance with a new-style class.
 class cookietransport(cookietransportrequest, xmlrpclib.Transport):
     def __init__(self, use_datetime=0):
-        if util.safehasattr(xmlrpclib.Transport, "__init__"):
+        if util.safehasattr(xmlrpclib.Transport, b"__init__"):
             xmlrpclib.Transport.__init__(self, use_datetime)
 
 
 class cookiesafetransport(cookietransportrequest, xmlrpclib.SafeTransport):
     def __init__(self, use_datetime=0):
-        if util.safehasattr(xmlrpclib.Transport, "__init__"):
+        if util.safehasattr(xmlrpclib.Transport, b"__init__"):
             xmlrpclib.SafeTransport.__init__(self, use_datetime)
 
 
@@ -748,26 +750,26 @@
     def __init__(self, ui):
         bzaccess.__init__(self, ui)
 
-        bzweb = self.ui.config('bugzilla', 'bzurl')
-        bzweb = bzweb.rstrip("/") + "/xmlrpc.cgi"
+        bzweb = self.ui.config(b'bugzilla', b'bzurl')
+        bzweb = bzweb.rstrip(b"/") + b"/xmlrpc.cgi"
 
-        user = self.ui.config('bugzilla', 'user')
-        passwd = self.ui.config('bugzilla', 'password')
+        user = self.ui.config(b'bugzilla', b'user')
+        passwd = self.ui.config(b'bugzilla', b'password')
 
-        self.fixstatus = self.ui.config('bugzilla', 'fixstatus')
-        self.fixresolution = self.ui.config('bugzilla', 'fixresolution')
+        self.fixstatus = self.ui.config(b'bugzilla', b'fixstatus')
+        self.fixresolution = self.ui.config(b'bugzilla', b'fixresolution')
 
         self.bzproxy = xmlrpclib.ServerProxy(bzweb, self.transport(bzweb))
-        ver = self.bzproxy.Bugzilla.version()['version'].split('.')
+        ver = self.bzproxy.Bugzilla.version()[b'version'].split(b'.')
         self.bzvermajor = int(ver[0])
         self.bzverminor = int(ver[1])
         login = self.bzproxy.User.login(
-            {'login': user, 'password': passwd, 'restrict_login': True}
+            {b'login': user, b'password': passwd, b'restrict_login': True}
         )
-        self.bztoken = login.get('token', '')
+        self.bztoken = login.get(b'token', b'')
 
     def transport(self, uri):
-        if util.urlreq.urlparse(uri, "http")[0] == "https":
+        if util.urlreq.urlparse(uri, b"http")[0] == b"https":
             return cookiesafetransport()
         else:
             return cookietransport()
@@ -775,56 +777,58 @@
     def get_bug_comments(self, id):
         """Return a string with all comment text for a bug."""
         c = self.bzproxy.Bug.comments(
-            {'ids': [id], 'include_fields': ['text'], 'token': self.bztoken}
+            {b'ids': [id], b'include_fields': [b'text'], b'token': self.bztoken}
         )
-        return ''.join([t['text'] for t in c['bugs']['%d' % id]['comments']])
+        return b''.join(
+            [t[b'text'] for t in c[b'bugs'][b'%d' % id][b'comments']]
+        )
 
     def filter_real_bug_ids(self, bugs):
         probe = self.bzproxy.Bug.get(
             {
-                'ids': sorted(bugs.keys()),
-                'include_fields': [],
-                'permissive': True,
-                'token': self.bztoken,
+                b'ids': sorted(bugs.keys()),
+                b'include_fields': [],
+                b'permissive': True,
+                b'token': self.bztoken,
             }
         )
-        for badbug in probe['faults']:
-            id = badbug['id']
-            self.ui.status(_('bug %d does not exist\n') % id)
+        for badbug in probe[b'faults']:
+            id = badbug[b'id']
+            self.ui.status(_(b'bug %d does not exist\n') % id)
             del bugs[id]
 
     def filter_cset_known_bug_ids(self, node, bugs):
         for id in sorted(bugs.keys()):
             if self.get_bug_comments(id).find(short(node)) != -1:
                 self.ui.status(
-                    _('bug %d already knows about changeset %s\n')
+                    _(b'bug %d already knows about changeset %s\n')
                     % (id, short(node))
                 )
                 del bugs[id]
 
     def updatebug(self, bugid, newstate, text, committer):
         args = {}
-        if 'hours' in newstate:
-            args['work_time'] = newstate['hours']
+        if b'hours' in newstate:
+            args[b'work_time'] = newstate[b'hours']
 
         if self.bzvermajor >= 4:
-            args['ids'] = [bugid]
-            args['comment'] = {'body': text}
-            if 'fix' in newstate:
-                args['status'] = self.fixstatus
-                args['resolution'] = self.fixresolution
-            args['token'] = self.bztoken
+            args[b'ids'] = [bugid]
+            args[b'comment'] = {b'body': text}
+            if b'fix' in newstate:
+                args[b'status'] = self.fixstatus
+                args[b'resolution'] = self.fixresolution
+            args[b'token'] = self.bztoken
             self.bzproxy.Bug.update(args)
         else:
-            if 'fix' in newstate:
+            if b'fix' in newstate:
                 self.ui.warn(
                     _(
-                        "Bugzilla/XMLRPC needs Bugzilla 4.0 or later "
-                        "to mark bugs fixed\n"
+                        b"Bugzilla/XMLRPC needs Bugzilla 4.0 or later "
+                        b"to mark bugs fixed\n"
                     )
                 )
-            args['id'] = bugid
-            args['comment'] = text
+            args[b'id'] = bugid
+            args[b'comment'] = text
             self.bzproxy.Bug.add_comment(args)
 
 
@@ -851,18 +855,18 @@
     def __init__(self, ui):
         bzxmlrpc.__init__(self, ui)
 
-        self.bzemail = self.ui.config('bugzilla', 'bzemail')
+        self.bzemail = self.ui.config(b'bugzilla', b'bzemail')
         if not self.bzemail:
-            raise error.Abort(_("configuration 'bzemail' missing"))
+            raise error.Abort(_(b"configuration 'bzemail' missing"))
         mail.validateconfig(self.ui)
 
     def makecommandline(self, fieldname, value):
         if self.bzvermajor >= 4:
-            return "@%s %s" % (fieldname, pycompat.bytestr(value))
+            return b"@%s %s" % (fieldname, pycompat.bytestr(value))
         else:
-            if fieldname == "id":
-                fieldname = "bug_id"
-            return "@%s = %s" % (fieldname, pycompat.bytestr(value))
+            if fieldname == b"id":
+                fieldname = b"bug_id"
+            return b"@%s = %s" % (fieldname, pycompat.bytestr(value))
 
     def send_bug_modify_email(self, bugid, commands, comment, committer):
         '''send modification message to Bugzilla bug via email.
@@ -877,39 +881,41 @@
         '''
         user = self.map_committer(committer)
         matches = self.bzproxy.User.get(
-            {'match': [user], 'token': self.bztoken}
+            {b'match': [user], b'token': self.bztoken}
         )
-        if not matches['users']:
-            user = self.ui.config('bugzilla', 'user')
+        if not matches[b'users']:
+            user = self.ui.config(b'bugzilla', b'user')
             matches = self.bzproxy.User.get(
-                {'match': [user], 'token': self.bztoken}
+                {b'match': [user], b'token': self.bztoken}
             )
-            if not matches['users']:
+            if not matches[b'users']:
                 raise error.Abort(
-                    _("default bugzilla user %s email not found") % user
+                    _(b"default bugzilla user %s email not found") % user
                 )
-        user = matches['users'][0]['email']
-        commands.append(self.makecommandline("id", bugid))
+        user = matches[b'users'][0][b'email']
+        commands.append(self.makecommandline(b"id", bugid))
 
-        text = "\n".join(commands) + "\n\n" + comment
+        text = b"\n".join(commands) + b"\n\n" + comment
 
         _charsets = mail._charsets(self.ui)
         user = mail.addressencode(self.ui, user, _charsets)
         bzemail = mail.addressencode(self.ui, self.bzemail, _charsets)
         msg = mail.mimeencode(self.ui, text, _charsets)
-        msg['From'] = user
-        msg['To'] = bzemail
-        msg['Subject'] = mail.headencode(self.ui, "Bug modification", _charsets)
+        msg[b'From'] = user
+        msg[b'To'] = bzemail
+        msg[b'Subject'] = mail.headencode(
+            self.ui, b"Bug modification", _charsets
+        )
         sendmail = mail.connect(self.ui)
         sendmail(user, bzemail, msg.as_string())
 
     def updatebug(self, bugid, newstate, text, committer):
         cmds = []
-        if 'hours' in newstate:
-            cmds.append(self.makecommandline("work_time", newstate['hours']))
-        if 'fix' in newstate:
-            cmds.append(self.makecommandline("bug_status", self.fixstatus))
-            cmds.append(self.makecommandline("resolution", self.fixresolution))
+        if b'hours' in newstate:
+            cmds.append(self.makecommandline(b"work_time", newstate[b'hours']))
+        if b'fix' in newstate:
+            cmds.append(self.makecommandline(b"bug_status", self.fixstatus))
+            cmds.append(self.makecommandline(b"resolution", self.fixresolution))
         self.send_bug_modify_email(bugid, cmds, text, committer)
 
 
@@ -924,26 +930,26 @@
 
     def __init__(self, ui):
         bzaccess.__init__(self, ui)
-        bz = self.ui.config('bugzilla', 'bzurl')
-        self.bzroot = '/'.join([bz, 'rest'])
-        self.apikey = self.ui.config('bugzilla', 'apikey')
-        self.user = self.ui.config('bugzilla', 'user')
-        self.passwd = self.ui.config('bugzilla', 'password')
-        self.fixstatus = self.ui.config('bugzilla', 'fixstatus')
-        self.fixresolution = self.ui.config('bugzilla', 'fixresolution')
+        bz = self.ui.config(b'bugzilla', b'bzurl')
+        self.bzroot = b'/'.join([bz, b'rest'])
+        self.apikey = self.ui.config(b'bugzilla', b'apikey')
+        self.user = self.ui.config(b'bugzilla', b'user')
+        self.passwd = self.ui.config(b'bugzilla', b'password')
+        self.fixstatus = self.ui.config(b'bugzilla', b'fixstatus')
+        self.fixresolution = self.ui.config(b'bugzilla', b'fixresolution')
 
     def apiurl(self, targets, include_fields=None):
-        url = '/'.join([self.bzroot] + [pycompat.bytestr(t) for t in targets])
+        url = b'/'.join([self.bzroot] + [pycompat.bytestr(t) for t in targets])
         qv = {}
         if self.apikey:
-            qv['api_key'] = self.apikey
+            qv[b'api_key'] = self.apikey
         elif self.user and self.passwd:
-            qv['login'] = self.user
-            qv['password'] = self.passwd
+            qv[b'login'] = self.user
+            qv[b'password'] = self.passwd
         if include_fields:
-            qv['include_fields'] = include_fields
+            qv[b'include_fields'] = include_fields
         if qv:
-            url = '%s?%s' % (url, util.urlreq.urlencode(qv))
+            url = b'%s?%s' % (url, util.urlreq.urlencode(qv))
         return url
 
     def _fetch(self, burl):
@@ -952,30 +958,30 @@
             return json.loads(resp.read())
         except util.urlerr.httperror as inst:
             if inst.code == 401:
-                raise error.Abort(_('authorization failed'))
+                raise error.Abort(_(b'authorization failed'))
             if inst.code == 404:
                 raise NotFound()
             else:
                 raise
 
-    def _submit(self, burl, data, method='POST'):
+    def _submit(self, burl, data, method=b'POST'):
         data = json.dumps(data)
-        if method == 'PUT':
+        if method == b'PUT':
 
             class putrequest(util.urlreq.request):
                 def get_method(self):
-                    return 'PUT'
+                    return b'PUT'
 
             request_type = putrequest
         else:
             request_type = util.urlreq.request
-        req = request_type(burl, data, {'Content-Type': 'application/json'})
+        req = request_type(burl, data, {b'Content-Type': b'application/json'})
         try:
             resp = url.opener(self.ui).open(req)
             return json.loads(resp.read())
         except util.urlerr.httperror as inst:
             if inst.code == 401:
-                raise error.Abort(_('authorization failed'))
+                raise error.Abort(_(b'authorization failed'))
             if inst.code == 404:
                 raise NotFound()
             else:
@@ -985,7 +991,7 @@
         '''remove bug IDs that do not exist in Bugzilla from bugs.'''
         badbugs = set()
         for bugid in bugs:
-            burl = self.apiurl(('bug', bugid), include_fields='status')
+            burl = self.apiurl((b'bug', bugid), include_fields=b'status')
             try:
                 self._fetch(burl)
             except NotFound:
@@ -997,12 +1003,15 @@
         '''remove bug IDs where node occurs in comment text from bugs.'''
         sn = short(node)
         for bugid in bugs.keys():
-            burl = self.apiurl(('bug', bugid, 'comment'), include_fields='text')
+            burl = self.apiurl(
+                (b'bug', bugid, b'comment'), include_fields=b'text'
+            )
             result = self._fetch(burl)
-            comments = result['bugs'][pycompat.bytestr(bugid)]['comments']
-            if any(sn in c['text'] for c in comments):
+            comments = result[b'bugs'][pycompat.bytestr(bugid)][b'comments']
+            if any(sn in c[b'text'] for c in comments):
                 self.ui.status(
-                    _('bug %d already knows about changeset %s\n') % (bugid, sn)
+                    _(b'bug %d already knows about changeset %s\n')
+                    % (bugid, sn)
                 )
                 del bugs[bugid]
 
@@ -1013,28 +1022,32 @@
         the changeset. Otherwise use the default Bugzilla user.
         '''
         bugmod = {}
-        if 'hours' in newstate:
-            bugmod['work_time'] = newstate['hours']
-        if 'fix' in newstate:
-            bugmod['status'] = self.fixstatus
-            bugmod['resolution'] = self.fixresolution
+        if b'hours' in newstate:
+            bugmod[b'work_time'] = newstate[b'hours']
+        if b'fix' in newstate:
+            bugmod[b'status'] = self.fixstatus
+            bugmod[b'resolution'] = self.fixresolution
         if bugmod:
             # if we have to change the bugs state do it here
-            bugmod['comment'] = {
-                'comment': text,
-                'is_private': False,
-                'is_markdown': False,
+            bugmod[b'comment'] = {
+                b'comment': text,
+                b'is_private': False,
+                b'is_markdown': False,
             }
-            burl = self.apiurl(('bug', bugid))
-            self._submit(burl, bugmod, method='PUT')
-            self.ui.debug('updated bug %s\n' % bugid)
+            burl = self.apiurl((b'bug', bugid))
+            self._submit(burl, bugmod, method=b'PUT')
+            self.ui.debug(b'updated bug %s\n' % bugid)
         else:
-            burl = self.apiurl(('bug', bugid, 'comment'))
+            burl = self.apiurl((b'bug', bugid, b'comment'))
             self._submit(
                 burl,
-                {'comment': text, 'is_private': False, 'is_markdown': False,},
+                {
+                    b'comment': text,
+                    b'is_private': False,
+                    b'is_markdown': False,
+                },
             )
-            self.ui.debug('added comment to bug %s\n' % bugid)
+            self.ui.debug(b'added comment to bug %s\n' % bugid)
 
     def notify(self, bugs, committer):
         '''Force sending of Bugzilla notification emails.
@@ -1049,32 +1062,32 @@
     # supported versions of bugzilla. different versions have
     # different schemas.
     _versions = {
-        '2.16': bzmysql,
-        '2.18': bzmysql_2_18,
-        '3.0': bzmysql_3_0,
-        'xmlrpc': bzxmlrpc,
-        'xmlrpc+email': bzxmlrpcemail,
-        'restapi': bzrestapi,
+        b'2.16': bzmysql,
+        b'2.18': bzmysql_2_18,
+        b'3.0': bzmysql_3_0,
+        b'xmlrpc': bzxmlrpc,
+        b'xmlrpc+email': bzxmlrpcemail,
+        b'restapi': bzrestapi,
     }
 
     def __init__(self, ui, repo):
         self.ui = ui
         self.repo = repo
 
-        bzversion = self.ui.config('bugzilla', 'version')
+        bzversion = self.ui.config(b'bugzilla', b'version')
         try:
             bzclass = bugzilla._versions[bzversion]
         except KeyError:
             raise error.Abort(
-                _('bugzilla version %s not supported') % bzversion
+                _(b'bugzilla version %s not supported') % bzversion
             )
         self.bzdriver = bzclass(self.ui)
 
         self.bug_re = re.compile(
-            self.ui.config('bugzilla', 'regexp'), re.IGNORECASE
+            self.ui.config(b'bugzilla', b'regexp'), re.IGNORECASE
         )
         self.fix_re = re.compile(
-            self.ui.config('bugzilla', 'fixregexp'), re.IGNORECASE
+            self.ui.config(b'bugzilla', b'fixregexp'), re.IGNORECASE
         )
         self.split_re = re.compile(br'\D+')
 
@@ -1106,25 +1119,25 @@
             start = m.end()
             if m is bugmatch:
                 bugmatch = self.bug_re.search(ctx.description(), start)
-                if 'fix' in bugattribs:
-                    del bugattribs['fix']
+                if b'fix' in bugattribs:
+                    del bugattribs[b'fix']
             else:
                 fixmatch = self.fix_re.search(ctx.description(), start)
-                bugattribs['fix'] = None
+                bugattribs[b'fix'] = None
 
             try:
-                ids = m.group('ids')
+                ids = m.group(b'ids')
             except IndexError:
                 ids = m.group(1)
             try:
-                hours = float(m.group('hours'))
-                bugattribs['hours'] = hours
+                hours = float(m.group(b'hours'))
+                bugattribs[b'hours'] = hours
             except IndexError:
                 pass
             except TypeError:
                 pass
             except ValueError:
-                self.ui.status(_("%s: invalid hours\n") % m.group('hours'))
+                self.ui.status(_(b"%s: invalid hours\n") % m.group(b'hours'))
 
             for id in self.split_re.split(ids):
                 if not id:
@@ -1142,10 +1155,10 @@
         def webroot(root):
             '''strip leading prefix of repo root and turn into
             url-safe path.'''
-            count = int(self.ui.config('bugzilla', 'strip'))
+            count = int(self.ui.config(b'bugzilla', b'strip'))
             root = util.pconvert(root)
             while count > 0:
-                c = root.find('/')
+                c = root.find(b'/')
                 if c == -1:
                     break
                 root = root[c + 1 :]
@@ -1153,13 +1166,13 @@
             return root
 
         mapfile = None
-        tmpl = self.ui.config('bugzilla', 'template')
+        tmpl = self.ui.config(b'bugzilla', b'template')
         if not tmpl:
-            mapfile = self.ui.config('bugzilla', 'style')
+            mapfile = self.ui.config(b'bugzilla', b'style')
         if not mapfile and not tmpl:
             tmpl = _(
-                'changeset {node|short} in repo {root} refers '
-                'to bug {bug}.\ndetails:\n\t{desc|tabindent}'
+                b'changeset {node|short} in repo {root} refers '
+                b'to bug {bug}.\ndetails:\n\t{desc|tabindent}'
             )
         spec = logcmdutil.templatespec(tmpl, mapfile)
         t = logcmdutil.changesettemplater(self.ui, self.repo, spec)
@@ -1168,7 +1181,7 @@
             ctx,
             changes=ctx.changeset(),
             bug=pycompat.bytestr(bugid),
-            hgweb=self.ui.config('web', 'baseurl'),
+            hgweb=self.ui.config(b'web', b'baseurl'),
             root=self.repo.root,
             webroot=webroot(self.repo.root),
         )
@@ -1188,7 +1201,7 @@
     seen multiple times does not fill bug with duplicate data.'''
     if node is None:
         raise error.Abort(
-            _('hook type %s does not pass a changeset id') % hooktype
+            _(b'hook type %s does not pass a changeset id') % hooktype
         )
     try:
         bz = bugzilla(ui, repo)
@@ -1199,4 +1212,4 @@
                 bz.update(bug, bugs[bug], ctx)
             bz.notify(bugs, stringutil.email(ctx.user()))
     except Exception as e:
-        raise error.Abort(_('Bugzilla error: %s') % e)
+        raise error.Abort(_(b'Bugzilla error: %s') % e)
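
One concrete Python 3 pitfall in the bugzilla hunks is worth illustrating:
b','.join() only accepts bytes elements, so integer bug ids need an
explicit bytes conversion before joining, as in sql_buglist() above. A
standalone sketch:

    ids = [11, 42, 7]
    # b'%d' formats an int directly into bytes (Python 3.5+).
    buglist = b'(' + b','.join(b'%d' % i for i in ids) + b')'
    assert buglist == b'(11,42,7)'
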
--- a/hgext/censor.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/censor.py	Sun Oct 06 09:48:39 2019 -0400
@@ -42,49 +42,55 @@
 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
 # be specifying the version(s) of Mercurial they are tested with, or
 # leave the attribute unspecified.
-testedwith = 'ships-with-hg-core'
+testedwith = b'ships-with-hg-core'
 
 
 @command(
-    'censor',
+    b'censor',
     [
-        ('r', 'rev', '', _('censor file from specified revision'), _('REV')),
-        ('t', 'tombstone', '', _('replacement tombstone data'), _('TEXT')),
+        (
+            b'r',
+            b'rev',
+            b'',
+            _(b'censor file from specified revision'),
+            _(b'REV'),
+        ),
+        (b't', b'tombstone', b'', _(b'replacement tombstone data'), _(b'TEXT')),
     ],
-    _('-r REV [-t TEXT] [FILE]'),
+    _(b'-r REV [-t TEXT] [FILE]'),
     helpcategory=command.CATEGORY_MAINTENANCE,
 )
-def censor(ui, repo, path, rev='', tombstone='', **opts):
+def censor(ui, repo, path, rev=b'', tombstone=b'', **opts):
     with repo.wlock(), repo.lock():
         return _docensor(ui, repo, path, rev, tombstone, **opts)
 
 
-def _docensor(ui, repo, path, rev='', tombstone='', **opts):
+def _docensor(ui, repo, path, rev=b'', tombstone=b'', **opts):
     if not path:
-        raise error.Abort(_('must specify file path to censor'))
+        raise error.Abort(_(b'must specify file path to censor'))
     if not rev:
-        raise error.Abort(_('must specify revision to censor'))
+        raise error.Abort(_(b'must specify revision to censor'))
 
     wctx = repo[None]
 
     m = scmutil.match(wctx, (path,))
     if m.anypats() or len(m.files()) != 1:
-        raise error.Abort(_('can only specify an explicit filename'))
+        raise error.Abort(_(b'can only specify an explicit filename'))
     path = m.files()[0]
     flog = repo.file(path)
     if not len(flog):
-        raise error.Abort(_('cannot censor file with no history'))
+        raise error.Abort(_(b'cannot censor file with no history'))
 
     rev = scmutil.revsingle(repo, rev, rev).rev()
     try:
         ctx = repo[rev]
     except KeyError:
-        raise error.Abort(_('invalid revision identifier %s') % rev)
+        raise error.Abort(_(b'invalid revision identifier %s') % rev)
 
     try:
         fctx = ctx.filectx(path)
     except error.LookupError:
-        raise error.Abort(_('file does not exist at revision %s') % rev)
+        raise error.Abort(_(b'file does not exist at revision %s') % rev)
 
     fnode = fctx.filenode()
     heads = []
@@ -93,17 +99,17 @@
         if path in hc and hc.filenode(path) == fnode:
             heads.append(hc)
     if heads:
-        headlist = ', '.join([short(c.node()) for c in heads])
+        headlist = b', '.join([short(c.node()) for c in heads])
         raise error.Abort(
-            _('cannot censor file in heads (%s)') % headlist,
-            hint=_('clean/delete and commit first'),
+            _(b'cannot censor file in heads (%s)') % headlist,
+            hint=_(b'clean/delete and commit first'),
         )
 
     wp = wctx.parents()
     if ctx.node() in [p.node() for p in wp]:
         raise error.Abort(
-            _('cannot censor working directory'),
-            hint=_('clean/delete/update first'),
+            _(b'cannot censor working directory'),
+            hint=_(b'clean/delete/update first'),
         )
 
     with repo.transaction(b'censor') as tr:
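
The censor hunks also switch the rev/tombstone defaults from '' to b''.
Since b'' is falsy exactly like '', the "must specify revision" guard
keeps working unchanged; a standalone sketch:

    def docensor_sketch(rev=b''):   # sketch only, not the real _docensor
        if not rev:
            raise ValueError('must specify revision to censor')
        return rev

    assert docensor_sketch(b'4') == b'4'
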
--- a/hgext/children.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/children.py	Sun Oct 06 09:48:39 2019 -0400
@@ -33,14 +33,22 @@
 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
 # be specifying the version(s) of Mercurial they are tested with, or
 # leave the attribute unspecified.
-testedwith = 'ships-with-hg-core'
+testedwith = b'ships-with-hg-core'
 
 
 @command(
-    'children',
-    [('r', 'rev', '.', _('show children of the specified revision'), _('REV')),]
+    b'children',
+    [
+        (
+            b'r',
+            b'rev',
+            b'.',
+            _(b'show children of the specified revision'),
+            _(b'REV'),
+        ),
+    ]
     + templateopts,
-    _('hg children [-r REV] [FILE]'),
+    _(b'hg children [-r REV] [FILE]'),
     helpcategory=command.CATEGORY_CHANGE_NAVIGATION,
     inferrepo=True,
 )
@@ -62,7 +70,7 @@
 
     """
     opts = pycompat.byteskwargs(opts)
-    rev = opts.get('rev')
+    rev = opts.get(b'rev')
     ctx = scmutil.revsingle(repo, rev)
     if file_:
         fctx = repo.filectx(file_, changeid=ctx.rev())
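
The children hunk shows the byteskwargs pattern used throughout this
patch: **opts keys always arrive as native str, are re-encoded once, and
are then looked up with bytes. A standalone sketch, where
byteskwargs_sketch is a local stand-in for pycompat.byteskwargs:

    def byteskwargs_sketch(dic):
        return {k.encode('latin-1'): v for k, v in dic.items()}

    def children_sketch(**opts):
        opts = byteskwargs_sketch(opts)
        return opts.get(b'rev')

    assert children_sketch(rev=b'.') == b'.'
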
--- a/hgext/churn.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/churn.py	Sun Oct 06 09:48:39 2019 -0400
@@ -32,17 +32,17 @@
 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
 # be specifying the version(s) of Mercurial they are tested with, or
 # leave the attribute unspecified.
-testedwith = 'ships-with-hg-core'
+testedwith = b'ships-with-hg-core'
 
 
 def changedlines(ui, repo, ctx1, ctx2, fns):
     added, removed = 0, 0
     fmatch = scmutil.matchfiles(repo, fns)
-    diff = ''.join(patch.diff(repo, ctx1.node(), ctx2.node(), fmatch))
-    for l in diff.split('\n'):
-        if l.startswith("+") and not l.startswith("+++ "):
+    diff = b''.join(patch.diff(repo, ctx1.node(), ctx2.node(), fmatch))
+    for l in diff.split(b'\n'):
+        if l.startswith(b"+") and not l.startswith(b"+++ "):
             added += 1
-        elif l.startswith("-") and not l.startswith("--- "):
+        elif l.startswith(b"-") and not l.startswith(b"--- "):
             removed += 1
     return (added, removed)
 
@@ -50,17 +50,17 @@
 def countrate(ui, repo, amap, *pats, **opts):
     """Calculate stats"""
     opts = pycompat.byteskwargs(opts)
-    if opts.get('dateformat'):
+    if opts.get(b'dateformat'):
 
         def getkey(ctx):
             t, tz = ctx.date()
             date = datetime.datetime(*time.gmtime(float(t) - tz)[:6])
             return encoding.strtolocal(
-                date.strftime(encoding.strfromlocal(opts['dateformat']))
+                date.strftime(encoding.strfromlocal(opts[b'dateformat']))
             )
 
     else:
-        tmpl = opts.get('oldtemplate') or opts.get('template')
+        tmpl = opts.get(b'oldtemplate') or opts.get(b'template')
         tmpl = logcmdutil.maketemplater(ui, repo, tmpl)
 
         def getkey(ctx):
@@ -69,12 +69,12 @@
             return ui.popbuffer()
 
     progress = ui.makeprogress(
-        _('analyzing'), unit=_('revisions'), total=len(repo)
+        _(b'analyzing'), unit=_(b'revisions'), total=len(repo)
     )
     rate = {}
     df = False
-    if opts.get('date'):
-        df = dateutil.matchdate(opts['date'])
+    if opts.get(b'date'):
+        df = dateutil.matchdate(opts[b'date'])
 
     m = scmutil.match(repo[None], pats, opts)
 
@@ -85,12 +85,12 @@
 
         key = getkey(ctx).strip()
         key = amap.get(key, key)  # alias remap
-        if opts.get('changesets'):
+        if opts.get(b'changesets'):
             rate[key] = (rate.get(key, (0,))[0] + 1, 0)
         else:
             parents = ctx.parents()
             if len(parents) > 1:
-                ui.note(_('revision %d is a merge, ignoring...\n') % (rev,))
+                ui.note(_(b'revision %d is a merge, ignoring...\n') % (rev,))
                 return
 
             ctx1 = parents[0]
@@ -108,50 +108,50 @@
 
 
 @command(
-    'churn',
+    b'churn',
     [
         (
-            'r',
-            'rev',
+            b'r',
+            b'rev',
             [],
-            _('count rate for the specified revision or revset'),
-            _('REV'),
+            _(b'count rate for the specified revision or revset'),
+            _(b'REV'),
         ),
         (
-            'd',
-            'date',
-            '',
-            _('count rate for revisions matching date spec'),
-            _('DATE'),
+            b'd',
+            b'date',
+            b'',
+            _(b'count rate for revisions matching date spec'),
+            _(b'DATE'),
         ),
         (
-            't',
-            'oldtemplate',
-            '',
-            _('template to group changesets (DEPRECATED)'),
-            _('TEMPLATE'),
+            b't',
+            b'oldtemplate',
+            b'',
+            _(b'template to group changesets (DEPRECATED)'),
+            _(b'TEMPLATE'),
         ),
         (
-            'T',
-            'template',
-            '{author|email}',
-            _('template to group changesets'),
-            _('TEMPLATE'),
+            b'T',
+            b'template',
+            b'{author|email}',
+            _(b'template to group changesets'),
+            _(b'TEMPLATE'),
         ),
         (
-            'f',
-            'dateformat',
-            '',
-            _('strftime-compatible format for grouping by date'),
-            _('FORMAT'),
+            b'f',
+            b'dateformat',
+            b'',
+            _(b'strftime-compatible format for grouping by date'),
+            _(b'FORMAT'),
         ),
-        ('c', 'changesets', False, _('count rate by number of changesets')),
-        ('s', 'sort', False, _('sort by key (default: sort by count)')),
-        ('', 'diffstat', False, _('display added/removed lines separately')),
-        ('', 'aliases', '', _('file with email aliases'), _('FILE')),
+        (b'c', b'changesets', False, _(b'count rate by number of changesets')),
+        (b's', b'sort', False, _(b'sort by key (default: sort by count)')),
+        (b'', b'diffstat', False, _(b'display added/removed lines separately')),
+        (b'', b'aliases', b'', _(b'file with email aliases'), _(b'FILE')),
     ]
     + cmdutil.walkopts,
-    _("hg churn [-d DATE] [-r REV] [--aliases FILE] [FILE]"),
+    _(b"hg churn [-d DATE] [-r REV] [--aliases FILE] [FILE]"),
     helpcategory=command.CATEGORY_MAINTENANCE,
     inferrepo=True,
 )
@@ -193,21 +193,21 @@
     '''
 
     def pad(s, l):
-        return s + " " * (l - encoding.colwidth(s))
+        return s + b" " * (l - encoding.colwidth(s))
 
     amap = {}
     aliases = opts.get(r'aliases')
-    if not aliases and os.path.exists(repo.wjoin('.hgchurn')):
-        aliases = repo.wjoin('.hgchurn')
+    if not aliases and os.path.exists(repo.wjoin(b'.hgchurn')):
+        aliases = repo.wjoin(b'.hgchurn')
     if aliases:
-        for l in open(aliases, "rb"):
+        for l in open(aliases, b"rb"):
             try:
-                alias, actual = l.rsplit('=' in l and '=' or None, 1)
+                alias, actual = l.rsplit(b'=' in l and b'=' or None, 1)
                 amap[alias.strip()] = actual.strip()
             except ValueError:
                 l = l.strip()
                 if l:
-                    ui.warn(_("skipping malformed alias: %s\n") % l)
+                    ui.warn(_(b"skipping malformed alias: %s\n") % l)
                 continue
 
     rate = list(countrate(ui, repo, amap, *pats, **opts).items())
@@ -224,7 +224,7 @@
     maxname = max(len(k) for k, v in rate)
 
     ttywidth = ui.termwidth()
-    ui.debug("assuming %i character terminal\n" % ttywidth)
+    ui.debug(b"assuming %i character terminal\n" % ttywidth)
     width = ttywidth - maxname - 2 - 2 - 2
 
     if opts.get(r'diffstat'):
@@ -232,21 +232,21 @@
 
         def format(name, diffstat):
             added, removed = diffstat
-            return "%s %15s %s%s\n" % (
+            return b"%s %15s %s%s\n" % (
                 pad(name, maxname),
-                '+%d/-%d' % (added, removed),
-                ui.label('+' * charnum(added), 'diffstat.inserted'),
-                ui.label('-' * charnum(removed), 'diffstat.deleted'),
+                b'+%d/-%d' % (added, removed),
+                ui.label(b'+' * charnum(added), b'diffstat.inserted'),
+                ui.label(b'-' * charnum(removed), b'diffstat.deleted'),
             )
 
     else:
         width -= 6
 
         def format(name, count):
-            return "%s %6d %s\n" % (
+            return b"%s %6d %s\n" % (
                 pad(name, maxname),
                 sum(count),
-                '*' * charnum(sum(count)),
+                b'*' * charnum(sum(count)),
             )
 
     def charnum(count):
--- a/hgext/clonebundles.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/clonebundles.py	Sun Oct 06 09:48:39 2019 -0400
@@ -201,7 +201,7 @@
     wireprotov1server,
 )
 
-testedwith = 'ships-with-hg-core'
+testedwith = b'ships-with-hg-core'
 
 
 def capabilities(orig, repo, proto):
@@ -210,11 +210,11 @@
     # Only advertise if a manifest exists. This does add some I/O to requests.
     # But this should be cheaper than a wasted network round trip due to
     # missing file.
-    if repo.vfs.exists('clonebundles.manifest'):
-        caps.append('clonebundles')
+    if repo.vfs.exists(b'clonebundles.manifest'):
+        caps.append(b'clonebundles')
 
     return caps
 
 
 def extsetup(ui):
-    extensions.wrapfunction(wireprotov1server, '_capabilities', capabilities)
+    extensions.wrapfunction(wireprotov1server, b'_capabilities', capabilities)
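
The clonebundles wrapper shows the calling convention used by all the
wrapfunction sites in this patch: the wrapper receives the original
callable as its first argument. A standalone sketch, using
functools.partial as a stand-in for the plumbing that
extensions.wrapfunction roughly provides:

    import functools

    def _capabilities(repo, proto):       # stand-in for the wrapped function
        return [b'lookup', b'branchmap']

    def capabilities(orig, repo, proto):  # the wrapper, as in this extension
        caps = orig(repo, proto)
        caps.append(b'clonebundles')
        return caps

    wrapped = functools.partial(capabilities, _capabilities)
    assert wrapped(None, None) == [b'lookup', b'branchmap', b'clonebundles']
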
--- a/hgext/closehead.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/closehead.py	Sun Oct 06 09:48:39 2019 -0400
@@ -24,17 +24,17 @@
 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
 # be specifying the version(s) of Mercurial they are tested with, or
 # leave the attribute unspecified.
-testedwith = 'ships-with-hg-core'
+testedwith = b'ships-with-hg-core'
 
 commitopts = cmdutil.commitopts
 commitopts2 = cmdutil.commitopts2
-commitopts3 = [('r', 'rev', [], _('revision to check'), _('REV'))]
+commitopts3 = [(b'r', b'rev', [], _(b'revision to check'), _(b'REV'))]
 
 
 @command(
-    'close-head|close-heads',
+    b'close-head|close-heads',
     commitopts + commitopts2 + commitopts3,
-    _('[OPTION]... [REV]...'),
+    _(b'[OPTION]... [REV]...'),
     helpcategory=command.CATEGORY_CHANGE_MANAGEMENT,
     inferrepo=True,
 )
@@ -55,11 +55,11 @@
             text=message,
             files=[],
             filectxfn=None,
-            user=opts.get('user'),
-            date=opts.get('date'),
+            user=opts.get(b'user'),
+            date=opts.get(b'date'),
             extra=extra,
         )
-        tr = repo.transaction('commit')
+        tr = repo.transaction(b'commit')
         ret = repo.commitctx(cctx, True)
         bookmarks.update(repo, [rev, None], ret)
         cctx.markcommitted(ret)
@@ -67,11 +67,11 @@
 
     opts = pycompat.byteskwargs(opts)
 
-    revs += tuple(opts.get('rev', []))
+    revs += tuple(opts.get(b'rev', []))
     revs = scmutil.revrange(repo, revs)
 
     if not revs:
-        raise error.Abort(_('no revisions specified'))
+        raise error.Abort(_(b'no revisions specified'))
 
     heads = []
     for branch in repo.branchmap():
@@ -79,17 +79,17 @@
     heads = set(repo[h].rev() for h in heads)
     for rev in revs:
         if rev not in heads:
-            raise error.Abort(_('revision is not an open head: %d') % rev)
+            raise error.Abort(_(b'revision is not an open head: %d') % rev)
 
     message = cmdutil.logmessage(ui, opts)
     if not message:
-        raise error.Abort(_("no commit message specified with -l or -m"))
-    extra = {'close': '1'}
+        raise error.Abort(_(b"no commit message specified with -l or -m"))
+    extra = {b'close': b'1'}
 
     with repo.wlock(), repo.lock():
         for rev in revs:
             r = repo[rev]
             branch = r.branch()
-            extra['branch'] = branch
+            extra[b'branch'] = branch
             docommit(r)
     return 0
--- a/hgext/commitextras.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/commitextras.py	Sun Oct 06 09:48:39 2019 -0400
@@ -22,57 +22,64 @@
 
 cmdtable = {}
 command = registrar.command(cmdtable)
-testedwith = 'ships-with-hg-core'
+testedwith = b'ships-with-hg-core'
 
 usedinternally = {
-    'amend_source',
-    'branch',
-    'close',
-    'histedit_source',
-    'topic',
-    'rebase_source',
-    'intermediate-source',
-    '__touch-noise__',
-    'source',
-    'transplant_source',
+    b'amend_source',
+    b'branch',
+    b'close',
+    b'histedit_source',
+    b'topic',
+    b'rebase_source',
+    b'intermediate-source',
+    b'__touch-noise__',
+    b'source',
+    b'transplant_source',
 }
 
 
 def extsetup(ui):
-    entry = extensions.wrapcommand(commands.table, 'commit', _commit)
+    entry = extensions.wrapcommand(commands.table, b'commit', _commit)
     options = entry[1]
     options.append(
-        ('', 'extra', [], _('set a changeset\'s extra values'), _("KEY=VALUE"))
+        (
+            b'',
+            b'extra',
+            [],
+            _(b'set a changeset\'s extra values'),
+            _(b"KEY=VALUE"),
+        )
     )
 
 
 def _commit(orig, ui, repo, *pats, **opts):
-    if util.safehasattr(repo, 'unfiltered'):
+    if util.safehasattr(repo, b'unfiltered'):
         repo = repo.unfiltered()
 
     class repoextra(repo.__class__):
         def commit(self, *innerpats, **inneropts):
             extras = opts.get(r'extra')
             for raw in extras:
-                if '=' not in raw:
+                if b'=' not in raw:
                     msg = _(
-                        "unable to parse '%s', should follow "
-                        "KEY=VALUE format"
+                        b"unable to parse '%s', should follow "
+                        b"KEY=VALUE format"
                     )
                     raise error.Abort(msg % raw)
-                k, v = raw.split('=', 1)
+                k, v = raw.split(b'=', 1)
                 if not k:
-                    msg = _("unable to parse '%s', keys can't be empty")
+                    msg = _(b"unable to parse '%s', keys can't be empty")
                     raise error.Abort(msg % raw)
                 if re.search(br'[^\w-]', k):
                     msg = _(
-                        "keys can only contain ascii letters, digits,"
-                        " '_' and '-'"
+                        b"keys can only contain ascii letters, digits,"
+                        b" '_' and '-'"
                     )
                     raise error.Abort(msg)
                 if k in usedinternally:
                     msg = _(
-                        "key '%s' is used internally, can't be set " "manually"
+                        b"key '%s' is used internally, can't be set "
+                        b"manually"
                     )
                     raise error.Abort(msg % k)
                 inneropts[r'extra'][k] = v
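
The commitextras validation reads the same before and after
byteification; only the literals change type. A standalone sketch of the
KEY=VALUE checks in the same order (parse_extra_sketch is local to this
sketch):

    import re

    def parse_extra_sketch(raw):
        if b'=' not in raw:
            raise ValueError('should follow KEY=VALUE format')
        k, v = raw.split(b'=', 1)
        if not k:
            raise ValueError("keys can't be empty")
        if re.search(br'[^\w-]', k):   # bytes pattern keeps \w ASCII-only
            raise ValueError('invalid character in key')
        return k, v

    assert parse_extra_sketch(b'topic=py3-port') == (b'topic', b'py3-port')
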
--- a/hgext/convert/__init__.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/convert/__init__.py	Sun Oct 06 09:48:39 2019 -0400
@@ -24,60 +24,72 @@
 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
 # be specifying the version(s) of Mercurial they are tested with, or
 # leave the attribute unspecified.
-testedwith = 'ships-with-hg-core'
+testedwith = b'ships-with-hg-core'
 
 # Commands definition was moved elsewhere to ease demandload job.
 
 
 @command(
-    'convert',
+    b'convert',
     [
         (
-            '',
-            'authors',
-            '',
+            b'',
+            b'authors',
+            b'',
             _(
-                'username mapping filename (DEPRECATED) (use --authormap instead)'
+                b'username mapping filename (DEPRECATED) (use --authormap instead)'
             ),
-            _('FILE'),
+            _(b'FILE'),
         ),
-        ('s', 'source-type', '', _('source repository type'), _('TYPE')),
-        ('d', 'dest-type', '', _('destination repository type'), _('TYPE')),
-        ('r', 'rev', [], _('import up to source revision REV'), _('REV')),
-        ('A', 'authormap', '', _('remap usernames using this file'), _('FILE')),
+        (b's', b'source-type', b'', _(b'source repository type'), _(b'TYPE')),
         (
-            '',
-            'filemap',
-            '',
-            _('remap file names using contents of file'),
-            _('FILE'),
+            b'd',
+            b'dest-type',
+            b'',
+            _(b'destination repository type'),
+            _(b'TYPE'),
+        ),
+        (b'r', b'rev', [], _(b'import up to source revision REV'), _(b'REV')),
+        (
+            b'A',
+            b'authormap',
+            b'',
+            _(b'remap usernames using this file'),
+            _(b'FILE'),
         ),
         (
-            '',
-            'full',
-            None,
-            _('apply filemap changes by converting all files again'),
+            b'',
+            b'filemap',
+            b'',
+            _(b'remap file names using contents of file'),
+            _(b'FILE'),
         ),
         (
-            '',
-            'splicemap',
-            '',
-            _('splice synthesized history into place'),
-            _('FILE'),
+            b'',
+            b'full',
+            None,
+            _(b'apply filemap changes by converting all files again'),
         ),
         (
-            '',
-            'branchmap',
-            '',
-            _('change branch names while converting'),
-            _('FILE'),
+            b'',
+            b'splicemap',
+            b'',
+            _(b'splice synthesized history into place'),
+            _(b'FILE'),
         ),
-        ('', 'branchsort', None, _('try to sort changesets by branches')),
-        ('', 'datesort', None, _('try to sort changesets by date')),
-        ('', 'sourcesort', None, _('preserve source changesets order')),
-        ('', 'closesort', None, _('try to reorder closed revisions')),
+        (
+            b'',
+            b'branchmap',
+            b'',
+            _(b'change branch names while converting'),
+            _(b'FILE'),
+        ),
+        (b'', b'branchsort', None, _(b'try to sort changesets by branches')),
+        (b'', b'datesort', None, _(b'try to sort changesets by date')),
+        (b'', b'sourcesort', None, _(b'preserve source changesets order')),
+        (b'', b'closesort', None, _(b'try to reorder closed revisions')),
     ],
-    _('hg convert [OPTION]... SOURCE [DEST [REVMAP]]'),
+    _(b'hg convert [OPTION]... SOURCE [DEST [REVMAP]]'),
     norepo=True,
 )
 def convert(ui, src, dest=None, revmapfile=None, **opts):
@@ -483,34 +495,44 @@
     return convcmd.convert(ui, src, dest, revmapfile, **opts)
 
 
-@command('debugsvnlog', [], 'hg debugsvnlog', norepo=True)
+@command(b'debugsvnlog', [], b'hg debugsvnlog', norepo=True)
 def debugsvnlog(ui, **opts):
     return subversion.debugsvnlog(ui, **opts)
 
 
 @command(
-    'debugcvsps',
+    b'debugcvsps',
     [
         # Main options shared with cvsps-2.1
-        ('b', 'branches', [], _('only return changes on specified branches')),
-        ('p', 'prefix', '', _('prefix to remove from file names')),
         (
-            'r',
-            'revisions',
+            b'b',
+            b'branches',
             [],
-            _('only return changes after or between specified tags'),
+            _(b'only return changes on specified branches'),
+        ),
+        (b'p', b'prefix', b'', _(b'prefix to remove from file names')),
+        (
+            b'r',
+            b'revisions',
+            [],
+            _(b'only return changes after or between specified tags'),
         ),
-        ('u', 'update-cache', None, _("update cvs log cache")),
-        ('x', 'new-cache', None, _("create new cvs log cache")),
-        ('z', 'fuzz', 60, _('set commit time fuzz in seconds')),
-        ('', 'root', '', _('specify cvsroot')),
+        (b'u', b'update-cache', None, _(b"update cvs log cache")),
+        (b'x', b'new-cache', None, _(b"create new cvs log cache")),
+        (b'z', b'fuzz', 60, _(b'set commit time fuzz in seconds')),
+        (b'', b'root', b'', _(b'specify cvsroot')),
         # Options specific to builtin cvsps
-        ('', 'parents', '', _('show parent changesets')),
-        ('', 'ancestors', '', _('show current changeset in ancestor branches')),
+        (b'', b'parents', b'', _(b'show parent changesets')),
+        (
+            b'',
+            b'ancestors',
+            b'',
+            _(b'show current changeset in ancestor branches'),
+        ),
         # Options that are ignored for compatibility with cvsps-2.1
-        ('A', 'cvs-direct', None, _('ignored for compatibility')),
+        (b'A', b'cvs-direct', None, _(b'ignored for compatibility')),
     ],
-    _('hg debugcvsps [OPTION]... [PATH]...'),
+    _(b'hg debugcvsps [OPTION]... [PATH]...'),
     norepo=True,
 )
 def debugcvsps(ui, *args, **opts):
@@ -528,14 +550,14 @@
 
 
 def kwconverted(context, mapping, name):
-    ctx = context.resource(mapping, 'ctx')
-    rev = ctx.extra().get('convert_revision', '')
-    if rev.startswith('svn:'):
-        if name == 'svnrev':
+    ctx = context.resource(mapping, b'ctx')
+    rev = ctx.extra().get(b'convert_revision', b'')
+    if rev.startswith(b'svn:'):
+        if name == b'svnrev':
             return b"%d" % subversion.revsplit(rev)[2]
-        elif name == 'svnpath':
+        elif name == b'svnpath':
             return subversion.revsplit(rev)[1]
-        elif name == 'svnuuid':
+        elif name == b'svnuuid':
             return subversion.revsplit(rev)[0]
     return rev
 
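The convert_revision extra consumed above has the shape b'svn:<uuid>/<path>@<rev>'; subversion.revsplit decomposes it into the (uuid, path, revnum) triple indexed by keyword name. A simplified standalone sketch (the real helper lives in hgext/convert/subversion.py and is more defensive):

    # Sketch: split b'svn:<uuid>/<path>@<rev>' into (uuid, path, revnum),
    # matching the [0]/[1]/[2] indexing in kwconverted above.
    def revsplit_sketch(rev):
        head, _sep, revnum = rev.rpartition(b'@')
        scheme, slash, path = head.partition(b'/')
        return scheme[len(b'svn:'):], slash + path, int(revnum)

    extra = b'svn:a2147622-4a9f-4db4-a8d3-13562ff547b2/mytrunk@13'
    assert revsplit_sketch(extra) == (
        b'a2147622-4a9f-4db4-a8d3-13562ff547b2',
        b'/mytrunk',
        13,
    )

With the keywords registered below, a converted changeset's origin can be shown with e.g. hg log -T '{svnrev}\n'.
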
@@ -543,22 +565,22 @@
 templatekeyword = registrar.templatekeyword()
 
 
-@templatekeyword('svnrev', requires={'ctx'})
+@templatekeyword(b'svnrev', requires={b'ctx'})
 def kwsvnrev(context, mapping):
     """String. Converted subversion revision number."""
-    return kwconverted(context, mapping, 'svnrev')
+    return kwconverted(context, mapping, b'svnrev')
 
 
-@templatekeyword('svnpath', requires={'ctx'})
+@templatekeyword(b'svnpath', requires={b'ctx'})
 def kwsvnpath(context, mapping):
     """String. Converted subversion revision project path."""
-    return kwconverted(context, mapping, 'svnpath')
+    return kwconverted(context, mapping, b'svnpath')
 
 
-@templatekeyword('svnuuid', requires={'ctx'})
+@templatekeyword(b'svnuuid', requires={b'ctx'})
 def kwsvnuuid(context, mapping):
     """String. Converted subversion revision repository identifier."""
-    return kwconverted(context, mapping, 'svnuuid')
+    return kwconverted(context, mapping, b'svnuuid')
 
 
 # tell hggettext to extract docstrings from these functions:
--- a/hgext/convert/bzr.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/convert/bzr.py	Sun Oct 06 09:48:39 2019 -0400
@@ -17,7 +17,7 @@
 
 # these do not work with demandimport, blacklist
 demandimport.IGNORES.update(
-    ['bzrlib.transactions', 'bzrlib.urlutils', 'ElementPath',]
+    ['bzrlib.transactions', 'bzrlib.urlutils', 'ElementPath',]  # str module names
 )
 
 try:
@@ -35,7 +35,7 @@
 except ImportError:
     pass
 
-supportedkinds = ('file', 'symlink')
+supportedkinds = (b'file', b'symlink')
 
 
 class bzr_source(common.converter_source):
@@ -44,16 +44,16 @@
     def __init__(self, ui, repotype, path, revs=None):
         super(bzr_source, self).__init__(ui, repotype, path, revs=revs)
 
-        if not os.path.exists(os.path.join(path, '.bzr')):
+        if not os.path.exists(os.path.join(path, b'.bzr')):
             raise common.NoRepo(
-                _('%s does not look like a Bazaar repository') % path
+                _(b'%s does not look like a Bazaar repository') % path
             )
 
         try:
             # access bzrlib stuff
             bzrdir
         except NameError:
-            raise common.NoRepo(_('Bazaar modules could not be loaded'))
+            raise common.NoRepo(_(b'Bazaar modules could not be loaded'))
 
         path = os.path.abspath(path)
         self._checkrepotype(path)
@@ -61,10 +61,10 @@
             self.sourcerepo = bzrdir.BzrDir.open(path).open_repository()
         except errors.NoRepositoryPresent:
             raise common.NoRepo(
-                _('%s does not look like a Bazaar repository') % path
+                _(b'%s does not look like a Bazaar repository') % path
             )
         self._parentids = {}
-        self._saverev = ui.configbool('convert', 'bzr.saverev')
+        self._saverev = ui.configbool(b'convert', b'bzr.saverev')
 
     def _checkrepotype(self, path):
         # Lightweight checkouts detection is informational but probably
@@ -84,13 +84,13 @@
             ):
                 self.ui.warn(
                     _(
-                        'warning: lightweight checkouts may cause '
-                        'conversion failures, try with a regular '
-                        'branch instead.\n'
+                        b'warning: lightweight checkouts may cause '
+                        b'conversion failures, try with a regular '
+                        b'branch instead.\n'
                     )
                 )
         except Exception:
-            self.ui.note(_('bzr source type could not be determined\n'))
+            self.ui.note(_(b'bzr source type could not be determined\n'))
 
     def before(self):
         """Before the conversion begins, acquire a read lock
@@ -126,16 +126,16 @@
                 revid = info.rev_id
             if revid is None:
                 raise error.Abort(
-                    _('%s is not a valid revision') % self.revs[0]
+                    _(b'%s is not a valid revision') % self.revs[0]
                 )
             heads = [revid]
         # Empty repositories return 'null:', which cannot be retrieved
-        heads = [h for h in heads if h != 'null:']
+        heads = [h for h in heads if h != b'null:']
         return heads
 
     def getfile(self, name, rev):
         revtree = self.sourcerepo.revision_tree(rev)
-        fileid = revtree.path2id(name.decode(self.encoding or 'utf-8'))
+        fileid = revtree.path2id(name.decode(self.encoding or b'utf-8'))
         kind = None
         if fileid is not None:
             kind = revtree.kind(fileid)
@@ -143,11 +143,11 @@
             # the file is not available anymore - was deleted
             return None, None
         mode = self._modecache[(name, rev)]
-        if kind == 'symlink':
+        if kind == b'symlink':
             target = revtree.get_symlink_target(fileid)
             if target is None:
                 raise error.Abort(
-                    _('%s.%s symlink has no target') % (name, rev)
+                    _(b'%s.%s symlink has no target') % (name, rev)
                 )
             return target, mode
         else:
@@ -156,7 +156,7 @@
 
     def getchanges(self, version, full):
         if full:
-            raise error.Abort(_("convert from cvs does not support --full"))
+            raise error.Abort(_(b"convert from cvs does not support --full"))
         self._modecache = {}
         self._revtree = self.sourcerepo.revision_tree(version)
         # get the parentids from the cache
@@ -176,12 +176,12 @@
             parents = self._filterghosts(rev.parent_ids)
             self._parentids[version] = parents
 
-        branch = self.recode(rev.properties.get('branch-nick', u'default'))
-        if branch == 'trunk':
-            branch = 'default'
+        branch = self.recode(rev.properties.get(b'branch-nick', u'default'))
+        if branch == b'trunk':
+            branch = b'default'
         return common.commit(
             parents=parents,
-            date='%d %d' % (rev.timestamp, -rev.timezone),
+            date=b'%d %d' % (rev.timestamp, -rev.timezone),
             author=self.recode(rev.committer),
             desc=self.recode(rev.message),
             branch=branch,
@@ -248,13 +248,13 @@
 
             # bazaar tracks directories, mercurial does not, so
             # we have to rename the directory contents
-            if kind[1] == 'directory':
-                if kind[0] not in (None, 'directory'):
+            if kind[1] == b'directory':
+                if kind[0] not in (None, b'directory'):
                     # Replacing 'something' with a directory, record it
                     # so it can be removed.
                     changes.append((self.recode(paths[0]), revid))
 
-                if kind[0] == 'directory' and None not in paths:
+                if kind[0] == b'directory' and None not in paths:
                     renaming = paths[0] != paths[1]
                     # neither an add nor a delete - a move
                     # rename all directory contents manually
@@ -262,9 +262,9 @@
                     # get all child-entries of the directory
                     for name, entry in inventory.iter_entries(subdir):
                         # hg does not track directory renames
-                        if entry.kind == 'directory':
+                        if entry.kind == b'directory':
                             continue
-                        frompath = self.recode(paths[0] + '/' + name)
+                        frompath = self.recode(paths[0] + b'/' + name)
                         if frompath in seen:
                             # Already handled by a more specific change entry
                             # This is important when you have:
@@ -275,15 +275,15 @@
                         seen.add(frompath)
                         if not renaming:
                             continue
-                        topath = self.recode(paths[1] + '/' + name)
+                        topath = self.recode(paths[1] + b'/' + name)
                         # register the files as changed
                         changes.append((frompath, revid))
                         changes.append((topath, revid))
                         # add to mode cache
                         mode = (
-                            (entry.executable and 'x')
-                            or (entry.kind == 'symlink' and 's')
-                            or ''
+                            (entry.executable and b'x')
+                            or (entry.kind == b'symlink' and b's')
+                            or b''
                         )
                         self._modecache[(topath, revid)] = mode
                         # register the change as move
@@ -312,7 +312,7 @@
 
             # populate the mode cache
             kind, executable = [e[1] for e in (kind, executable)]
-            mode = (executable and 'x') or (kind == 'symlink' and 'l') or ''
+            mode = (executable and b'x') or (kind == b'symlink' and b'l') or b''
             self._modecache[(topath, revid)] = mode
             changes.append((topath, revid))
 
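Throughout this source, _modecache maps a (path, revision) pair to a one-character mode: b'x' for executable entries, b'l' for symlinks, b'' for plain files (note the directory-rename path above stores b's' for symlinks, an inconsistency carried over from the pre-byteify code). A self-contained restatement of the convention:

    # Restatement of the _modecache convention used above:
    # b'x' executable, b'l' symlink, b'' plain file.
    def entrymode(executable, kind):
        if executable:
            return b'x'
        if kind == b'symlink':
            return b'l'
        return b''

    modecache = {}
    modecache[(b'bin/tool', b'rev-1')] = entrymode(True, b'file')
    modecache[(b'README', b'rev-1')] = entrymode(False, b'file')
    assert modecache[(b'bin/tool', b'rev-1')] == b'x'
    assert modecache[(b'README', b'rev-1')] == b''
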
--- a/hgext/convert/common.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/convert/common.py	Sun Oct 06 09:48:39 2019 -0400
@@ -46,7 +46,7 @@
 
     @property
     def infile(self):
-        return self._l.infile or '<unknown>'
+        return self._l.infile or b'<unknown>'
 
     @property
     def lineno(self):
@@ -56,13 +56,13 @@
 def shlexer(data=None, filepath=None, wordchars=None, whitespace=None):
     if data is None:
         if pycompat.ispy3:
-            data = open(filepath, 'r', encoding=r'latin1')
+            data = open(filepath, 'r', encoding=r'latin1')  # py3 needs a str mode
         else:
-            data = open(filepath, 'r')
+            data = open(filepath, b'r')
     else:
         if filepath is not None:
             raise error.ProgrammingError(
-                'shlexer only accepts data or filepath, not both'
+                b'shlexer only accepts data or filepath, not both'
             )
         if pycompat.ispy3:
             data = data.decode('latin1')
@@ -87,7 +87,7 @@
     def encodearg(s):
         lines = base64.encodestring(s)
         lines = [l.splitlines()[0] for l in lines]
-        return ''.join(lines)
+        return b''.join(lines)
 
     s = pickle.dumps(args)
     return encodearg(s)
@@ -109,14 +109,14 @@
             exc = error.Abort
         else:
             exc = MissingTool
-        raise exc(_('cannot find required "%s" tool') % name)
+        raise exc(_(b'cannot find required "%s" tool') % name)
 
 
 class NoRepo(Exception):
     pass
 
 
-SKIPREV = 'SKIP'
+SKIPREV = b'SKIP'
 
 
 class commit(object):
@@ -135,8 +135,8 @@
         optparents=None,
         ctx=None,
     ):
-        self.author = author or 'unknown'
-        self.date = date or '0 0'
+        self.author = author or b'unknown'
+        self.date = date or b'0 0'
         self.desc = desc
         self.parents = parents  # will be converted and used as parents
         self.optparents = optparents or []  # will be used if already converted
@@ -160,15 +160,15 @@
         self.revs = revs
         self.repotype = repotype
 
-        self.encoding = 'utf-8'
+        self.encoding = b'utf-8'
 
-    def checkhexformat(self, revstr, mapname='splicemap'):
+    def checkhexformat(self, revstr, mapname=b'splicemap'):
         """ fails if revstr is not a 40 byte hex. mercurial and git both uses
             such format for their revision numbering
         """
         if not re.match(br'[0-9a-fA-F]{40,40}$', revstr):
             raise error.Abort(
-                _('%s entry %s is not a valid revision' ' identifier')
+                _(b'%s entry %s is not a valid revision identifier')
                 % (mapname, revstr)
             )
 
@@ -236,7 +236,7 @@
 
     def recode(self, s, encoding=None):
         if not encoding:
-            encoding = self.encoding or 'utf-8'
+            encoding = self.encoding or b'utf-8'
 
         if isinstance(s, pycompat.unicode):
             return s.encode("utf-8")
@@ -292,7 +292,7 @@
         """
         return {}
 
-    def checkrevformat(self, revstr, mapname='splicemap'):
+    def checkrevformat(self, revstr, mapname=b'splicemap'):
         """revstr is a string that describes a revision in the given
            source control system.  Return true if revstr has correct
            format.
@@ -412,20 +412,20 @@
         cmdline = [self.command, cmd] + list(args)
         for k, v in kwargs.iteritems():
             if len(k) == 1:
-                cmdline.append('-' + k)
+                cmdline.append(b'-' + k)
             else:
-                cmdline.append('--' + k.replace('_', '-'))
+                cmdline.append(b'--' + k.replace(b'_', b'-'))
             try:
                 if len(k) == 1:
-                    cmdline.append('' + v)
+                    cmdline.append(b'' + v)
                 else:
-                    cmdline[-1] += '=' + v
+                    cmdline[-1] += b'=' + v
             except TypeError:
                 pass
         cmdline = [procutil.shellquote(arg) for arg in cmdline]
         if not self.ui.debugflag:
-            cmdline += ['2>', pycompat.bytestr(os.devnull)]
-        cmdline = ' '.join(cmdline)
+            cmdline += [b'2>', pycompat.bytestr(os.devnull)]
+        cmdline = b' '.join(cmdline)
         return cmdline
 
     def _run(self, cmd, *args, **kwargs):
@@ -449,7 +449,7 @@
 
     def _dorun(self, openfunc, cmd, *args, **kwargs):
         cmdline = self._cmdline(cmd, *args, **kwargs)
-        self.ui.debug('running: %s\n' % (cmdline,))
+        self.ui.debug(b'running: %s\n' % (cmdline,))
         self.prerun()
         try:
             return openfunc(cmdline)
@@ -466,16 +466,16 @@
         p = self._run(cmd, *args, **kwargs)
         output = p.stdout.readlines()
         p.wait()
-        self.ui.debug(''.join(output))
+        self.ui.debug(b''.join(output))
         return output, p.returncode
 
-    def checkexit(self, status, output=''):
+    def checkexit(self, status, output=b''):
         if status:
             if output:
-                self.ui.warn(_('%s error:\n') % self.command)
+                self.ui.warn(_(b'%s error:\n') % self.command)
                 self.ui.warn(output)
             msg = procutil.explainexit(status)
-            raise error.Abort('%s %s' % (self.command, msg))
+            raise error.Abort(b'%s %s' % (self.command, msg))
 
     def run0(self, cmd, *args, **kwargs):
         output, status = self.run(cmd, *args, **kwargs)
@@ -484,7 +484,7 @@
 
     def runlines0(self, cmd, *args, **kwargs):
         output, status = self.runlines(cmd, *args, **kwargs)
-        self.checkexit(status, ''.join(output))
+        self.checkexit(status, b''.join(output))
         return output
 
     @propertycache
@@ -540,7 +540,7 @@
         if not self.path:
             return
         try:
-            fp = open(self.path, 'rb')
+            fp = open(self.path, 'rb')  # py3: open() mode must be str
         except IOError as err:
             if err.errno != errno.ENOENT:
                 raise
@@ -551,10 +551,10 @@
                 # Ignore blank lines
                 continue
             try:
-                key, value = line.rsplit(' ', 1)
+                key, value = line.rsplit(b' ', 1)
             except ValueError:
                 raise error.Abort(
-                    _('syntax error in %s(%d): key/value pair expected')
+                    _(b'syntax error in %s(%d): key/value pair expected')
                     % (self.path, i + 1)
                 )
             if key not in self:
@@ -565,13 +565,13 @@
     def __setitem__(self, key, value):
         if self.fp is None:
             try:
-                self.fp = open(self.path, 'ab')
+                self.fp = open(self.path, 'ab')  # py3: str mode
             except IOError as err:
                 raise error.Abort(
-                    _('could not open map file %r: %s')
+                    _(b'could not open map file %r: %s')
                     % (self.path, encoding.strtolocal(err.strerror))
                 )
-        self.fp.write(util.tonativeeol('%s %s\n' % (key, value)))
+        self.fp.write(util.tonativeeol(b'%s %s\n' % (key, value)))
         self.fp.flush()
         super(mapfile, self).__setitem__(key, value)
 
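The mapfile class above persists its state as plain b'key value\n' lines, appended as entries arrive and split from the right on load so that keys may contain spaces. A minimal standalone reader/writer for the same on-disk shape (not the mapfile class itself):

    import os

    # Minimal sketch of the mapfile format handled above: one
    # b'key value\n' line per entry, value split off from the right.
    def readmap(path):
        entries = {}
        try:
            fp = open(path, 'rb')
        except IOError:
            return entries
        for line in fp:
            line = line.rstrip(b'\n')
            if not line:
                continue  # blank lines are ignored, as above
            key, value = line.rsplit(b' ', 1)
            entries[key] = value
        fp.close()
        return entries

    def appendmap(path, key, value):
        with open(path, 'ab') as fp:
            fp.write(b'%s %s\n' % (key, value))

    appendmap('demo.map', b'key with spaces', b'0123abcd')
    assert readmap('demo.map')[b'key with spaces'] == b'0123abcd'
    os.remove('demo.map')
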
--- a/hgext/convert/convcmd.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/convert/convcmd.py	Sun Oct 06 09:48:39 2019 -0400
@@ -52,7 +52,7 @@
 svn_sink = subversion.svn_sink
 svn_source = subversion.svn_source
 
-orig_encoding = 'ascii'
+orig_encoding = b'ascii'
 
 
 def recode(s):
@@ -90,36 +90,36 @@
     # destination repository. For such commits, using a literal "default"
     # in branchmap below allows the user to map "default" to an alternate
     # default branch in the destination repository.
-    branch = branchmap.get(branch or 'default', branch)
+    branch = branchmap.get(branch or b'default', branch)
     # At some point we used "None" literal to denote the default branch,
     # attempt to use that for backward compatibility.
     if not branch:
-        branch = branchmap.get('None', branch)
+        branch = branchmap.get(b'None', branch)
     return branch
 
 
 source_converters = [
-    ('cvs', convert_cvs, 'branchsort'),
-    ('git', convert_git, 'branchsort'),
-    ('svn', svn_source, 'branchsort'),
-    ('hg', mercurial_source, 'sourcesort'),
-    ('darcs', darcs_source, 'branchsort'),
-    ('mtn', monotone_source, 'branchsort'),
-    ('gnuarch', gnuarch_source, 'branchsort'),
-    ('bzr', bzr_source, 'branchsort'),
-    ('p4', p4_source, 'branchsort'),
+    (b'cvs', convert_cvs, b'branchsort'),
+    (b'git', convert_git, b'branchsort'),
+    (b'svn', svn_source, b'branchsort'),
+    (b'hg', mercurial_source, b'sourcesort'),
+    (b'darcs', darcs_source, b'branchsort'),
+    (b'mtn', monotone_source, b'branchsort'),
+    (b'gnuarch', gnuarch_source, b'branchsort'),
+    (b'bzr', bzr_source, b'branchsort'),
+    (b'p4', p4_source, b'branchsort'),
 ]
 
 sink_converters = [
-    ('hg', mercurial_sink),
-    ('svn', svn_sink),
+    (b'hg', mercurial_sink),
+    (b'svn', svn_sink),
 ]
 
 
 def convertsource(ui, path, type, revs):
     exceptions = []
     if type and type not in [s[0] for s in source_converters]:
-        raise error.Abort(_('%s: invalid source repository type') % type)
+        raise error.Abort(_(b'%s: invalid source repository type') % type)
     for name, source, sortmode in source_converters:
         try:
             if not type or name == type:
@@ -128,22 +128,22 @@
             exceptions.append(inst)
     if not ui.quiet:
         for inst in exceptions:
-            ui.write("%s\n" % pycompat.bytestr(inst.args[0]))
-    raise error.Abort(_('%s: missing or unsupported repository') % path)
+            ui.write(b"%s\n" % pycompat.bytestr(inst.args[0]))
+    raise error.Abort(_(b'%s: missing or unsupported repository') % path)
 
 
 def convertsink(ui, path, type):
     if type and type not in [s[0] for s in sink_converters]:
-        raise error.Abort(_('%s: invalid destination repository type') % type)
+        raise error.Abort(_(b'%s: invalid destination repository type') % type)
     for name, sink in sink_converters:
         try:
             if not type or name == type:
                 return sink(ui, name, path)
         except NoRepo as inst:
-            ui.note(_("convert: %s\n") % inst)
+            ui.note(_(b"convert: %s\n") % inst)
         except MissingTool as inst:
-            raise error.Abort('%s\n' % inst)
-    raise error.Abort(_('%s: unknown repository type') % path)
+            raise error.Abort(b'%s\n' % inst)
+    raise error.Abort(_(b'%s: unknown repository type') % path)
 
 
 class progresssource(object):
@@ -151,7 +151,7 @@
         self.ui = ui
         self.source = source
         self.progress = ui.makeprogress(
-            _('getting files'), unit=_('files'), total=filecount
+            _(b'getting files'), unit=_(b'files'), total=filecount
         )
 
     def getfile(self, file, rev):
@@ -189,12 +189,12 @@
         if authorfile and os.path.exists(authorfile):
             self.readauthormap(authorfile)
         # Extend/Override with new author map if necessary
-        if opts.get('authormap'):
-            self.readauthormap(opts.get('authormap'))
+        if opts.get(b'authormap'):
+            self.readauthormap(opts.get(b'authormap'))
             self.authorfile = self.dest.authorfile()
 
-        self.splicemap = self.parsesplicemap(opts.get('splicemap'))
-        self.branchmap = mapfile(ui, opts.get('branchmap'))
+        self.splicemap = self.parsesplicemap(opts.get(b'splicemap'))
+        self.branchmap = mapfile(ui, opts.get(b'branchmap'))
 
     def parsesplicemap(self, path):
         """ check and validate the splicemap format and
@@ -211,21 +211,21 @@
             return {}
         m = {}
         try:
-            fp = open(path, 'rb')
+            fp = open(path, 'rb')  # py3: str mode
             for i, line in enumerate(util.iterfile(fp)):
                 line = line.splitlines()[0].rstrip()
                 if not line:
                     # Ignore blank lines
                     continue
                 # split line
-                lex = common.shlexer(data=line, whitespace=',')
+                lex = common.shlexer(data=line, whitespace=b',')
                 line = list(lex)
                 # check number of parents
                 if not (2 <= len(line) <= 3):
                     raise error.Abort(
                         _(
-                            'syntax error in %s(%d): child parent1'
-                            '[,parent2] expected'
+                            b'syntax error in %s(%d): child parent1'
+                            b'[,parent2] expected'
                         )
                         % (path, i + 1)
                     )
@@ -239,7 +239,7 @@
         # if file does not exist or error reading, exit
         except IOError:
             raise error.Abort(
-                _('splicemap file not found or error reading %s:') % path
+                _(b'splicemap file not found or error reading %s:') % path
             )
         return m
 
@@ -251,7 +251,7 @@
         parents = {}
         numcommits = self.source.numcommits()
         progress = self.ui.makeprogress(
-            _('scanning'), unit=_('revisions'), total=numcommits
+            _(b'scanning'), unit=_(b'revisions'), total=numcommits
         )
         while visit:
             n = visit.pop(0)
@@ -283,8 +283,8 @@
                     # Could be in source but not converted during this run
                     self.ui.warn(
                         _(
-                            'splice map revision %s is not being '
-                            'converted, ignoring\n'
+                            b'splice map revision %s is not being '
+                            b'converted, ignoring\n'
                         )
                         % c
                     )
@@ -296,7 +296,7 @@
                     continue
                 # Parent is not in dest and not being converted, not good
                 if p not in parents:
-                    raise error.Abort(_('unknown splice map parent: %s') % p)
+                    raise error.Abort(_(b'unknown splice map parent: %s') % p)
                 pc.append(p)
             parents[c] = pc
 
@@ -369,7 +369,7 @@
         def makeclosesorter():
             """Close order sort."""
             keyfn = lambda n: (
-                'close' not in self.commitcache[n].extra,
+                b'close' not in self.commitcache[n].extra,
                 self.commitcache[n].sortkey,
             )
 
@@ -392,16 +392,16 @@
 
             return picknext
 
-        if sortmode == 'branchsort':
+        if sortmode == b'branchsort':
             picknext = makebranchsorter()
-        elif sortmode == 'datesort':
+        elif sortmode == b'datesort':
             picknext = makedatesorter()
-        elif sortmode == 'sourcesort':
+        elif sortmode == b'sourcesort':
             picknext = makesourcesorter()
-        elif sortmode == 'closesort':
+        elif sortmode == b'closesort':
             picknext = makeclosesorter()
         else:
-            raise error.Abort(_('unknown sort mode: %s') % sortmode)
+            raise error.Abort(_(b'unknown sort mode: %s') % sortmode)
 
         children, actives = mapchildren(parents)
 
@@ -420,7 +420,7 @@
                     pendings[c].remove(n)
                 except ValueError:
                     raise error.Abort(
-                        _('cycle detected between %s and %s')
+                        _(b'cycle detected between %s and %s')
                         % (recode(c), recode(n))
                     )
                 if not pendings[c]:
@@ -429,45 +429,47 @@
                     pendings[c] = None
 
         if len(s) != len(parents):
-            raise error.Abort(_("not all revisions were sorted"))
+            raise error.Abort(_(b"not all revisions were sorted"))
 
         return s
 
     def writeauthormap(self):
         authorfile = self.authorfile
         if authorfile:
-            self.ui.status(_('writing author map file %s\n') % authorfile)
-            ofile = open(authorfile, 'wb+')
+            self.ui.status(_(b'writing author map file %s\n') % authorfile)
+            ofile = open(authorfile, 'wb+')  # py3: str mode
             for author in self.authors:
                 ofile.write(
-                    util.tonativeeol("%s=%s\n" % (author, self.authors[author]))
+                    util.tonativeeol(
+                        b"%s=%s\n" % (author, self.authors[author])
+                    )
                 )
             ofile.close()
 
     def readauthormap(self, authorfile):
-        afile = open(authorfile, 'rb')
+        afile = open(authorfile, 'rb')  # py3: str mode
         for line in afile:
 
             line = line.strip()
-            if not line or line.startswith('#'):
+            if not line or line.startswith(b'#'):
                 continue
 
             try:
-                srcauthor, dstauthor = line.split('=', 1)
+                srcauthor, dstauthor = line.split(b'=', 1)
             except ValueError:
-                msg = _('ignoring bad line in author map file %s: %s\n')
+                msg = _(b'ignoring bad line in author map file %s: %s\n')
                 self.ui.warn(msg % (authorfile, line.rstrip()))
                 continue
 
             srcauthor = srcauthor.strip()
             dstauthor = dstauthor.strip()
             if self.authors.get(srcauthor) in (None, dstauthor):
-                msg = _('mapping author %s to %s\n')
+                msg = _(b'mapping author %s to %s\n')
                 self.ui.debug(msg % (srcauthor, dstauthor))
                 self.authors[srcauthor] = dstauthor
                 continue
 
-            m = _('overriding mapping for author %s, was %s, will be %s\n')
+            m = _(b'overriding mapping for author %s, was %s, will be %s\n')
             self.ui.status(m % (srcauthor, self.authors[srcauthor], dstauthor))
 
         afile.close()
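
readauthormap above accepts b'source author = destination author' lines, splitting on the first b'=' and skipping blanks and b'#' comments. A standalone sketch of the same parse:

    # Sketch of the authormap line format parsed above: first b'='
    # separates source from destination; b'#' starts a comment line.
    def parseauthormap(data):
        authors = {}
        for line in data.splitlines():
            line = line.strip()
            if not line or line.startswith(b'#'):
                continue
            try:
                src, dst = line.split(b'=', 1)
            except ValueError:
                continue  # the real code warns and skips bad lines
            authors[src.strip()] = dst.strip()
        return authors

    sample = b'# comment\njrandom = J. Random Hacker <jr@example.com>\n'
    assert parseauthormap(sample) == {
        b'jrandom': b'J. Random Hacker <jr@example.com>',
    }
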
@@ -481,7 +483,7 @@
 
     def copy(self, rev):
         commit = self.commitcache[rev]
-        full = self.opts.get('full')
+        full = self.opts.get(b'full')
         changes = self.source.getchanges(rev, full)
         if isinstance(changes, bytes):
             if changes == SKIPREV:
@@ -503,8 +505,8 @@
         try:
             parents = self.splicemap[rev]
             self.ui.status(
-                _('spliced in %s as parents of %s\n')
-                % (_(' and ').join(parents), rev)
+                _(b'spliced in %s as parents of %s\n')
+                % (_(b' and ').join(parents), rev)
             )
             parents = [self.map.get(p, p) for p in parents]
         except KeyError:
@@ -536,34 +538,34 @@
             self.source.before()
             self.dest.before()
             self.source.setrevmap(self.map)
-            self.ui.status(_("scanning source...\n"))
+            self.ui.status(_(b"scanning source...\n"))
             heads = self.source.getheads()
             parents = self.walktree(heads)
             self.mergesplicemap(parents, self.splicemap)
-            self.ui.status(_("sorting...\n"))
+            self.ui.status(_(b"sorting...\n"))
             t = self.toposort(parents, sortmode)
             num = len(t)
             c = None
 
-            self.ui.status(_("converting...\n"))
+            self.ui.status(_(b"converting...\n"))
             progress = self.ui.makeprogress(
-                _('converting'), unit=_('revisions'), total=len(t)
+                _(b'converting'), unit=_(b'revisions'), total=len(t)
             )
             for i, c in enumerate(t):
                 num -= 1
                 desc = self.commitcache[c].desc
-                if "\n" in desc:
+                if b"\n" in desc:
                     desc = desc.splitlines()[0]
                 # convert log message to local encoding without using
                 # tolocal() because the encoding.encoding convert()
                 # uses is 'utf-8'
-                self.ui.status("%d %s\n" % (num, recode(desc)))
-                self.ui.note(_("source: %s\n") % recode(c))
+                self.ui.status(b"%d %s\n" % (num, recode(desc)))
+                self.ui.note(_(b"source: %s\n") % recode(c))
                 progress.update(i)
                 self.copy(c)
             progress.complete()
 
-            if not self.ui.configbool('convert', 'skiptags'):
+            if not self.ui.configbool(b'convert', b'skiptags'):
                 tags = self.source.gettags()
                 ctags = {}
                 for k in tags:
@@ -610,45 +612,47 @@
     opts = pycompat.byteskwargs(opts)
     global orig_encoding
     orig_encoding = encoding.encoding
-    encoding.encoding = 'UTF-8'
+    encoding.encoding = b'UTF-8'
 
     # support --authors as an alias for --authormap
-    if not opts.get('authormap'):
-        opts['authormap'] = opts.get('authors')
+    if not opts.get(b'authormap'):
+        opts[b'authormap'] = opts.get(b'authors')
 
     if not dest:
-        dest = hg.defaultdest(src) + "-hg"
-        ui.status(_("assuming destination %s\n") % dest)
+        dest = hg.defaultdest(src) + b"-hg"
+        ui.status(_(b"assuming destination %s\n") % dest)
 
-    destc = convertsink(ui, dest, opts.get('dest_type'))
+    destc = convertsink(ui, dest, opts.get(b'dest_type'))
     destc = scmutil.wrapconvertsink(destc)
 
     try:
         srcc, defaultsort = convertsource(
-            ui, src, opts.get('source_type'), opts.get('rev')
+            ui, src, opts.get(b'source_type'), opts.get(b'rev')
         )
     except Exception:
         for path in destc.created:
             shutil.rmtree(path, True)
         raise
 
-    sortmodes = ('branchsort', 'datesort', 'sourcesort', 'closesort')
+    sortmodes = (b'branchsort', b'datesort', b'sourcesort', b'closesort')
     sortmode = [m for m in sortmodes if opts.get(m)]
     if len(sortmode) > 1:
-        raise error.Abort(_('more than one sort mode specified'))
+        raise error.Abort(_(b'more than one sort mode specified'))
     if sortmode:
         sortmode = sortmode[0]
     else:
         sortmode = defaultsort
 
-    if sortmode == 'sourcesort' and not srcc.hasnativeorder():
+    if sortmode == b'sourcesort' and not srcc.hasnativeorder():
         raise error.Abort(
-            _('--sourcesort is not supported by this data source')
+            _(b'--sourcesort is not supported by this data source')
         )
-    if sortmode == 'closesort' and not srcc.hasnativeclose():
-        raise error.Abort(_('--closesort is not supported by this data source'))
+    if sortmode == b'closesort' and not srcc.hasnativeclose():
+        raise error.Abort(
+            _(b'--closesort is not supported by this data source')
+        )
 
-    fmap = opts.get('filemap')
+    fmap = opts.get(b'filemap')
     if fmap:
         srcc = filemap.filemap_source(ui, srcc, fmap)
         destc.setfilemapmode(True)
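
parsesplicemap above reads b'child parent1[,parent2]' lines, handing each to common.shlexer with a comma added to the whitespace set so quoted identifiers and comma-separated parents both tokenize cleanly. A rough standalone equivalent on top of the stdlib shlex (which, like shlexer on Python 3, wants the bytes decoded as latin-1):

    import shlex

    # Rough equivalent of the splicemap tokenization above:
    # b'child parent1[,parent2]' with b',' treated as whitespace.
    def parsespliceline(line):
        lex = shlex.shlex(line.decode('latin1'), posix=True)
        lex.whitespace += ','
        lex.whitespace_split = True
        tokens = [t.encode('latin1') for t in lex]
        if not 2 <= len(tokens) <= 3:
            raise ValueError('child parent1[,parent2] expected')
        return tokens[0], tokens[1:]

    child, parents = parsespliceline(b'deadbeef cafebabe,baddcafe')
    assert child == b'deadbeef'
    assert parents == [b'cafebabe', b'baddcafe']
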
--- a/hgext/convert/cvs.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/convert/cvs.py	Sun Oct 06 09:48:39 2019 -0400
@@ -39,19 +39,19 @@
     def __init__(self, ui, repotype, path, revs=None):
         super(convert_cvs, self).__init__(ui, repotype, path, revs=revs)
 
-        cvs = os.path.join(path, "CVS")
+        cvs = os.path.join(path, b"CVS")
         if not os.path.exists(cvs):
-            raise NoRepo(_("%s does not look like a CVS checkout") % path)
+            raise NoRepo(_(b"%s does not look like a CVS checkout") % path)
 
-        checktool('cvs')
+        checktool(b'cvs')
 
         self.changeset = None
         self.files = {}
         self.tags = {}
         self.lastbranch = {}
         self.socket = None
-        self.cvsroot = open(os.path.join(cvs, "Root"), 'rb').read()[:-1]
-        self.cvsrepo = open(os.path.join(cvs, "Repository"), 'rb').read()[:-1]
+        self.cvsroot = open(os.path.join(cvs, b"Root"), b'rb').read()[:-1]
+        self.cvsrepo = open(os.path.join(cvs, b"Repository"), b'rb').read()[:-1]
         self.encoding = encoding.encoding
 
         self._connect()
@@ -65,7 +65,10 @@
         if self.revs:
             if len(self.revs) > 1:
                 raise error.Abort(
-                    _('cvs source does not support specifying ' 'multiple revs')
+                    _(
+                        b'cvs source does not support specifying '
+                        b'multiple revs'
+                    )
                 )
             # TODO: handle tags
             try:
@@ -73,23 +76,23 @@
                 maxrev = int(self.revs[0])
             except ValueError:
                 raise error.Abort(
-                    _('revision %s is not a patchset number') % self.revs[0]
+                    _(b'revision %s is not a patchset number') % self.revs[0]
                 )
 
         d = encoding.getcwd()
         try:
             os.chdir(self.path)
 
-            cache = 'update'
-            if not self.ui.configbool('convert', 'cvsps.cache'):
+            cache = b'update'
+            if not self.ui.configbool(b'convert', b'cvsps.cache'):
                 cache = None
             db = cvsps.createlog(self.ui, cache=cache)
             db = cvsps.createchangeset(
                 self.ui,
                 db,
-                fuzz=int(self.ui.config('convert', 'cvsps.fuzz')),
-                mergeto=self.ui.config('convert', 'cvsps.mergeto'),
-                mergefrom=self.ui.config('convert', 'cvsps.mergefrom'),
+                fuzz=int(self.ui.config(b'convert', b'cvsps.fuzz')),
+                mergeto=self.ui.config(b'convert', b'cvsps.mergeto'),
+                mergefrom=self.ui.config(b'convert', b'cvsps.mergefrom'),
             )
 
             for cs in db:
@@ -99,16 +102,16 @@
                 cs.author = self.recode(cs.author)
                 self.lastbranch[cs.branch] = id
                 cs.comment = self.recode(cs.comment)
-                if self.ui.configbool('convert', 'localtimezone'):
+                if self.ui.configbool(b'convert', b'localtimezone'):
                     cs.date = makedatetimestamp(cs.date[0])
-                date = dateutil.datestr(cs.date, '%Y-%m-%d %H:%M:%S %1%2')
+                date = dateutil.datestr(cs.date, b'%Y-%m-%d %H:%M:%S %1%2')
                 self.tags.update(dict.fromkeys(cs.tags, id))
 
                 files = {}
                 for f in cs.entries:
-                    files[f.file] = "%s%s" % (
-                        '.'.join([(b"%d" % x) for x in f.revision]),
-                        ['', '(DEAD)'][f.dead],
+                    files[f.file] = b"%s%s" % (
+                        b'.'.join([(b"%d" % x) for x in f.revision]),
+                        [b'', b'(DEAD)'][f.dead],
                     )
 
                 # add current commit to set
@@ -117,7 +120,7 @@
                     date=date,
                     parents=[(b"%d" % p.id) for p in cs.parents],
                     desc=cs.comment,
-                    branch=cs.branch or '',
+                    branch=cs.branch or b'',
                 )
                 self.changeset[id] = c
                 self.files[id] = files
@@ -130,38 +133,38 @@
         root = self.cvsroot
         conntype = None
         user, host = None, None
-        cmd = ['cvs', 'server']
+        cmd = [b'cvs', b'server']
 
-        self.ui.status(_("connecting to %s\n") % root)
+        self.ui.status(_(b"connecting to %s\n") % root)
 
-        if root.startswith(":pserver:"):
+        if root.startswith(b":pserver:"):
             root = root[9:]
             m = re.match(
                 r'(?:(.*?)(?::(.*?))?@)?([^:\/]*)(?::(\d*))?(.*)', root
             )
             if m:
-                conntype = "pserver"
+                conntype = b"pserver"
                 user, passw, serv, port, root = m.groups()
                 if not user:
-                    user = "anonymous"
+                    user = b"anonymous"
                 if not port:
                     port = 2401
                 else:
                     port = int(port)
-                format0 = ":pserver:%s@%s:%s" % (user, serv, root)
-                format1 = ":pserver:%s@%s:%d%s" % (user, serv, port, root)
+                format0 = b":pserver:%s@%s:%s" % (user, serv, root)
+                format1 = b":pserver:%s@%s:%d%s" % (user, serv, port, root)
 
                 if not passw:
-                    passw = "A"
-                    cvspass = os.path.expanduser("~/.cvspass")
+                    passw = b"A"
+                    cvspass = os.path.expanduser(b"~/.cvspass")
                     try:
-                        pf = open(cvspass, 'rb')
+                        pf = open(cvspass, 'rb')  # py3: str mode
                         for line in pf.read().splitlines():
-                            part1, part2 = line.split(' ', 1)
+                            part1, part2 = line.split(b' ', 1)
                             # /1 :pserver:user@example.com:2401/cvsroot/foo
                             # Ah<Z
-                            if part1 == '/1':
-                                part1, part2 = part2.split(' ', 1)
+                            if part1 == b'/1':
+                                part1, part2 = part2.split(b' ', 1)
                                 format = format1
                             # :pserver:user@example.com:/cvsroot/foo Ah<Z
                             else:
@@ -179,72 +182,72 @@
                 sck = socket.socket()
                 sck.connect((serv, port))
                 sck.send(
-                    "\n".join(
+                    b"\n".join(
                         [
-                            "BEGIN AUTH REQUEST",
+                            b"BEGIN AUTH REQUEST",
                             root,
                             user,
                             passw,
-                            "END AUTH REQUEST",
-                            "",
+                            b"END AUTH REQUEST",
+                            b"",
                         ]
                     )
                 )
-                if sck.recv(128) != "I LOVE YOU\n":
-                    raise error.Abort(_("CVS pserver authentication failed"))
+                if sck.recv(128) != b"I LOVE YOU\n":
+                    raise error.Abort(_(b"CVS pserver authentication failed"))
 
-                self.writep = self.readp = sck.makefile('r+')
+                self.writep = self.readp = sck.makefile('rwb')  # py3: str, binary mode
 
-        if not conntype and root.startswith(":local:"):
-            conntype = "local"
+        if not conntype and root.startswith(b":local:"):
+            conntype = b"local"
             root = root[7:]
 
         if not conntype:
             # :ext:user@host/home/user/path/to/cvsroot
-            if root.startswith(":ext:"):
+            if root.startswith(b":ext:"):
                 root = root[5:]
             m = re.match(br'(?:([^@:/]+)@)?([^:/]+):?(.*)', root)
             # Do not take Windows path "c:\foo\bar" for a connection string
             if os.path.isdir(root) or not m:
-                conntype = "local"
+                conntype = b"local"
             else:
-                conntype = "rsh"
+                conntype = b"rsh"
                 user, host, root = m.group(1), m.group(2), m.group(3)
 
-        if conntype != "pserver":
-            if conntype == "rsh":
-                rsh = encoding.environ.get("CVS_RSH") or "ssh"
+        if conntype != b"pserver":
+            if conntype == b"rsh":
+                rsh = encoding.environ.get(b"CVS_RSH") or b"ssh"
                 if user:
-                    cmd = [rsh, '-l', user, host] + cmd
+                    cmd = [rsh, b'-l', user, host] + cmd
                 else:
                     cmd = [rsh, host] + cmd
 
             # popen2 does not support argument lists under Windows
             cmd = [procutil.shellquote(arg) for arg in cmd]
-            cmd = procutil.quotecommand(' '.join(cmd))
+            cmd = procutil.quotecommand(b' '.join(cmd))
             self.writep, self.readp = procutil.popen2(cmd)
 
         self.realroot = root
 
-        self.writep.write("Root %s\n" % root)
+        self.writep.write(b"Root %s\n" % root)
         self.writep.write(
-            "Valid-responses ok error Valid-requests Mode"
-            " M Mbinary E Checked-in Created Updated"
-            " Merged Removed\n"
+            b"Valid-responses ok error Valid-requests Mode"
+            b" M Mbinary E Checked-in Created Updated"
+            b" Merged Removed\n"
         )
-        self.writep.write("valid-requests\n")
+        self.writep.write(b"valid-requests\n")
         self.writep.flush()
         r = self.readp.readline()
-        if not r.startswith("Valid-requests"):
+        if not r.startswith(b"Valid-requests"):
             raise error.Abort(
                 _(
-                    'unexpected response from CVS server '
-                    '(expected "Valid-requests", but got %r)'
+                    b'unexpected response from CVS server '
+                    b'(expected "Valid-requests", but got %r)'
                 )
                 % r
             )
-        if "UseUnchanged" in r:
-            self.writep.write("UseUnchanged\n")
+        if b"UseUnchanged" in r:
+            self.writep.write(b"UseUnchanged\n")
             self.writep.flush()
             self.readp.readline()
 
@@ -262,55 +265,55 @@
                 data = fp.read(min(count, chunksize))
                 if not data:
                     raise error.Abort(
-                        _("%d bytes missing from remote file") % count
+                        _(b"%d bytes missing from remote file") % count
                     )
                 count -= len(data)
                 output.write(data)
             return output.getvalue()
 
         self._parse()
-        if rev.endswith("(DEAD)"):
+        if rev.endswith(b"(DEAD)"):
             return None, None
 
-        args = ("-N -P -kk -r %s --" % rev).split()
-        args.append(self.cvsrepo + '/' + name)
+        args = (b"-N -P -kk -r %s --" % rev).split()
+        args.append(self.cvsrepo + b'/' + name)
         for x in args:
-            self.writep.write("Argument %s\n" % x)
-        self.writep.write("Directory .\n%s\nco\n" % self.realroot)
+            self.writep.write(b"Argument %s\n" % x)
+        self.writep.write(b"Directory .\n%s\nco\n" % self.realroot)
         self.writep.flush()
 
-        data = ""
+        data = b""
         mode = None
         while True:
             line = self.readp.readline()
-            if line.startswith("Created ") or line.startswith("Updated "):
+            if line.startswith(b"Created ") or line.startswith(b"Updated "):
                 self.readp.readline()  # path
                 self.readp.readline()  # entries
                 mode = self.readp.readline()[:-1]
                 count = int(self.readp.readline()[:-1])
                 data = chunkedread(self.readp, count)
-            elif line.startswith(" "):
+            elif line.startswith(b" "):
                 data += line[1:]
-            elif line.startswith("M "):
+            elif line.startswith(b"M "):
                 pass
-            elif line.startswith("Mbinary "):
+            elif line.startswith(b"Mbinary "):
                 count = int(self.readp.readline()[:-1])
                 data = chunkedread(self.readp, count)
             else:
-                if line == "ok\n":
+                if line == b"ok\n":
                     if mode is None:
-                        raise error.Abort(_('malformed response from CVS'))
-                    return (data, "x" in mode and "x" or "")
-                elif line.startswith("E "):
-                    self.ui.warn(_("cvs server: %s\n") % line[2:])
-                elif line.startswith("Remove"):
+                        raise error.Abort(_(b'malformed response from CVS'))
+                    return (data, b"x" in mode and b"x" or b"")
+                elif line.startswith(b"E "):
+                    self.ui.warn(_(b"cvs server: %s\n") % line[2:])
+                elif line.startswith(b"Remove"):
                     self.readp.readline()
                 else:
-                    raise error.Abort(_("unknown CVS response: %s") % line)
+                    raise error.Abort(_(b"unknown CVS response: %s") % line)
 
     def getchanges(self, rev, full):
         if full:
-            raise error.Abort(_("convert from cvs does not support --full"))
+            raise error.Abort(_(b"convert from cvs does not support --full"))
         self._parse()
         return sorted(self.files[rev].iteritems()), {}, set()
 
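The pserver branch above frames authentication as a fixed five-line request (plus terminating newline) and succeeds only on the literal b'I LOVE YOU\n' reply. A sketch of just the request framing, with no socket traffic (the password is the scrambled form read from ~/.cvspass above):

    # Sketch of the CVS pserver handshake request assembled above.
    def authrequest(root, user, passw):
        return b'\n'.join([
            b'BEGIN AUTH REQUEST',
            root,
            user,
            passw,  # already scrambled, as stored in ~/.cvspass
            b'END AUTH REQUEST',
            b'',    # trailing entry yields the final newline
        ])

    req = authrequest(b'/cvsroot/foo', b'anonymous', b'A')
    assert req.startswith(b'BEGIN AUTH REQUEST\n')
    assert req.endswith(b'END AUTH REQUEST\n')
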
--- a/hgext/convert/cvsps.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/convert/cvsps.py	Sun Oct 06 09:48:39 2019 -0400
@@ -92,18 +92,18 @@
     # of the '/' char after the '@' is located. The solution is the rest of the
     # string after that '/' sign including it
 
-    parts = cvspath.split(':')
-    atposition = parts[-1].find('@')
+    parts = cvspath.split(b':')
+    atposition = parts[-1].find(b'@')
     start = 0
 
     if atposition != -1:
         start = atposition
 
-    repopath = parts[-1][parts[-1].find('/', start) :]
+    repopath = parts[-1][parts[-1].find(b'/', start) :]
     return repopath
 
 
-def createlog(ui, directory=None, root="", rlog=True, cache=None):
+def createlog(ui, directory=None, root=b"", rlog=True, cache=None):
     '''Collect the CVS rlog'''
 
     # Because we store many duplicate commit log messages, reusing strings
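
The getrepopath helper byteified in the hunk above extracts the server-side path from a CVSROOT string: take the last b':'-separated field and keep everything from the first b'/' after any b'user@host' prefix. Restated standalone:

    # Standalone restatement of getrepopath's slicing (see the hunk above).
    def getrepopath_sketch(cvsroot):
        tail = cvsroot.split(b':')[-1]
        at = tail.find(b'@')
        start = at if at != -1 else 0
        return tail[tail.find(b'/', start):]

    root = b':pserver:anon@cvs.example.com:/cvsroot/foo'
    assert getrepopath_sketch(root) == b'/cvsroot/foo'
    assert getrepopath_sketch(b'/home/user/cvsroot') == b'/home/user/cvsroot'
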
@@ -111,10 +111,10 @@
     _scache = {}
 
     def scache(s):
-        "return a shared version of a string"
+        b"return a shared version of a string"
         return _scache.setdefault(s, s)
 
-    ui.status(_('collecting CVS rlog\n'))
+    ui.status(_(b'collecting CVS rlog\n'))
 
     log = []  # list of logentry objects containing the CVS state
 
@@ -144,39 +144,39 @@
 
     file_added_re = re.compile(br'file [^/]+ was (initially )?added on branch')
 
-    prefix = ''  # leading path to strip of what we get from CVS
+    prefix = b''  # leading path to strip off what we get from CVS
 
     if directory is None:
         # Current working directory
 
         # Get the real directory in the repository
         try:
-            with open(os.path.join(b'CVS', b'Repository'), 'rb') as f:
+            with open(os.path.join(b'CVS', b'Repository'), 'rb') as f:
                 prefix = f.read().strip()
             directory = prefix
-            if prefix == ".":
-                prefix = ""
+            if prefix == b".":
+                prefix = b""
         except IOError:
-            raise logerror(_('not a CVS sandbox'))
+            raise logerror(_(b'not a CVS sandbox'))
 
         if prefix and not prefix.endswith(pycompat.ossep):
             prefix += pycompat.ossep
 
         # Use the Root file in the sandbox, if it exists
         try:
-            root = open(os.path.join('CVS', 'Root'), 'rb').read().strip()
+            root = open(os.path.join(b'CVS', b'Root'), 'rb').read().strip()
         except IOError:
             pass
 
     if not root:
-        root = encoding.environ.get('CVSROOT', '')
+        root = encoding.environ.get(b'CVSROOT', b'')
 
     # read log cache if one exists
     oldlog = []
     date = None
 
     if cache:
-        cachedir = os.path.expanduser('~/.hg.cvsps')
+        cachedir = os.path.expanduser(b'~/.hg.cvsps')
         if not os.path.exists(cachedir):
             os.mkdir(cachedir)
 
@@ -189,50 +189,50 @@
         # and
         #    /pserver/user/server/path
         # are mapped to different cache file names.
-        cachefile = root.split(":") + [directory, "cache"]
-        cachefile = ['-'.join(re.findall(br'\w+', s)) for s in cachefile if s]
+        cachefile = root.split(b":") + [directory, b"cache"]
+        cachefile = [b'-'.join(re.findall(br'\w+', s)) for s in cachefile if s]
         cachefile = os.path.join(
-            cachedir, '.'.join([s for s in cachefile if s])
+            cachedir, b'.'.join([s for s in cachefile if s])
         )
 
-    if cache == 'update':
+    if cache == b'update':
         try:
-            ui.note(_('reading cvs log cache %s\n') % cachefile)
-            oldlog = pickle.load(open(cachefile, 'rb'))
+            ui.note(_(b'reading cvs log cache %s\n') % cachefile)
+            oldlog = pickle.load(open(cachefile, 'rb'))  # py3: str mode
             for e in oldlog:
                 if not (
-                    util.safehasattr(e, 'branchpoints')
-                    and util.safehasattr(e, 'commitid')
-                    and util.safehasattr(e, 'mergepoint')
+                    util.safehasattr(e, 'branchpoints')  # attr names are str
+                    and util.safehasattr(e, 'commitid')
+                    and util.safehasattr(e, 'mergepoint')
                 ):
-                    ui.status(_('ignoring old cache\n'))
+                    ui.status(_(b'ignoring old cache\n'))
                     oldlog = []
                     break
 
-            ui.note(_('cache has %d log entries\n') % len(oldlog))
+            ui.note(_(b'cache has %d log entries\n') % len(oldlog))
         except Exception as e:
-            ui.note(_('error reading cache: %r\n') % e)
+            ui.note(_(b'error reading cache: %r\n') % e)
 
         if oldlog:
             date = oldlog[-1].date  # last commit date as a (time,tz) tuple
-            date = dateutil.datestr(date, '%Y/%m/%d %H:%M:%S %1%2')
+            date = dateutil.datestr(date, b'%Y/%m/%d %H:%M:%S %1%2')
 
     # build the CVS commandline
-    cmd = ['cvs', '-q']
+    cmd = [b'cvs', b'-q']
     if root:
-        cmd.append('-d%s' % root)
+        cmd.append(b'-d%s' % root)
         p = util.normpath(getrepopath(root))
-        if not p.endswith('/'):
-            p += '/'
+        if not p.endswith(b'/'):
+            p += b'/'
         if prefix:
             # looks like normpath replaces "" by "."
             prefix = p + util.normpath(prefix)
         else:
             prefix = p
-    cmd.append(['log', 'rlog'][rlog])
+    cmd.append([b'log', b'rlog'][rlog])
     if date:
         # no space between option and date string
-        cmd.append('-d>%s' % date)
+        cmd.append(b'-d>%s' % date)
     cmd.append(directory)
 
     # state machine begins here
@@ -243,17 +243,17 @@
     store = False  # set when a new record can be appended
 
     cmd = [procutil.shellquote(arg) for arg in cmd]
-    ui.note(_("running %s\n") % (' '.join(cmd)))
-    ui.debug("prefix=%r directory=%r root=%r\n" % (prefix, directory, root))
+    ui.note(_(b"running %s\n") % (b' '.join(cmd)))
+    ui.debug(b"prefix=%r directory=%r root=%r\n" % (prefix, directory, root))
 
-    pfp = procutil.popen(' '.join(cmd), 'rb')
+    pfp = procutil.popen(b' '.join(cmd), b'rb')
     peek = util.fromnativeeol(pfp.readline())
     while True:
         line = peek
-        if line == '':
+        if line == b'':
             break
         peek = util.fromnativeeol(pfp.readline())
-        if line.endswith('\n'):
+        if line.endswith(b'\n'):
             line = line[:-1]
         # ui.debug('state=%d line=%r\n' % (state, line))
 
@@ -267,12 +267,12 @@
                     filename = util.normpath(rcs[:-2])
                     if filename.startswith(prefix):
                         filename = filename[len(prefix) :]
-                    if filename.startswith('/'):
+                    if filename.startswith(b'/'):
                         filename = filename[1:]
-                    if filename.startswith('Attic/'):
+                    if filename.startswith(b'Attic/'):
                         filename = filename[6:]
                     else:
-                        filename = filename.replace('/Attic/', '/')
+                        filename = filename.replace(b'/Attic/', b'/')
                     state = 2
                     continue
                 state = 1
@@ -289,7 +289,7 @@
         elif state == 1:
             # expect 'Working file' (only when using log instead of rlog)
             match = re_10.match(line)
-            assert match, _('RCS file must be followed by working file')
+            assert match, _(b'RCS file must be followed by working file')
             filename = util.normpath(match.group(1))
             state = 2
 
@@ -303,7 +303,7 @@
             # read the symbolic names and store as tags
             match = re_30.match(line)
             if match:
-                rev = [int(x) for x in match.group(2).split('.')]
+                rev = [int(x) for x in match.group(2).split(b'.')]
 
                 # Convert magic branch number to an odd-numbered one
                 revn = len(rev)
@@ -327,7 +327,7 @@
                 state = 5
             else:
                 assert not re_32.match(line), _(
-                    'must have at least ' 'some revisions'
+                    b'must have at least some revisions'
                 )
 
         elif state == 5:
@@ -335,11 +335,11 @@
             # we create the logentry here from values stored in states 0 to 4,
             # as this state is re-entered for subsequent revisions of a file.
             match = re_50.match(line)
-            assert match, _('expected revision number')
+            assert match, _(b'expected revision number')
             e = logentry(
                 rcs=scache(rcs),
                 file=scache(filename),
-                revision=tuple([int(x) for x in match.group(1).split('.')]),
+                revision=tuple([int(x) for x in match.group(1).split(b'.')]),
                 branches=[],
                 parent=None,
                 commitid=None,
@@ -352,21 +352,25 @@
         elif state == 6:
             # expecting date, author, state, lines changed
             match = re_60.match(line)
-            assert match, _('revision must be followed by date line')
+            assert match, _(b'revision must be followed by date line')
             d = match.group(1)
-            if d[2] == '/':
+            if d[2:3] == b'/':  # slice: indexing bytes yields an int on py3
                 # Y2K
-                d = '19' + d
+                d = b'19' + d
 
             if len(d.split()) != 3:
                 # cvs log dates always in GMT
-                d = d + ' UTC'
+                d = d + b' UTC'
             e.date = dateutil.parsedate(
                 d,
-                ['%y/%m/%d %H:%M:%S', '%Y/%m/%d %H:%M:%S', '%Y-%m-%d %H:%M:%S'],
+                [
+                    b'%y/%m/%d %H:%M:%S',
+                    b'%Y/%m/%d %H:%M:%S',
+                    b'%Y-%m-%d %H:%M:%S',
+                ],
             )
             e.author = scache(match.group(2))
-            e.dead = match.group(3).lower() == 'dead'
+            e.dead = match.group(3).lower() == b'dead'
 
             if match.group(5):
                 if match.group(6):
@@ -382,14 +386,14 @@
                 e.commitid = match.group(8)
 
             if match.group(9):  # cvsnt mergepoint
-                myrev = match.group(10).split('.')
+                myrev = match.group(10).split(b'.')
                 if len(myrev) == 2:  # head
-                    e.mergepoint = 'HEAD'
+                    e.mergepoint = b'HEAD'
                 else:
-                    myrev = '.'.join(myrev[:-2] + ['0', myrev[-2]])
+                    myrev = b'.'.join(myrev[:-2] + [b'0', myrev[-2]])
                     branches = [b for b in branchmap if branchmap[b] == myrev]
                     assert len(branches) == 1, (
-                        'unknown branch: %s' % e.mergepoint
+                        b'unknown branch: %s' % e.mergepoint
                     )
                     e.mergepoint = branches[0]
 
@@ -402,8 +406,8 @@
             m = re_70.match(line)
             if m:
                 e.branches = [
-                    tuple([int(y) for y in x.strip().split('.')])
-                    for x in m.group(1).split(';')
+                    tuple([int(y) for y in x.strip().split(b'.')])
+                    for x in m.group(1).split(b';')
                 ]
                 state = 8
             elif re_31.match(line) and re_50.match(peek):
@@ -419,7 +423,7 @@
             # store commit log message
             if re_31.match(line):
                 cpeek = peek
-                if cpeek.endswith('\n'):
+                if cpeek.endswith(b'\n'):
                     cpeek = cpeek[:-1]
                 if re_50.match(cpeek):
                     state = 5
@@ -447,7 +451,7 @@
             and file_added_re.match(e.comment[0])
         ):
             ui.debug(
-                'found synthetic revision in %s: %r\n' % (e.rcs, e.comment[0])
+                b'found synthetic revision in %s: %r\n' % (e.rcs, e.comment[0])
             )
             e.synthetic = True
 
@@ -455,7 +459,7 @@
             # clean up the results and save in the log.
             store = False
             e.tags = sorted([scache(x) for x in tags.get(e.revision, [])])
-            e.comment = scache('\n'.join(e.comment))
+            e.comment = scache(b'\n'.join(e.comment))
 
             revn = len(e.revision)
             if revn > 3 and (revn % 2) == 0:
@@ -466,7 +470,7 @@
             # find the branches starting from this revision
             branchpoints = set()
             for branch, revision in branchmap.iteritems():
-                revparts = tuple([int(i) for i in revision.split('.')])
+                revparts = tuple([int(i) for i in revision.split(b'.')])
                 if len(revparts) < 2:  # bad tags
                     continue
                 if revparts[-2] == 0 and revparts[-1] % 2 == 0:
@@ -480,11 +484,12 @@
 
             log.append(e)
 
-            rcsmap[e.rcs.replace('/Attic/', '/')] = e.rcs
+            rcsmap[e.rcs.replace(b'/Attic/', b'/')] = e.rcs
 
             if len(log) % 100 == 0:
                 ui.status(
-                    stringutil.ellipsis('%d %s' % (len(log), e.file), 80) + '\n'
+                    stringutil.ellipsis(b'%d %s' % (len(log), e.file), 80)
+                    + b'\n'
                 )
 
     log.sort(key=lambda x: (x.rcs, x.revision))
@@ -492,7 +497,7 @@
     # find parent revisions of individual files
     versions = {}
     for e in sorted(oldlog, key=lambda x: (x.rcs, x.revision)):
-        rcs = e.rcs.replace('/Attic/', '/')
+        rcs = e.rcs.replace(b'/Attic/', b'/')
         if rcs in rcsmap:
             e.rcs = rcsmap[rcs]
         branch = e.revision[:-1]
@@ -515,28 +520,28 @@
             if oldlog and oldlog[-1].date >= log[0].date:
                 raise logerror(
                     _(
-                        'log cache overlaps with new log entries,'
-                        ' re-run without cache.'
+                        b'log cache overlaps with new log entries,'
+                        b' re-run without cache.'
                     )
                 )
 
             log = oldlog + log
 
             # write the new cachefile
-            ui.note(_('writing cvs log cache %s\n') % cachefile)
-            pickle.dump(log, open(cachefile, 'wb'))
+            ui.note(_(b'writing cvs log cache %s\n') % cachefile)
+            pickle.dump(log, open(cachefile, b'wb'))
         else:
             log = oldlog
 
-    ui.status(_('%d log entries\n') % len(log))
+    ui.status(_(b'%d log entries\n') % len(log))
 
-    encodings = ui.configlist('convert', 'cvsps.logencoding')
+    encodings = ui.configlist(b'convert', b'cvsps.logencoding')
     if encodings:
 
         def revstr(r):
             # this is needed, because logentry.revision is a tuple of "int"
             # (e.g. (1, 2) for "1.2")
-            return '.'.join(pycompat.maplist(pycompat.bytestr, r))
+            return b'.'.join(pycompat.maplist(pycompat.bytestr, r))
 
         for entry in log:
             comment = entry.comment
@@ -547,7 +552,7 @@
                     )
                     if ui.debugflag:
                         ui.debug(
-                            "transcoding by %s: %s of %s\n"
+                            b"transcoding by %s: %s of %s\n"
                             % (e, revstr(entry.revision), entry.file)
                         )
                     break
@@ -557,20 +562,22 @@
                     raise error.Abort(
                         inst,
                         hint=_(
-                            'check convert.cvsps.logencoding' ' configuration'
+                            b'check convert.cvsps.logencoding' b' configuration'
                         ),
                     )
             else:
                 raise error.Abort(
                     _(
-                        "no encoding can transcode"
-                        " CVS log message for %s of %s"
+                        b"no encoding can transcode"
+                        b" CVS log message for %s of %s"
                     )
                     % (revstr(entry.revision), entry.file),
-                    hint=_('check convert.cvsps.logencoding' ' configuration'),
+                    hint=_(
+                        b'check convert.cvsps.logencoding' b' configuration'
+                    ),
                 )
 
-    hook.hook(ui, None, "cvslog", True, log=log)
+    hook.hook(ui, None, b"cvslog", True, log=log)
 
     return log
 
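The two hunks above cover the convert.cvsps.logencoding fallback: each configured encoding is tried in order, the first one that decodes the CVS log message wins (the comment is re-encoded as UTF-8), and error.Abort is raised with a configuration hint when none applies. A standalone sketch of that pattern, with illustrative names rather than Mercurial API (the real loop also maps LookupError from an unknown codec name to the same Abort):

    def transcode(raw, candidates):
        # Return the message re-encoded as UTF-8 plus the codec that worked.
        for enc in candidates:
            try:
                return raw.decode(enc).encode('utf-8'), enc
            except UnicodeDecodeError:
                continue
        raise ValueError('no configured encoding can transcode %r' % raw)

    assert transcode(b'caf\xe9', ['ascii', 'utf-8', 'latin-1']) == (
        b'caf\xc3\xa9',
        'latin-1',
    )
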
@@ -597,14 +604,16 @@
         self.__dict__.update(entries)
 
     def __repr__(self):
-        items = ("%s=%r" % (k, self.__dict__[k]) for k in sorted(self.__dict__))
-        return "%s(%s)" % (type(self).__name__, ", ".join(items))
+        items = (
+            b"%s=%r" % (k, self.__dict__[k]) for k in sorted(self.__dict__)
+        )
+        return b"%s(%s)" % (type(self).__name__, b", ".join(items))
 
 
 def createchangeset(ui, log, fuzz=60, mergefrom=None, mergeto=None):
     '''Convert log into changesets.'''
 
-    ui.status(_('creating changesets\n'))
+    ui.status(_(b'creating changesets\n'))
 
     # try to order commitids by date
     mindate = {}
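
A knock-on effect of the mechanical pass shows in the __repr__ hunk just above: on Python 3, __repr__ must return str, and a bytes pattern cannot %-interpolate the str type(self).__name__, so the byteified form breaks as soon as something calls repr() on an entry. The removed native-str lines are what Python 3 wants here, which is why dunders like this were candidates for follow-up after the mass rewrite:

    class C(object):
        def __repr__(self):
            # Raises on Python 3: %b needs bytes or __bytes__, not str --
            # and even a bytes result would be rejected, since __repr__
            # must return str.
            return b'%s()' % type(self).__name__

    try:
        repr(C())
    except TypeError:
        pass
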
@@ -619,10 +628,10 @@
     log.sort(
         key=lambda x: (
             mindate.get(x.commitid, (-1, 0)),
-            x.commitid or '',
+            x.commitid or b'',
             x.comment,
             x.author,
-            x.branch or '',
+            x.branch or b'',
             x.date,
             x.branchpoints,
         )
@@ -682,8 +691,8 @@
 
             files = set()
             if len(changesets) % 100 == 0:
-                t = '%d %s' % (len(changesets), repr(e.comment)[1:-1])
-                ui.status(stringutil.ellipsis(t, 80) + '\n')
+                t = b'%d %s' % (len(changesets), repr(e.comment)[1:-1])
+                ui.status(stringutil.ellipsis(t, 80) + b'\n')
 
         c.entries.append(e)
         files.add(e.file)
@@ -705,9 +714,9 @@
     # Sort files in each changeset
 
     def entitycompare(l, r):
-        'Mimic cvsps sorting order'
-        l = l.file.split('/')
-        r = r.file.split('/')
+        b'Mimic cvsps sorting order'
+        l = l.file.split(b'/')
+        r = r.file.split(b'/')
         nl = len(l)
         nr = len(r)
         n = min(nl, nr)
@@ -842,7 +851,7 @@
             # Ensure no changeset has a synthetic changeset as a parent.
             while p.synthetic:
                 assert len(p.parents) <= 1, _(
-                    'synthetic changeset cannot have multiple parents'
+                    b'synthetic changeset cannot have multiple parents'
                 )
                 if p.parents:
                     p = p.parents[0]
@@ -854,7 +863,7 @@
                 c.parents.append(p)
 
         if c.mergepoint:
-            if c.mergepoint == 'HEAD':
+            if c.mergepoint == b'HEAD':
                 c.mergepoint = None
             c.parents.append(changesets[branches[c.mergepoint]])
 
@@ -862,15 +871,15 @@
             m = mergefrom.search(c.comment)
             if m:
                 m = m.group(1)
-                if m == 'HEAD':
+                if m == b'HEAD':
                     m = None
                 try:
                     candidate = changesets[branches[m]]
                 except KeyError:
                     ui.warn(
                         _(
-                            "warning: CVS commit message references "
-                            "non-existent branch %r:\n%s\n"
+                            b"warning: CVS commit message references "
+                            b"non-existent branch %r:\n%s\n"
                         )
                         % (pycompat.bytestr(m), c.comment)
                     )
@@ -882,7 +891,7 @@
             if m:
                 if m.groups():
                     m = m.group(1)
-                    if m == 'HEAD':
+                    if m == b'HEAD':
                         m = None
                 else:
                     m = None  # if no group found then merge to HEAD
@@ -892,7 +901,7 @@
                         author=c.author,
                         branch=m,
                         date=c.date,
-                        comment='convert-repo: CVS merge from branch %s'
+                        comment=b'convert-repo: CVS merge from branch %s'
                         % c.branch,
                         entries=[],
                         tags=[],
@@ -927,13 +936,13 @@
         for l, r in odd:
             if l.id is not None and r.id is not None:
                 ui.warn(
-                    _('changeset %d is both before and after %d\n')
+                    _(b'changeset %d is both before and after %d\n')
                     % (l.id, r.id)
                 )
 
-    ui.status(_('%d changeset entries\n') % len(changesets))
+    ui.status(_(b'%d changeset entries\n') % len(changesets))
 
-    hook.hook(ui, None, "cvschangesets", True, changesets=changesets)
+    hook.hook(ui, None, b"cvschangesets", True, changesets=changesets)
 
     return changesets
 
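The sort key near the top of createchangeset() is what makes the fuzz merging work: after sorting, file revisions sharing commitid/comment/author/branch sit next to each other, and consecutive entries within fuzz seconds collapse into a single changeset (the real loop also starts a new changeset rather than let one file appear twice). A toy sketch of the grouping, with dict keys standing in for logentry attributes:

    def group(entries, fuzz=60):
        # Sort so that mergeable entries become adjacent, then sweep once.
        entries.sort(
            key=lambda e: (e['comment'], e['author'], e['branch'], e['date'])
        )
        sets, cur = [], None
        for e in entries:
            samekey = cur is not None and all(
                cur[0][k] == e[k] for k in ('comment', 'author', 'branch')
            )
            if samekey and e['date'] - cur[-1]['date'] <= fuzz:
                cur.append(e)
            else:
                cur = [e]
                sets.append(cur)
        return sets

    log = [
        {'comment': 'fix', 'author': 'a', 'branch': None, 'date': 100},
        {'comment': 'fix', 'author': 'a', 'branch': None, 'date': 130},
        {'comment': 'new', 'author': 'a', 'branch': None, 'date': 131},
    ]
    assert [len(cs) for cs in group(log)] == [2, 1]
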
@@ -944,27 +953,27 @@
     commit log entries and dates.
     '''
     opts = pycompat.byteskwargs(opts)
-    if opts["new_cache"]:
-        cache = "write"
-    elif opts["update_cache"]:
-        cache = "update"
+    if opts[b"new_cache"]:
+        cache = b"write"
+    elif opts[b"update_cache"]:
+        cache = b"update"
     else:
         cache = None
 
-    revisions = opts["revisions"]
+    revisions = opts[b"revisions"]
 
     try:
         if args:
             log = []
             for d in args:
-                log += createlog(ui, d, root=opts["root"], cache=cache)
+                log += createlog(ui, d, root=opts[b"root"], cache=cache)
         else:
-            log = createlog(ui, root=opts["root"], cache=cache)
+            log = createlog(ui, root=opts[b"root"], cache=cache)
     except logerror as e:
-        ui.write("%r\n" % e)
+        ui.write(b"%r\n" % e)
         return
 
-    changesets = createchangeset(ui, log, opts["fuzz"])
+    changesets = createchangeset(ui, log, opts[b"fuzz"])
     del log
 
     # Print changesets (optionally filtered)
@@ -974,7 +983,7 @@
     ancestors = {}  # parent branch
     for cs in changesets:
 
-        if opts["ancestors"]:
+        if opts[b"ancestors"]:
             if cs.branch not in branches and cs.parents and cs.parents[0].id:
                 ancestors[cs.branch] = (
                     changesets[cs.parents[0].id - 1].branch,
@@ -983,72 +992,75 @@
             branches[cs.branch] = cs.id
 
         # limit by branches
-        if opts["branches"] and (cs.branch or 'HEAD') not in opts["branches"]:
+        if (
+            opts[b"branches"]
+            and (cs.branch or b'HEAD') not in opts[b"branches"]
+        ):
             continue
 
         if not off:
             # Note: trailing spaces on several lines here are needed to have
             #       bug-for-bug compatibility with cvsps.
-            ui.write('---------------------\n')
-            ui.write(('PatchSet %d \n' % cs.id))
+            ui.write(b'---------------------\n')
+            ui.write((b'PatchSet %d \n' % cs.id))
             ui.write(
                 (
-                    'Date: %s\n'
-                    % dateutil.datestr(cs.date, '%Y/%m/%d %H:%M:%S %1%2')
+                    b'Date: %s\n'
+                    % dateutil.datestr(cs.date, b'%Y/%m/%d %H:%M:%S %1%2')
                 )
             )
-            ui.write(('Author: %s\n' % cs.author))
-            ui.write(('Branch: %s\n' % (cs.branch or 'HEAD')))
+            ui.write((b'Author: %s\n' % cs.author))
+            ui.write((b'Branch: %s\n' % (cs.branch or b'HEAD')))
             ui.write(
                 (
-                    'Tag%s: %s \n'
+                    b'Tag%s: %s \n'
                     % (
-                        ['', 's'][len(cs.tags) > 1],
-                        ','.join(cs.tags) or '(none)',
+                        [b'', b's'][len(cs.tags) > 1],
+                        b','.join(cs.tags) or b'(none)',
                     )
                 )
             )
             if cs.branchpoints:
                 ui.write(
-                    'Branchpoints: %s \n' % ', '.join(sorted(cs.branchpoints))
+                    b'Branchpoints: %s \n' % b', '.join(sorted(cs.branchpoints))
                 )
-            if opts["parents"] and cs.parents:
+            if opts[b"parents"] and cs.parents:
                 if len(cs.parents) > 1:
                     ui.write(
                         (
-                            'Parents: %s\n'
-                            % (','.join([(b"%d" % p.id) for p in cs.parents]))
+                            b'Parents: %s\n'
+                            % (b','.join([(b"%d" % p.id) for p in cs.parents]))
                         )
                     )
                 else:
-                    ui.write(('Parent: %d\n' % cs.parents[0].id))
+                    ui.write((b'Parent: %d\n' % cs.parents[0].id))
 
-            if opts["ancestors"]:
+            if opts[b"ancestors"]:
                 b = cs.branch
                 r = []
                 while b:
                     b, c = ancestors[b]
-                    r.append('%s:%d:%d' % (b or "HEAD", c, branches[b]))
+                    r.append(b'%s:%d:%d' % (b or b"HEAD", c, branches[b]))
                 if r:
-                    ui.write(('Ancestors: %s\n' % (','.join(r))))
+                    ui.write((b'Ancestors: %s\n' % (b','.join(r))))
 
-            ui.write('Log:\n')
-            ui.write('%s\n\n' % cs.comment)
-            ui.write('Members: \n')
+            ui.write(b'Log:\n')
+            ui.write(b'%s\n\n' % cs.comment)
+            ui.write(b'Members: \n')
             for f in cs.entries:
                 fn = f.file
-                if fn.startswith(opts["prefix"]):
-                    fn = fn[len(opts["prefix"]) :]
+                if fn.startswith(opts[b"prefix"]):
+                    fn = fn[len(opts[b"prefix"]) :]
                 ui.write(
-                    '\t%s:%s->%s%s \n'
+                    b'\t%s:%s->%s%s \n'
                     % (
                         fn,
-                        '.'.join([b"%d" % x for x in f.parent]) or 'INITIAL',
-                        '.'.join([(b"%d" % x) for x in f.revision]),
-                        ['', '(DEAD)'][f.dead],
+                        b'.'.join([b"%d" % x for x in f.parent]) or b'INITIAL',
+                        b'.'.join([(b"%d" % x) for x in f.revision]),
+                        [b'', b'(DEAD)'][f.dead],
                     )
                 )
-            ui.write('\n')
+            ui.write(b'\n')
 
         # have we seen the start tag?
         if revisions and off:
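
The rewritten ui.write() calls above lean on PEP 461 byte-string %-formatting, which only exists on Python 3.5+; %d accepts integers directly (hence the b"%d" % p.id pieces), but %s insists on bytes or an object with __bytes__, which is why every interpolated value is built as bytes first:

    b'PatchSet %d \n' % 7           # -> b'PatchSet 7 \n'
    b'Branch: %s\n' % b'HEAD'       # -> b'Branch: HEAD\n'
    try:
        b'Branch: %s\n' % u'HEAD'   # str is rejected by %b on Python 3
    except TypeError:
        pass
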
--- a/hgext/convert/darcs.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/convert/darcs.py	Sun Oct 06 09:48:39 2019 -0400
@@ -46,22 +46,22 @@
 class darcs_source(common.converter_source, common.commandline):
     def __init__(self, ui, repotype, path, revs=None):
         common.converter_source.__init__(self, ui, repotype, path, revs=revs)
-        common.commandline.__init__(self, ui, 'darcs')
+        common.commandline.__init__(self, ui, b'darcs')
 
         # check for _darcs, ElementTree so that we can easily skip
         # test-convert-darcs if ElementTree is not around
-        if not os.path.exists(os.path.join(path, '_darcs')):
-            raise NoRepo(_("%s does not look like a darcs repository") % path)
+        if not os.path.exists(os.path.join(path, b'_darcs')):
+            raise NoRepo(_(b"%s does not look like a darcs repository") % path)
 
-        common.checktool('darcs')
-        version = self.run0('--version').splitlines()[0].strip()
-        if version < '2.1':
+        common.checktool(b'darcs')
+        version = self.run0(b'--version').splitlines()[0].strip()
+        if version < b'2.1':
             raise error.Abort(
-                _('darcs version 2.1 or newer needed (found %r)') % version
+                _(b'darcs version 2.1 or newer needed (found %r)') % version
             )
 
-        if "ElementTree" not in globals():
-            raise error.Abort(_("Python ElementTree module is not available"))
+        if b"ElementTree" not in globals():
+            raise error.Abort(_(b"Python ElementTree module is not available"))
 
         self.path = os.path.realpath(path)
 
@@ -73,30 +73,33 @@
         # Check darcs repository format
         format = self.format()
         if format:
-            if format in ('darcs-1.0', 'hashed'):
+            if format in (b'darcs-1.0', b'hashed'):
                 raise NoRepo(
-                    _("%s repository format is unsupported, " "please upgrade")
+                    _(
+                        b"%s repository format is unsupported, "
+                        b"please upgrade"
+                    )
                     % format
                 )
         else:
-            self.ui.warn(_('failed to detect repository format!'))
+            self.ui.warn(_(b'failed to detect repository format!'))
 
     def before(self):
         self.tmppath = pycompat.mkdtemp(
-            prefix='convert-' + os.path.basename(self.path) + '-'
+            prefix=b'convert-' + os.path.basename(self.path) + b'-'
         )
-        output, status = self.run('init', repodir=self.tmppath)
+        output, status = self.run(b'init', repodir=self.tmppath)
         self.checkexit(status)
 
         tree = self.xml(
-            'changes', xml_output=True, summary=True, repodir=self.path
+            b'changes', xml_output=True, summary=True, repodir=self.path
         )
         tagname = None
         child = None
-        for elt in tree.findall('patch'):
-            node = elt.get('hash')
-            name = elt.findtext('name', '')
-            if name.startswith('TAG '):
+        for elt in tree.findall(b'patch'):
+            node = elt.get(b'hash')
+            name = elt.findtext(b'name', b'')
+            if name.startswith(b'TAG '):
                 tagname = name[4:].strip()
             elif tagname is not None:
                 self.tags[tagname] = node
@@ -107,7 +110,7 @@
         self.parents[child] = []
 
     def after(self):
-        self.ui.debug('cleaning up %s\n' % self.tmppath)
+        self.ui.debug(b'cleaning up %s\n' % self.tmppath)
         shutil.rmtree(self.tmppath, ignore_errors=True)
 
     def recode(self, s, encoding=None):
@@ -125,7 +128,7 @@
         # While we are decoding the XML as latin-1 to be as liberal as
         # possible, etree will still raise an exception if any
         # non-printable characters are in the XML changelog.
-        parser = XMLParser(encoding='latin-1')
+        parser = XMLParser(encoding=b'latin-1')
         p = self._run(cmd, **kwargs)
         etree.parse(p.stdout, parser=parser)
         p.wait()
@@ -133,20 +136,20 @@
         return etree.getroot()
 
     def format(self):
-        output, status = self.run('show', 'repo', repodir=self.path)
+        output, status = self.run(b'show', b'repo', repodir=self.path)
         self.checkexit(status)
         m = re.search(r'^\s*Format:\s*(.*)$', output, re.MULTILINE)
         if not m:
             return None
-        return ','.join(sorted(f.strip() for f in m.group(1).split(',')))
+        return b','.join(sorted(f.strip() for f in m.group(1).split(b',')))
 
     def manifest(self):
         man = []
         output, status = self.run(
-            'show', 'files', no_directories=True, repodir=self.tmppath
+            b'show', b'files', no_directories=True, repodir=self.tmppath
         )
         self.checkexit(status)
-        for line in output.split('\n'):
+        for line in output.split(b'\n'):
             path = line[2:]
             if path:
                 man.append(path)
@@ -157,14 +160,14 @@
 
     def getcommit(self, rev):
         elt = self.changes[rev]
-        dateformat = '%a %b %d %H:%M:%S %Z %Y'
-        date = dateutil.strdate(elt.get('local_date'), dateformat)
-        desc = elt.findtext('name') + '\n' + elt.findtext('comment', '')
+        dateformat = b'%a %b %d %H:%M:%S %Z %Y'
+        date = dateutil.strdate(elt.get(b'local_date'), dateformat)
+        desc = elt.findtext(b'name') + b'\n' + elt.findtext(b'comment', b'')
         # etree can return unicode objects for name, comment, and author,
         # so recode() is used to ensure str objects are emitted.
-        newdateformat = '%Y-%m-%d %H:%M:%S %1%2'
+        newdateformat = b'%Y-%m-%d %H:%M:%S %1%2'
         return common.commit(
-            author=self.recode(elt.get('author')),
+            author=self.recode(elt.get(b'author')),
             date=dateutil.datestr(date, newdateformat),
             desc=self.recode(desc).strip(),
             parents=self.parents[rev],
@@ -172,34 +175,34 @@
 
     def pull(self, rev):
         output, status = self.run(
-            'pull',
+            b'pull',
             self.path,
             all=True,
-            match='hash %s' % rev,
+            match=b'hash %s' % rev,
             no_test=True,
             no_posthook=True,
-            external_merge='/bin/false',
+            external_merge=b'/bin/false',
             repodir=self.tmppath,
         )
         if status:
-            if output.find('We have conflicts in') == -1:
+            if output.find(b'We have conflicts in') == -1:
                 self.checkexit(status, output)
-            output, status = self.run('revert', all=True, repodir=self.tmppath)
+            output, status = self.run(b'revert', all=True, repodir=self.tmppath)
             self.checkexit(status, output)
 
     def getchanges(self, rev, full):
         if full:
-            raise error.Abort(_("convert from darcs does not support --full"))
+            raise error.Abort(_(b"convert from darcs does not support --full"))
         copies = {}
         changes = []
         man = None
-        for elt in self.changes[rev].find('summary').getchildren():
-            if elt.tag in ('add_directory', 'remove_directory'):
+        for elt in self.changes[rev].find(b'summary').getchildren():
+            if elt.tag in (b'add_directory', b'remove_directory'):
                 continue
-            if elt.tag == 'move':
+            if elt.tag == b'move':
                 if man is None:
                     man = self.manifest()
-                source, dest = elt.get('from'), elt.get('to')
+                source, dest = elt.get(b'from'), elt.get(b'to')
                 if source in man:
                     # File move
                     changes.append((source, rev))
@@ -207,11 +210,11 @@
                     copies[dest] = source
                 else:
                     # Directory move, deduce file moves from manifest
-                    source = source + '/'
+                    source = source + b'/'
                     for f in man:
                         if not f.startswith(source):
                             continue
-                        fdest = dest + '/' + f[len(source) :]
+                        fdest = dest + b'/' + f[len(source) :]
                         changes.append((f, rev))
                         changes.append((fdest, rev))
                         copies[fdest] = f
@@ -223,7 +226,7 @@
 
     def getfile(self, name, rev):
         if rev != self.lastrev:
-            raise error.Abort(_('internal calling inconsistency'))
+            raise error.Abort(_(b'internal calling inconsistency'))
         path = os.path.join(self.tmppath, name)
         try:
             data = util.readfile(path)
@@ -232,7 +235,7 @@
             if inst.errno == errno.ENOENT:
                 return None, None
             raise
-        mode = (mode & 0o111) and 'x' or ''
+        mode = (mode & 0o111) and b'x' or b''
         return data, mode
 
     def gettags(self):
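
A caveat the darcs hunks surface: the byteify pass is purely textual, so literals flowing straight into stdlib APIs (the ElementTree paths in findall()/findtext(), the XMLParser encoding argument) receive the b'' prefix along with everything else, even though Python 3's etree expects, or only matches, native str in those positions -- call sites like these were natural candidates for later cleanup. For illustration:

    import xml.etree.ElementTree as etree

    root = etree.fromstring('<changes><patch hash="h1"/></changes>')
    assert len(root.findall('patch')) == 1  # str path works
    try:
        root.findall(b'patch')              # bytes path fails on Python 3
    except TypeError:
        pass
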
--- a/hgext/convert/filemap.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/convert/filemap.py	Sun Oct 06 09:48:39 2019 -0400
@@ -30,8 +30,8 @@
     i = len(path)
     while i != -1:
         yield path[:i], path[i + 1 :]
-        i = path.rfind('/', 0, i)
-    yield '.', path
+        i = path.rfind(b'/', 0, i)
+    yield b'.', path
 
 
 def normalize(path):
@@ -55,7 +55,7 @@
         self.targetprefixes = None
         if path:
             if self.parse(path):
-                raise error.Abort(_('errors in filemap'))
+                raise error.Abort(_(b'errors in filemap'))
 
     def parse(self, path):
         errs = 0
@@ -63,48 +63,48 @@
         def check(name, mapping, listname):
             if not name:
                 self.ui.warn(
-                    _('%s:%d: path to %s is missing\n')
+                    _(b'%s:%d: path to %s is missing\n')
                     % (lex.infile, lex.lineno, listname)
                 )
                 return 1
             if name in mapping:
                 self.ui.warn(
-                    _('%s:%d: %r already in %s list\n')
+                    _(b'%s:%d: %r already in %s list\n')
                     % (lex.infile, lex.lineno, name, listname)
                 )
                 return 1
-            if name.startswith('/') or name.endswith('/') or '//' in name:
+            if name.startswith(b'/') or name.endswith(b'/') or b'//' in name:
                 self.ui.warn(
-                    _('%s:%d: superfluous / in %s %r\n')
+                    _(b'%s:%d: superfluous / in %s %r\n')
                     % (lex.infile, lex.lineno, listname, pycompat.bytestr(name))
                 )
                 return 1
             return 0
 
         lex = common.shlexer(
-            filepath=path, wordchars='!@#$%^&*()-=+[]{}|;:,./<>?'
+            filepath=path, wordchars=b'!@#$%^&*()-=+[]{}|;:,./<>?'
         )
         cmd = lex.get_token()
         while cmd:
-            if cmd == 'include':
+            if cmd == b'include':
                 name = normalize(lex.get_token())
-                errs += check(name, self.exclude, 'exclude')
+                errs += check(name, self.exclude, b'exclude')
                 self.include[name] = name
-            elif cmd == 'exclude':
+            elif cmd == b'exclude':
                 name = normalize(lex.get_token())
-                errs += check(name, self.include, 'include')
-                errs += check(name, self.rename, 'rename')
+                errs += check(name, self.include, b'include')
+                errs += check(name, self.rename, b'rename')
                 self.exclude[name] = name
-            elif cmd == 'rename':
+            elif cmd == b'rename':
                 src = normalize(lex.get_token())
                 dest = normalize(lex.get_token())
-                errs += check(src, self.exclude, 'exclude')
+                errs += check(src, self.exclude, b'exclude')
                 self.rename[src] = dest
-            elif cmd == 'source':
+            elif cmd == b'source':
                 errs += self.parse(normalize(lex.get_token()))
             else:
                 self.ui.warn(
-                    _('%s:%d: unknown directive %r\n')
+                    _(b'%s:%d: unknown directive %r\n')
                     % (lex.infile, lex.lineno, pycompat.bytestr(cmd))
                 )
                 errs += 1
@@ -118,7 +118,7 @@
                 return mapping[pre], pre, suf
             except KeyError:
                 pass
-        return '', name, ''
+        return b'', name, b''
 
     def istargetfile(self, filename):
         """Return true if the given target filename is covered as a destination
@@ -131,7 +131,7 @@
 
         # If "." is a target, then all target files are considered from the
         # source.
-        if not self.targetprefixes or '.' in self.targetprefixes:
+        if not self.targetprefixes or b'.' in self.targetprefixes:
             return True
 
         filename = normalize(filename)
@@ -152,17 +152,17 @@
         if self.exclude:
             exc = self.lookup(name, self.exclude)[0]
         else:
-            exc = ''
+            exc = b''
         if (not self.include and exc) or (len(inc) <= len(exc)):
             return None
         newpre, pre, suf = self.lookup(name, self.rename)
         if newpre:
-            if newpre == '.':
+            if newpre == b'.':
                 return suf
             if suf:
-                if newpre.endswith('/'):
+                if newpre.endswith(b'/'):
                     return newpre + suf
-                return newpre + '/' + suf
+                return newpre + b'/' + suf
             return newpre
         return name
 
@@ -204,7 +204,7 @@
         self.seenchildren = {}
         # experimental config: convert.ignoreancestorcheck
         self.ignoreancestorcheck = self.ui.configbool(
-            'convert', 'ignoreancestorcheck'
+            b'convert', b'ignoreancestorcheck'
         )
 
     def before(self):
@@ -256,7 +256,7 @@
                 try:
                     self.origparents[rev] = self.getcommit(rev).parents
                 except error.RepoLookupError:
-                    self.ui.debug("unknown revmap source: %s\n" % rev)
+                    self.ui.debug(b"unknown revmap source: %s\n" % rev)
                     continue
             if arg is not None:
                 self.children[arg] = self.children.get(arg, 0) + 1
@@ -316,7 +316,7 @@
         try:
             files = self.base.getchangedfiles(rev, i)
         except NotImplementedError:
-            raise error.Abort(_("source repository doesn't support --filemap"))
+            raise error.Abort(_(b"source repository doesn't support --filemap"))
         for f in files:
             if self.filemapper(f):
                 return True
@@ -331,7 +331,7 @@
         # close marker is significant (i.e. all of the branch ancestors weren't
         # eliminated).  Therefore if there *is* a close marker, getchanges()
         # doesn't consider it significant, and this revision should be dropped.
-        return not files and 'close' not in self.commits[rev].extra
+        return not files and b'close' not in self.commits[rev].extra
 
     def mark_not_wanted(self, rev, p):
         # Mark rev as not interesting and update data structures.
@@ -363,7 +363,9 @@
             if p in self.wantedancestors:
                 wrev.update(self.wantedancestors[p])
             else:
-                self.ui.warn(_('warning: %s parent %s is missing\n') % (rev, p))
+                self.ui.warn(
+                    _(b'warning: %s parent %s is missing\n') % (rev, p)
+                )
         wrev.add(rev)
         self.wantedancestors[rev] = wrev
 
@@ -423,7 +425,7 @@
         self.origparents[rev] = parents
 
         closed = False
-        if 'close' in self.commits[rev].extra:
+        if b'close' in self.commits[rev].extra:
             # A branch closing revision is only useful if one of its
             # parents belongs to the branch being closed
             pbranches = [self._cachedcommit(p).branch for p in mparents]
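
In the filemap hunks above, the path-matching helpers now operate on bytes throughout; rpairs() still decomposes a path into (prefix, suffix) pairs from the longest prefix down to the b'.' catch-all, which is what lets lookup() pick the most specific include/exclude/rename rule. Reproducing the byteified helper to show its output:

    def rpairs(path):
        i = len(path)
        while i != -1:
            yield path[:i], path[i + 1 :]
            i = path.rfind(b'/', 0, i)
        yield b'.', path

    assert list(rpairs(b'foo/bar/baz')) == [
        (b'foo/bar/baz', b''),
        (b'foo/bar', b'baz'),
        (b'foo', b'bar/baz'),
        (b'.', b'foo/bar/baz'),
    ]
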
--- a/hgext/convert/git.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/convert/git.py	Sun Oct 06 09:48:39 2019 -0400
@@ -26,22 +26,22 @@
         self.url = url
 
     def hgsub(self):
-        return "%s = [git]%s" % (self.path, self.url)
+        return b"%s = [git]%s" % (self.path, self.url)
 
     def hgsubstate(self):
-        return "%s %s" % (self.node, self.path)
+        return b"%s %s" % (self.node, self.path)
 
 
 # Keys in extra fields that should not be copied if the user requests.
 bannedextrakeys = {
     # Git commit object built-ins.
-    'tree',
-    'parent',
-    'author',
-    'committer',
+    b'tree',
+    b'parent',
+    b'author',
+    b'committer',
     # Mercurial built-ins.
-    'branch',
-    'close',
+    b'branch',
+    b'close',
 }
 
 
@@ -51,7 +51,7 @@
     # both issues.
 
     def _gitcmd(self, cmd, *args, **kwargs):
-        return cmd('--git-dir=%s' % self.path, *args, **kwargs)
+        return cmd(b'--git-dir=%s' % self.path, *args, **kwargs)
 
     def gitrun0(self, *args, **kwargs):
         return self._gitcmd(self.run0, *args, **kwargs)
@@ -70,100 +70,104 @@
 
     def __init__(self, ui, repotype, path, revs=None):
         super(convert_git, self).__init__(ui, repotype, path, revs=revs)
-        common.commandline.__init__(self, ui, 'git')
+        common.commandline.__init__(self, ui, b'git')
 
         # Pass an absolute path to git to prevent it from ever being
         # interpreted as a URL
         path = os.path.abspath(path)
 
-        if os.path.isdir(path + "/.git"):
-            path += "/.git"
-        if not os.path.exists(path + "/objects"):
+        if os.path.isdir(path + b"/.git"):
+            path += b"/.git"
+        if not os.path.exists(path + b"/objects"):
             raise common.NoRepo(
-                _("%s does not look like a Git repository") % path
+                _(b"%s does not look like a Git repository") % path
             )
 
         # The default value (50) is based on the default for 'git diff'.
-        similarity = ui.configint('convert', 'git.similarity')
+        similarity = ui.configint(b'convert', b'git.similarity')
         if similarity < 0 or similarity > 100:
-            raise error.Abort(_('similarity must be between 0 and 100'))
+            raise error.Abort(_(b'similarity must be between 0 and 100'))
         if similarity > 0:
-            self.simopt = ['-C%d%%' % similarity]
-            findcopiesharder = ui.configbool('convert', 'git.findcopiesharder')
+            self.simopt = [b'-C%d%%' % similarity]
+            findcopiesharder = ui.configbool(
+                b'convert', b'git.findcopiesharder'
+            )
             if findcopiesharder:
-                self.simopt.append('--find-copies-harder')
+                self.simopt.append(b'--find-copies-harder')
 
-            renamelimit = ui.configint('convert', 'git.renamelimit')
-            self.simopt.append('-l%d' % renamelimit)
+            renamelimit = ui.configint(b'convert', b'git.renamelimit')
+            self.simopt.append(b'-l%d' % renamelimit)
         else:
             self.simopt = []
 
-        common.checktool('git', 'git')
+        common.checktool(b'git', b'git')
 
         self.path = path
         self.submodules = []
 
-        self.catfilepipe = self.gitpipe('cat-file', '--batch')
+        self.catfilepipe = self.gitpipe(b'cat-file', b'--batch')
 
-        self.copyextrakeys = self.ui.configlist('convert', 'git.extrakeys')
+        self.copyextrakeys = self.ui.configlist(b'convert', b'git.extrakeys')
         banned = set(self.copyextrakeys) & bannedextrakeys
         if banned:
             raise error.Abort(
-                _('copying of extra key is forbidden: %s')
-                % _(', ').join(sorted(banned))
+                _(b'copying of extra key is forbidden: %s')
+                % _(b', ').join(sorted(banned))
             )
 
-        committeractions = self.ui.configlist('convert', 'git.committeractions')
+        committeractions = self.ui.configlist(
+            b'convert', b'git.committeractions'
+        )
 
         messagedifferent = None
         messagealways = None
         for a in committeractions:
-            if a.startswith(('messagedifferent', 'messagealways')):
+            if a.startswith((b'messagedifferent', b'messagealways')):
                 k = a
                 v = None
-                if '=' in a:
-                    k, v = a.split('=', 1)
+                if b'=' in a:
+                    k, v = a.split(b'=', 1)
 
-                if k == 'messagedifferent':
-                    messagedifferent = v or 'committer:'
-                elif k == 'messagealways':
-                    messagealways = v or 'committer:'
+                if k == b'messagedifferent':
+                    messagedifferent = v or b'committer:'
+                elif k == b'messagealways':
+                    messagealways = v or b'committer:'
 
         if messagedifferent and messagealways:
             raise error.Abort(
                 _(
-                    'committeractions cannot define both '
-                    'messagedifferent and messagealways'
+                    b'committeractions cannot define both '
+                    b'messagedifferent and messagealways'
                 )
             )
 
-        dropcommitter = 'dropcommitter' in committeractions
-        replaceauthor = 'replaceauthor' in committeractions
+        dropcommitter = b'dropcommitter' in committeractions
+        replaceauthor = b'replaceauthor' in committeractions
 
         if dropcommitter and replaceauthor:
             raise error.Abort(
                 _(
-                    'committeractions cannot define both '
-                    'dropcommitter and replaceauthor'
+                    b'committeractions cannot define both '
+                    b'dropcommitter and replaceauthor'
                 )
             )
 
         if dropcommitter and messagealways:
             raise error.Abort(
                 _(
-                    'committeractions cannot define both '
-                    'dropcommitter and messagealways'
+                    b'committeractions cannot define both '
+                    b'dropcommitter and messagealways'
                 )
             )
 
         if not messagedifferent and not messagealways:
-            messagedifferent = 'committer:'
+            messagedifferent = b'committer:'
 
         self.committeractions = {
-            'dropcommitter': dropcommitter,
-            'replaceauthor': replaceauthor,
-            'messagedifferent': messagedifferent,
-            'messagealways': messagealways,
+            b'dropcommitter': dropcommitter,
+            b'replaceauthor': replaceauthor,
+            b'messagedifferent': messagedifferent,
+            b'messagealways': messagealways,
         }
 
     def after(self):
@@ -172,35 +176,38 @@
 
     def getheads(self):
         if not self.revs:
-            output, status = self.gitrun('rev-parse', '--branches', '--remotes')
+            output, status = self.gitrun(
+                b'rev-parse', b'--branches', b'--remotes'
+            )
             heads = output.splitlines()
             if status:
-                raise error.Abort(_('cannot retrieve git heads'))
+                raise error.Abort(_(b'cannot retrieve git heads'))
         else:
             heads = []
             for rev in self.revs:
-                rawhead, ret = self.gitrun('rev-parse', '--verify', rev)
+                rawhead, ret = self.gitrun(b'rev-parse', b'--verify', rev)
                 heads.append(rawhead[:-1])
                 if ret:
-                    raise error.Abort(_('cannot retrieve git head "%s"') % rev)
+                    raise error.Abort(_(b'cannot retrieve git head "%s"') % rev)
         return heads
 
     def catfile(self, rev, ftype):
         if rev == nodemod.nullhex:
             raise IOError
-        self.catfilepipe[0].write(rev + '\n')
+        self.catfilepipe[0].write(rev + b'\n')
         self.catfilepipe[0].flush()
         info = self.catfilepipe[1].readline().split()
         if info[1] != ftype:
             raise error.Abort(
-                _('cannot read %r object at %s')
+                _(b'cannot read %r object at %s')
                 % (pycompat.bytestr(ftype), rev)
             )
         size = int(info[2])
         data = self.catfilepipe[1].read(size)
         if len(data) < size:
             raise error.Abort(
-                _('cannot read %r object at %s: unexpected size') % (ftype, rev)
+                _(b'cannot read %r object at %s: unexpected size')
+                % (ftype, rev)
             )
         # read the trailing newline
         self.catfilepipe[1].read(1)
@@ -209,14 +216,14 @@
     def getfile(self, name, rev):
         if rev == nodemod.nullhex:
             return None, None
-        if name == '.hgsub':
-            data = '\n'.join([m.hgsub() for m in self.submoditer()])
-            mode = ''
-        elif name == '.hgsubstate':
-            data = '\n'.join([m.hgsubstate() for m in self.submoditer()])
-            mode = ''
+        if name == b'.hgsub':
+            data = b'\n'.join([m.hgsub() for m in self.submoditer()])
+            mode = b''
+        elif name == b'.hgsubstate':
+            data = b'\n'.join([m.hgsubstate() for m in self.submoditer()])
+            mode = b''
         else:
-            data = self.catfile(rev, "blob")
+            data = self.catfile(rev, b"blob")
             mode = self.modecache[(name, rev)]
         return data, mode
 
@@ -236,21 +243,23 @@
         c = config.config()
         # Each item in .gitmodules starts with whitespace that can't be parsed
         c.parse(
-            '.gitmodules',
-            '\n'.join(line.strip() for line in content.split('\n')),
+            b'.gitmodules',
+            b'\n'.join(line.strip() for line in content.split(b'\n')),
         )
         for sec in c.sections():
             s = c[sec]
-            if 'url' in s and 'path' in s:
-                self.submodules.append(submodule(s['path'], '', s['url']))
+            if b'url' in s and b'path' in s:
+                self.submodules.append(submodule(s[b'path'], b'', s[b'url']))
 
     def retrievegitmodules(self, version):
-        modules, ret = self.gitrun('show', '%s:%s' % (version, '.gitmodules'))
+        modules, ret = self.gitrun(
+            b'show', b'%s:%s' % (version, b'.gitmodules')
+        )
         if ret:
             # This can happen if a file is in the repo that has permissions
             # 160000, but there is no .gitmodules file.
             self.ui.warn(
-                _("warning: cannot read submodules config file in " "%s\n")
+                _(b"warning: cannot read submodules config file in " b"%s\n")
                 % version
             )
             return
@@ -259,74 +268,76 @@
             self.parsegitmodules(modules)
         except error.ParseError:
             self.ui.warn(
-                _("warning: unable to parse .gitmodules in %s\n") % version
+                _(b"warning: unable to parse .gitmodules in %s\n") % version
             )
             return
 
         for m in self.submodules:
-            node, ret = self.gitrun('rev-parse', '%s:%s' % (version, m.path))
+            node, ret = self.gitrun(b'rev-parse', b'%s:%s' % (version, m.path))
             if ret:
                 continue
             m.node = node.strip()
 
     def getchanges(self, version, full):
         if full:
-            raise error.Abort(_("convert from git does not support --full"))
+            raise error.Abort(_(b"convert from git does not support --full"))
         self.modecache = {}
         cmd = (
-            ['diff-tree', '-z', '--root', '-m', '-r'] + self.simopt + [version]
+            [b'diff-tree', b'-z', b'--root', b'-m', b'-r']
+            + self.simopt
+            + [version]
         )
         output, status = self.gitrun(*cmd)
         if status:
-            raise error.Abort(_('cannot read changes in %s') % version)
+            raise error.Abort(_(b'cannot read changes in %s') % version)
         changes = []
         copies = {}
         seen = set()
         entry = None
         subexists = [False]
         subdeleted = [False]
-        difftree = output.split('\x00')
+        difftree = output.split(b'\x00')
         lcount = len(difftree)
         i = 0
 
-        skipsubmodules = self.ui.configbool('convert', 'git.skipsubmodules')
+        skipsubmodules = self.ui.configbool(b'convert', b'git.skipsubmodules')
 
         def add(entry, f, isdest):
             seen.add(f)
             h = entry[3]
-            p = entry[1] == "100755"
-            s = entry[1] == "120000"
-            renamesource = not isdest and entry[4][0] == 'R'
+            p = entry[1] == b"100755"
+            s = entry[1] == b"120000"
+            renamesource = not isdest and entry[4][0] == b'R'
 
-            if f == '.gitmodules':
+            if f == b'.gitmodules':
                 if skipsubmodules:
                     return
 
                 subexists[0] = True
-                if entry[4] == 'D' or renamesource:
+                if entry[4] == b'D' or renamesource:
                     subdeleted[0] = True
-                    changes.append(('.hgsub', nodemod.nullhex))
+                    changes.append((b'.hgsub', nodemod.nullhex))
                 else:
-                    changes.append(('.hgsub', ''))
-            elif entry[1] == '160000' or entry[0] == ':160000':
+                    changes.append((b'.hgsub', b''))
+            elif entry[1] == b'160000' or entry[0] == b':160000':
                 if not skipsubmodules:
                     subexists[0] = True
             else:
                 if renamesource:
                     h = nodemod.nullhex
-                self.modecache[(f, h)] = (p and "x") or (s and "l") or ""
+                self.modecache[(f, h)] = (p and b"x") or (s and b"l") or b""
                 changes.append((f, h))
 
         while i < lcount:
             l = difftree[i]
             i += 1
             if not entry:
-                if not l.startswith(':'):
+                if not l.startswith(b':'):
                     continue
                 entry = tuple(pycompat.bytestr(p) for p in l.split())
                 continue
             f = l
-            if entry[4][0] == 'C':
+            if entry[4][0] == b'C':
                 copysrc = f
                 copydest = difftree[i]
                 i += 1
@@ -336,7 +347,7 @@
                 add(entry, f, False)
             # A file can be copied multiple times, or modified and copied
             # simultaneously. So f can be repeated even if fdest isn't.
-            if entry[4][0] == 'R':
+            if entry[4][0] == b'R':
                 # rename: next line is the destination
                 fdest = difftree[i]
                 i += 1
@@ -344,21 +355,21 @@
                     add(entry, fdest, True)
                     # .gitmodules isn't imported at all, so it being copied to
                     # and fro doesn't really make sense
-                    if f != '.gitmodules' and fdest != '.gitmodules':
+                    if f != b'.gitmodules' and fdest != b'.gitmodules':
                         copies[fdest] = f
             entry = None
 
         if subexists[0]:
             if subdeleted[0]:
-                changes.append(('.hgsubstate', nodemod.nullhex))
+                changes.append((b'.hgsubstate', nodemod.nullhex))
             else:
                 self.retrievegitmodules(version)
-                changes.append(('.hgsubstate', ''))
+                changes.append((b'.hgsubstate', b''))
         return (changes, copies, set())
 
     def getcommit(self, version):
-        c = self.catfile(version, "commit")  # read the commit hash
-        end = c.find("\n\n")
+        c = self.catfile(version, b"commit")  # read the commit hash
+        end = c.find(b"\n\n")
         message = c[end + 2 :]
         message = self.recode(message)
         l = c[:end].splitlines()
@@ -366,43 +377,43 @@
         author = committer = None
         extra = {}
         for e in l[1:]:
-            n, v = e.split(" ", 1)
-            if n == "author":
+            n, v = e.split(b" ", 1)
+            if n == b"author":
                 p = v.split()
                 tm, tz = p[-2:]
-                author = " ".join(p[:-2])
-                if author[0] == "<":
+                author = b" ".join(p[:-2])
+                if author[0] == b"<":
                     author = author[1:-1]
                 author = self.recode(author)
-            if n == "committer":
+            if n == b"committer":
                 p = v.split()
                 tm, tz = p[-2:]
-                committer = " ".join(p[:-2])
-                if committer[0] == "<":
+                committer = b" ".join(p[:-2])
+                if committer[0] == b"<":
                     committer = committer[1:-1]
                 committer = self.recode(committer)
-            if n == "parent":
+            if n == b"parent":
                 parents.append(v)
             if n in self.copyextrakeys:
                 extra[n] = v
 
-        if self.committeractions['dropcommitter']:
+        if self.committeractions[b'dropcommitter']:
             committer = None
-        elif self.committeractions['replaceauthor']:
+        elif self.committeractions[b'replaceauthor']:
             author = committer
 
         if committer:
-            messagealways = self.committeractions['messagealways']
-            messagedifferent = self.committeractions['messagedifferent']
+            messagealways = self.committeractions[b'messagealways']
+            messagedifferent = self.committeractions[b'messagedifferent']
             if messagealways:
-                message += '\n%s %s\n' % (messagealways, committer)
+                message += b'\n%s %s\n' % (messagealways, committer)
             elif messagedifferent and author != committer:
-                message += '\n%s %s\n' % (messagedifferent, committer)
+                message += b'\n%s %s\n' % (messagedifferent, committer)
 
-        tzs, tzh, tzm = tz[-5:-4] + "1", tz[-4:-2], tz[-2:]
+        tzs, tzh, tzm = tz[-5:-4] + b"1", tz[-4:-2], tz[-2:]
         tz = -int(tzs) * (int(tzh) * 3600 + int(tzm))
-        date = tm + " " + (b"%d" % tz)
-        saverev = self.ui.configbool('convert', 'git.saverev')
+        date = tm + b" " + (b"%d" % tz)
+        saverev = self.ui.configbool(b'convert', b'git.saverev')
 
         c = common.commit(
             parents=parents,
@@ -416,27 +427,27 @@
         return c
 
     def numcommits(self):
-        output, ret = self.gitrunlines('rev-list', '--all')
+        output, ret = self.gitrunlines(b'rev-list', b'--all')
         if ret:
             raise error.Abort(
-                _('cannot retrieve number of commits in %s') % self.path
+                _(b'cannot retrieve number of commits in %s') % self.path
             )
         return len(output)
 
     def gettags(self):
         tags = {}
         alltags = {}
-        output, status = self.gitrunlines('ls-remote', '--tags', self.path)
+        output, status = self.gitrunlines(b'ls-remote', b'--tags', self.path)
 
         if status:
-            raise error.Abort(_('cannot read tags from %s') % self.path)
-        prefix = 'refs/tags/'
+            raise error.Abort(_(b'cannot read tags from %s') % self.path)
+        prefix = b'refs/tags/'
 
         # Build complete list of tags, both annotated and bare ones
         for line in output:
             line = line.strip()
-            if line.startswith("error:") or line.startswith("fatal:"):
-                raise error.Abort(_('cannot read tags from %s') % self.path)
+            if line.startswith(b"error:") or line.startswith(b"fatal:"):
+                raise error.Abort(_(b'cannot read tags from %s') % self.path)
             node, tag = line.split(None, 1)
             if not tag.startswith(prefix):
                 continue
@@ -444,10 +455,10 @@
 
         # Filter out tag objects for annotated tag refs
         for tag in alltags:
-            if tag.endswith('^{}'):
+            if tag.endswith(b'^{}'):
                 tags[tag[:-3]] = alltags[tag]
             else:
-                if tag + '^{}' in alltags:
+                if tag + b'^{}' in alltags:
                     continue
                 else:
                     tags[tag] = alltags[tag]
@@ -458,28 +469,28 @@
         changes = []
         if i is None:
             output, status = self.gitrunlines(
-                'diff-tree', '--root', '-m', '-r', version
+                b'diff-tree', b'--root', b'-m', b'-r', version
             )
             if status:
-                raise error.Abort(_('cannot read changes in %s') % version)
+                raise error.Abort(_(b'cannot read changes in %s') % version)
             for l in output:
-                if "\t" not in l:
+                if b"\t" not in l:
                     continue
-                m, f = l[:-1].split("\t")
+                m, f = l[:-1].split(b"\t")
                 changes.append(f)
         else:
             output, status = self.gitrunlines(
-                'diff-tree',
-                '--name-only',
-                '--root',
-                '-r',
+                b'diff-tree',
+                b'--name-only',
+                b'--root',
+                b'-r',
                 version,
-                '%s^%d' % (version, i + 1),
-                '--',
+                b'%s^%d' % (version, i + 1),
+                b'--',
             )
             if status:
-                raise error.Abort(_('cannot read changes in %s') % version)
-            changes = [f.rstrip('\n') for f in output]
+                raise error.Abort(_(b'cannot read changes in %s') % version)
+            changes = [f.rstrip(b'\n') for f in output]
 
         return changes
 
@@ -487,19 +498,19 @@
         bookmarks = {}
 
         # Handle local and remote branches
-        remoteprefix = self.ui.config('convert', 'git.remoteprefix')
+        remoteprefix = self.ui.config(b'convert', b'git.remoteprefix')
         reftypes = [
             # (git prefix, hg prefix)
-            ('refs/remotes/origin/', remoteprefix + '/'),
-            ('refs/heads/', ''),
+            (b'refs/remotes/origin/', remoteprefix + b'/'),
+            (b'refs/heads/', b''),
         ]
 
         exclude = {
-            'refs/remotes/origin/HEAD',
+            b'refs/remotes/origin/HEAD',
         }
 
         try:
-            output, status = self.gitrunlines('show-ref')
+            output, status = self.gitrunlines(b'show-ref')
             for line in output:
                 line = line.strip()
                 rev, name = line.split(None, 1)
@@ -507,13 +518,13 @@
                 for gitprefix, hgprefix in reftypes:
                     if not name.startswith(gitprefix) or name in exclude:
                         continue
-                    name = '%s%s' % (hgprefix, name[len(gitprefix) :])
+                    name = b'%s%s' % (hgprefix, name[len(gitprefix) :])
                     bookmarks[name] = rev
         except Exception:
             pass
 
         return bookmarks
 
-    def checkrevformat(self, revstr, mapname='splicemap'):
+    def checkrevformat(self, revstr, mapname=b'splicemap'):
         """ git revision string is a 40 byte hex """
         self.checkhexformat(revstr, mapname)
--- a/hgext/convert/gnuarch.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/convert/gnuarch.py	Sun Oct 06 09:48:39 2019 -0400
@@ -31,9 +31,9 @@
     class gnuarch_rev(object):
         def __init__(self, rev):
             self.rev = rev
-            self.summary = ''
+            self.summary = b''
             self.date = None
-            self.author = ''
+            self.author = b''
             self.continuationof = None
             self.add_files = []
             self.mod_files = []
@@ -44,20 +44,20 @@
     def __init__(self, ui, repotype, path, revs=None):
         super(gnuarch_source, self).__init__(ui, repotype, path, revs=revs)
 
-        if not os.path.exists(os.path.join(path, '{arch}')):
+        if not os.path.exists(os.path.join(path, b'{arch}')):
             raise common.NoRepo(
-                _("%s does not look like a GNU Arch repository") % path
+                _(b"%s does not look like a GNU Arch repository") % path
             )
 
         # Could use checktool, but we want to check for baz or tla.
         self.execmd = None
-        if procutil.findexe('baz'):
-            self.execmd = 'baz'
+        if procutil.findexe(b'baz'):
+            self.execmd = b'baz'
         else:
-            if procutil.findexe('tla'):
-                self.execmd = 'tla'
+            if procutil.findexe(b'tla'):
+                self.execmd = b'tla'
             else:
-                raise error.Abort(_('cannot find a GNU Arch tool'))
+                raise error.Abort(_(b'cannot find a GNU Arch tool'))
 
         common.commandline.__init__(self, ui, self.execmd)
 
@@ -76,19 +76,19 @@
     def before(self):
         # Get registered archives
         self.archives = [
-            i.rstrip('\n') for i in self.runlines0('archives', '-n')
+            i.rstrip(b'\n') for i in self.runlines0(b'archives', b'-n')
         ]
 
-        if self.execmd == 'tla':
-            output = self.run0('tree-version', self.path)
+        if self.execmd == b'tla':
+            output = self.run0(b'tree-version', self.path)
         else:
-            output = self.run0('tree-version', '-d', self.path)
+            output = self.run0(b'tree-version', b'-d', self.path)
         self.treeversion = output.strip()
 
         # Get name of temporary directory
-        version = self.treeversion.split('/')
+        version = self.treeversion.split(b'/')
         self.tmppath = os.path.join(
-            pycompat.fsencode(tempfile.gettempdir()), 'hg-%s' % version[1]
+            pycompat.fsencode(tempfile.gettempdir()), b'hg-%s' % version[1]
         )
 
         # Generate parents dictionary
@@ -96,23 +96,25 @@
         treeversion = self.treeversion
         child = None
         while treeversion:
-            self.ui.status(_('analyzing tree version %s...\n') % treeversion)
+            self.ui.status(_(b'analyzing tree version %s...\n') % treeversion)
 
-            archive = treeversion.split('/')[0]
+            archive = treeversion.split(b'/')[0]
             if archive not in self.archives:
                 self.ui.status(
                     _(
-                        'tree analysis stopped because it points to '
-                        'an unregistered archive %s...\n'
+                        b'tree analysis stopped because it points to '
+                        b'an unregistered archive %s...\n'
                     )
                     % archive
                 )
                 break
 
             # Get the complete list of revisions for that tree version
-            output, status = self.runlines('revisions', '-r', '-f', treeversion)
+            output, status = self.runlines(
+                b'revisions', b'-r', b'-f', treeversion
+            )
             self.checkexit(
-                status, 'failed retrieving revisions for %s' % treeversion
+                status, b'failed retrieving revisions for %s' % treeversion
             )
 
             # No new iteration unless a revision has a continuation-of header
@@ -124,9 +126,9 @@
                 self.parents[rev] = []
 
                 # Read author, date and summary
-                catlog, status = self.run('cat-log', '-d', self.path, rev)
+                catlog, status = self.run(b'cat-log', b'-d', self.path, rev)
                 if status:
-                    catlog = self.run0('cat-archive-log', rev)
+                    catlog = self.run0(b'cat-archive-log', rev)
                 self._parsecatlog(catlog, rev)
 
                 # Populate the parents map
@@ -140,18 +142,18 @@
                 # or if we have to 'jump' to a different treeversion given
                 # by the continuation-of header.
                 if self.changes[rev].continuationof:
-                    treeversion = '--'.join(
-                        self.changes[rev].continuationof.split('--')[:-1]
+                    treeversion = b'--'.join(
+                        self.changes[rev].continuationof.split(b'--')[:-1]
                     )
                     break
 
                 # If we reached a base-0 revision w/o any continuation-of
                 # header, it means the tree history ends here.
-                if rev[-6:] == 'base-0':
+                if rev[-6:] == b'base-0':
                     break
 
     def after(self):
-        self.ui.debug('cleaning up %s\n' % self.tmppath)
+        self.ui.debug(b'cleaning up %s\n' % self.tmppath)
         shutil.rmtree(self.tmppath, ignore_errors=True)
 
     def getheads(self):
@@ -159,7 +161,7 @@
 
     def getfile(self, name, rev):
         if rev != self.lastrev:
-            raise error.Abort(_('internal calling inconsistency'))
+            raise error.Abort(_(b'internal calling inconsistency'))
 
         if not os.path.lexists(os.path.join(self.tmppath, name)):
             return None, None
@@ -168,7 +170,7 @@
 
     def getchanges(self, rev, full):
         if full:
-            raise error.Abort(_("convert from arch does not support --full"))
+            raise error.Abort(_(b"convert from arch does not support --full"))
         self._update(rev)
         changes = []
         copies = {}
@@ -214,14 +216,14 @@
         cmdline = [self.execmd, cmd]
         cmdline += args
         cmdline = [procutil.shellquote(arg) for arg in cmdline]
-        cmdline += ['>', os.devnull, '2>', os.devnull]
-        cmdline = procutil.quotecommand(' '.join(cmdline))
-        self.ui.debug(cmdline, '\n')
+        cmdline += [b'>', os.devnull, b'2>', os.devnull]
+        cmdline = procutil.quotecommand(b' '.join(cmdline))
+        self.ui.debug(cmdline, b'\n')
         return os.system(pycompat.rapply(procutil.tonativestr, cmdline))
 
     def _update(self, rev):
-        self.ui.debug('applying revision %s...\n' % rev)
-        changeset, status = self.runlines('replay', '-d', self.tmppath, rev)
+        self.ui.debug(b'applying revision %s...\n' % rev)
+        changeset, status = self.runlines(b'replay', b'-d', self.tmppath, rev)
         if status:
             # Something went wrong while merging (baz or tla
             # issue?), get latest revision and try from there
@@ -230,7 +232,7 @@
         else:
             old_rev = self.parents[rev][0]
             self.ui.debug(
-                'computing changeset between %s and %s...\n' % (old_rev, rev)
+                b'computing changeset between %s and %s...\n' % (old_rev, rev)
             )
             self._parsechangeset(changeset, rev)
 
@@ -239,16 +241,16 @@
         if stat.S_ISLNK(mode):
             data = util.readlink(os.path.join(self.tmppath, name))
             if mode:
-                mode = 'l'
+                mode = b'l'
             else:
-                mode = ''
+                mode = b''
         else:
             data = util.readfile(os.path.join(self.tmppath, name))
-            mode = (mode & 0o111) and 'x' or ''
+            mode = (mode & 0o111) and b'x' or b''
         return data, mode
 
     def _exclude(self, name):
-        exclude = ['{arch}', '.arch-ids', '.arch-inventory']
+        exclude = [b'{arch}', b'.arch-ids', b'.arch-inventory']
         for exc in exclude:
             if name.find(exc) != -1:
                 return True
@@ -282,15 +284,15 @@
         return changes, copies
 
     def _obtainrevision(self, rev):
-        self.ui.debug('obtaining revision %s...\n' % rev)
-        output = self._execute('get', rev, self.tmppath)
+        self.ui.debug(b'obtaining revision %s...\n' % rev)
+        output = self._execute(b'get', rev, self.tmppath)
         self.checkexit(output)
-        self.ui.debug('analyzing revision %s...\n' % rev)
+        self.ui.debug(b'analyzing revision %s...\n' % rev)
         files = self._readcontents(self.tmppath)
         self.changes[rev].add_files += files
 
     def _stripbasepath(self, path):
-        if path.startswith('./'):
+        if path.startswith(b'./'):
             return path[2:]
         return path
 
@@ -300,73 +302,73 @@
 
             # Commit date
             self.changes[rev].date = dateutil.datestr(
-                dateutil.strdate(catlog['Standard-date'], '%Y-%m-%d %H:%M:%S')
+                dateutil.strdate(catlog[b'Standard-date'], b'%Y-%m-%d %H:%M:%S')
             )
 
             # Commit author
-            self.changes[rev].author = self.recode(catlog['Creator'])
+            self.changes[rev].author = self.recode(catlog[b'Creator'])
 
             # Commit description
-            self.changes[rev].summary = '\n\n'.join(
-                (catlog['Summary'], catlog.get_payload())
+            self.changes[rev].summary = b'\n\n'.join(
+                (catlog[b'Summary'], catlog.get_payload())
             )
             self.changes[rev].summary = self.recode(self.changes[rev].summary)
 
             # Commit revision origin when dealing with a branch or tag
-            if 'Continuation-of' in catlog:
+            if b'Continuation-of' in catlog:
                 self.changes[rev].continuationof = self.recode(
-                    catlog['Continuation-of']
+                    catlog[b'Continuation-of']
                 )
         except Exception:
-            raise error.Abort(_('could not parse cat-log of %s') % rev)
+            raise error.Abort(_(b'could not parse cat-log of %s') % rev)
 
     def _parsechangeset(self, data, rev):
         for l in data:
             l = l.strip()
             # Added file (ignore added directory)
-            if l.startswith('A') and not l.startswith('A/'):
+            if l.startswith(b'A') and not l.startswith(b'A/'):
                 file = self._stripbasepath(l[1:].strip())
                 if not self._exclude(file):
                     self.changes[rev].add_files.append(file)
             # Deleted file (ignore deleted directory)
-            elif l.startswith('D') and not l.startswith('D/'):
+            elif l.startswith(b'D') and not l.startswith(b'D/'):
                 file = self._stripbasepath(l[1:].strip())
                 if not self._exclude(file):
                     self.changes[rev].del_files.append(file)
             # Modified binary file
-            elif l.startswith('Mb'):
+            elif l.startswith(b'Mb'):
                 file = self._stripbasepath(l[2:].strip())
                 if not self._exclude(file):
                     self.changes[rev].mod_files.append(file)
             # Modified link
-            elif l.startswith('M->'):
+            elif l.startswith(b'M->'):
                 file = self._stripbasepath(l[3:].strip())
                 if not self._exclude(file):
                     self.changes[rev].mod_files.append(file)
             # Modified file
-            elif l.startswith('M'):
+            elif l.startswith(b'M'):
                 file = self._stripbasepath(l[1:].strip())
                 if not self._exclude(file):
                     self.changes[rev].mod_files.append(file)
             # Renamed file (or link)
-            elif l.startswith('=>'):
-                files = l[2:].strip().split(' ')
+            elif l.startswith(b'=>'):
+                files = l[2:].strip().split(b' ')
                 if len(files) == 1:
-                    files = l[2:].strip().split('\t')
+                    files = l[2:].strip().split(b'\t')
                 src = self._stripbasepath(files[0])
                 dst = self._stripbasepath(files[1])
                 if not self._exclude(src) and not self._exclude(dst):
                     self.changes[rev].ren_files[src] = dst
             # Conversion from file to link or from link to file (modified)
-            elif l.startswith('ch'):
+            elif l.startswith(b'ch'):
                 file = self._stripbasepath(l[2:].strip())
                 if not self._exclude(file):
                     self.changes[rev].mod_files.append(file)
             # Renamed directory
-            elif l.startswith('/>'):
-                dirs = l[2:].strip().split(' ')
+            elif l.startswith(b'/>'):
+                dirs = l[2:].strip().split(b' ')
                 if len(dirs) == 1:
-                    dirs = l[2:].strip().split('\t')
+                    dirs = l[2:].strip().split(b'\t')
                 src = self._stripbasepath(dirs[0])
                 dst = self._stripbasepath(dirs[1])
                 if not self._exclude(src) and not self._exclude(dst):
--- a/hgext/convert/hg.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/convert/hg.py	Sun Oct 06 09:48:39 2019 -0400
@@ -51,33 +51,33 @@
 class mercurial_sink(common.converter_sink):
     def __init__(self, ui, repotype, path):
         common.converter_sink.__init__(self, ui, repotype, path)
-        self.branchnames = ui.configbool('convert', 'hg.usebranchnames')
-        self.clonebranches = ui.configbool('convert', 'hg.clonebranches')
-        self.tagsbranch = ui.config('convert', 'hg.tagsbranch')
+        self.branchnames = ui.configbool(b'convert', b'hg.usebranchnames')
+        self.clonebranches = ui.configbool(b'convert', b'hg.clonebranches')
+        self.tagsbranch = ui.config(b'convert', b'hg.tagsbranch')
         self.lastbranch = None
         if os.path.isdir(path) and len(os.listdir(path)) > 0:
             try:
                 self.repo = hg.repository(self.ui, path)
                 if not self.repo.local():
                     raise NoRepo(
-                        _('%s is not a local Mercurial repository') % path
+                        _(b'%s is not a local Mercurial repository') % path
                     )
             except error.RepoError as err:
                 ui.traceback()
                 raise NoRepo(err.args[0])
         else:
             try:
-                ui.status(_('initializing destination %s repository\n') % path)
+                ui.status(_(b'initializing destination %s repository\n') % path)
                 self.repo = hg.repository(self.ui, path, create=True)
                 if not self.repo.local():
                     raise NoRepo(
-                        _('%s is not a local Mercurial repository') % path
+                        _(b'%s is not a local Mercurial repository') % path
                     )
                 self.created.append(path)
             except error.RepoError:
                 ui.traceback()
                 raise NoRepo(
-                    _("could not create hg repository %s as sink") % path
+                    _(b"could not create hg repository %s as sink") % path
                 )
         self.lock = None
         self.wlock = None
@@ -85,22 +85,22 @@
         self.subrevmaps = {}
 
     def before(self):
-        self.ui.debug('run hg sink pre-conversion action\n')
+        self.ui.debug(b'run hg sink pre-conversion action\n')
         self.wlock = self.repo.wlock()
         self.lock = self.repo.lock()
 
     def after(self):
-        self.ui.debug('run hg sink post-conversion action\n')
+        self.ui.debug(b'run hg sink post-conversion action\n')
         if self.lock:
             self.lock.release()
         if self.wlock:
             self.wlock.release()
 
     def revmapfile(self):
-        return self.repo.vfs.join("shamap")
+        return self.repo.vfs.join(b"shamap")
 
     def authorfile(self):
-        return self.repo.vfs.join("authormap")
+        return self.repo.vfs.join(b"authormap")
 
     def setbranch(self, branch, pbranches):
         if not self.clonebranches:
@@ -109,8 +109,8 @@
         setbranch = branch != self.lastbranch
         self.lastbranch = branch
         if not branch:
-            branch = 'default'
-        pbranches = [(b[0], b[1] and b[1] or 'default') for b in pbranches]
+            branch = b'default'
+        pbranches = [(b[0], b[1] and b[1] or b'default') for b in pbranches]
 
         branchpath = os.path.join(self.path, branch)
         if setbranch:
@@ -135,7 +135,9 @@
             for pbranch, heads in sorted(missings.iteritems()):
                 pbranchpath = os.path.join(self.path, pbranch)
                 prepo = hg.peer(self.ui, {}, pbranchpath)
-                self.ui.note(_('pulling from %s into %s\n') % (pbranch, branch))
+                self.ui.note(
+                    _(b'pulling from %s into %s\n') % (pbranch, branch)
+                )
                 exchange.pull(
                     self.repo, prepo, [prepo.lookup(h) for h in heads]
                 )
@@ -144,10 +146,10 @@
     def _rewritetags(self, source, revmap, data):
         fp = stringio()
         for line in data.splitlines():
-            s = line.split(' ', 1)
+            s = line.split(b' ', 1)
             if len(s) != 2:
-                self.ui.warn(_('invalid tag entry: "%s"\n') % line)
-                fp.write('%s\n' % line)  # Bogus, but keep for hash stability
+                self.ui.warn(_(b'invalid tag entry: "%s"\n') % line)
+                fp.write(b'%s\n' % line)  # Bogus, but keep for hash stability
                 continue
             revid = revmap.get(source.lookuprev(s[0]))
             if not revid:
@@ -155,16 +157,16 @@
                     revid = s[0]
                 else:
                     # missing, but keep for hash stability
-                    self.ui.warn(_('missing tag entry: "%s"\n') % line)
-                    fp.write('%s\n' % line)
+                    self.ui.warn(_(b'missing tag entry: "%s"\n') % line)
+                    fp.write(b'%s\n' % line)
                     continue
-            fp.write('%s %s\n' % (revid, s[1]))
+            fp.write(b'%s %s\n' % (revid, s[1]))
         return fp.getvalue()
 
     def _rewritesubstate(self, source, data):
         fp = stringio()
         for line in data.splitlines():
-            s = line.split(' ', 1)
+            s = line.split(b' ', 1)
             if len(s) != 2:
                 continue
 
@@ -174,7 +176,7 @@
                 revmap = self.subrevmaps.get(subpath)
                 if revmap is None:
                     revmap = mapfile(
-                        self.ui, self.repo.wjoin(subpath, '.hg/shamap')
+                        self.ui, self.repo.wjoin(subpath, b'.hg/shamap')
                     )
                     self.subrevmaps[subpath] = revmap
 
@@ -182,9 +184,9 @@
                     # need to be converted, in which case they can be cloned
                     # into place instead of converted.  Therefore, only warn
                     # once.
-                    msg = _('no ".hgsubstate" updates will be made for "%s"\n')
+                    msg = _(b'no ".hgsubstate" updates will be made for "%s"\n')
                     if len(revmap) == 0:
-                        sub = self.repo.wvfs.reljoin(subpath, '.hg')
+                        sub = self.repo.wvfs.reljoin(subpath, b'.hg')
 
                         if self.repo.wvfs.exists(sub):
                             self.ui.warn(msg % subpath)
@@ -193,13 +195,13 @@
                 if not newid:
                     if len(revmap) > 0:
                         self.ui.warn(
-                            _("%s is missing from %s/.hg/shamap\n")
+                            _(b"%s is missing from %s/.hg/shamap\n")
                             % (revid, subpath)
                         )
                 else:
                     revid = newid
 
-            fp.write('%s %s\n' % (revid, subpath))
+            fp.write(b'%s %s\n' % (revid, subpath))
 
         return fp.getvalue()
 
@@ -232,16 +234,16 @@
 
             # If the file requires actual merging, abort. We don't have enough
             # context to resolve merges correctly.
-            if action in ['m', 'dm', 'cd', 'dc']:
+            if action in [b'm', b'dm', b'cd', b'dc']:
                 raise error.Abort(
                     _(
-                        "unable to convert merge commit "
-                        "since target parents do not merge cleanly (file "
-                        "%s, parents %s and %s)"
+                        b"unable to convert merge commit "
+                        b"since target parents do not merge cleanly (file "
+                        b"%s, parents %s and %s)"
                     )
                     % (file, p1ctx, p2ctx)
                 )
-            elif action == 'k':
+            elif action == b'k':
                 # 'keep' means nothing changed from p1
                 continue
             else:
@@ -255,7 +257,7 @@
 
         def getfilectx(repo, memctx, f):
             if p2ctx and f in p2files and f not in copies:
-                self.ui.debug('reusing %s from p2\n' % f)
+                self.ui.debug(b'reusing %s from p2\n' % f)
                 try:
                     return p2ctx[f]
                 except error.ManifestLookupError:
@@ -269,17 +271,17 @@
             data, mode = source.getfile(f, v)
             if data is None:
                 return None
-            if f == '.hgtags':
+            if f == b'.hgtags':
                 data = self._rewritetags(source, revmap, data)
-            if f == '.hgsubstate':
+            if f == b'.hgsubstate':
                 data = self._rewritesubstate(source, data)
             return context.memfilectx(
                 self.repo,
                 memctx,
                 f,
                 data,
-                'l' in mode,
-                'x' in mode,
+                b'l' in mode,
+                b'x' in mode,
                 copies.get(f),
             )
 
@@ -310,15 +312,15 @@
 
         extra = commit.extra.copy()
 
-        sourcename = self.repo.ui.config('convert', 'hg.sourcename')
+        sourcename = self.repo.ui.config(b'convert', b'hg.sourcename')
         if sourcename:
-            extra['convert_source'] = sourcename
+            extra[b'convert_source'] = sourcename
 
         for label in (
-            'source',
-            'transplant_source',
-            'rebase_source',
-            'intermediate-source',
+            b'source',
+            b'transplant_source',
+            b'rebase_source',
+            b'intermediate-source',
         ):
             node = extra.get(label)
 
@@ -326,20 +328,20 @@
                 continue
 
             # Only transplant stores its reference in binary
-            if label == 'transplant_source':
+            if label == b'transplant_source':
                 node = nodemod.hex(node)
 
             newrev = revmap.get(node)
             if newrev is not None:
-                if label == 'transplant_source':
+                if label == b'transplant_source':
                     newrev = nodemod.bin(newrev)
 
                 extra[label] = newrev
 
         if self.branchnames and commit.branch:
-            extra['branch'] = commit.branch
+            extra[b'branch'] = commit.branch
         if commit.rev and commit.saverev:
-            extra['convert_revision'] = commit.rev
+            extra[b'convert_revision'] = commit.rev
 
         while parents:
             p1 = p2
@@ -373,14 +375,14 @@
             # We won't know if the conversion changes the node until after the
             # commit, so copy the source's phase for now.
             self.repo.ui.setconfig(
-                'phases',
-                'new-commit',
+                b'phases',
+                b'new-commit',
                 phases.phasenames[commit.phase],
-                'convert',
+                b'convert',
             )
 
-            with self.repo.transaction("convert") as tr:
-                if self.repo.ui.config('convert', 'hg.preserve-hash'):
+            with self.repo.transaction(b"convert") as tr:
+                if self.repo.ui.config(b'convert', b'hg.preserve-hash'):
                     origctx = commit.ctx
                 else:
                     origctx = None
@@ -396,15 +398,15 @@
                             self.repo, tr, phases.draft, [ctx.node()]
                         )
 
-            text = "(octopus merge fixup)\n"
+            text = b"(octopus merge fixup)\n"
             p2 = node
 
         if self.filemapmode and nparents == 1:
             man = self.repo.manifestlog.getstorage(b'')
             mnode = self.repo.changelog.read(nodemod.bin(p2))[0]
-            closed = 'close' in commit.extra
+            closed = b'close' in commit.extra
             if not closed and not man.cmp(m1node, man.revision(mnode)):
-                self.ui.status(_("filtering out empty revision\n"))
+                self.ui.status(_(b"filtering out empty revision\n"))
                 self.repo.rollback(force=True)
                 return parent
         return p2
@@ -416,13 +418,13 @@
         oldlines = set()
         for branch, heads in self.repo.branchmap().iteritems():
             for h in heads:
-                if '.hgtags' in self.repo[h]:
+                if b'.hgtags' in self.repo[h]:
                     oldlines.update(
-                        set(self.repo[h]['.hgtags'].data().splitlines(True))
+                        set(self.repo[h][b'.hgtags'].data().splitlines(True))
                     )
         oldlines = sorted(list(oldlines))
 
-        newlines = sorted([("%s %s\n" % (tags[tag], tag)) for tag in tags])
+        newlines = sorted([(b"%s %s\n" % (tags[tag], tag)) for tag in tags])
         if newlines == oldlines:
             return None, None
 
@@ -430,12 +432,12 @@
         oldtags = set()
         newtags = set()
         for line in oldlines:
-            s = line.strip().split(' ', 1)
+            s = line.strip().split(b' ', 1)
             if len(s) != 2:
                 continue
             oldtags.add(s[1])
         for line in newlines:
-            s = line.strip().split(' ', 1)
+            s = line.strip().split(b' ', 1)
             if len(s) != 2:
                 continue
             if s[1] not in oldtags:
@@ -444,21 +446,21 @@
         if not newtags:
             return None, None
 
-        data = "".join(newlines)
+        data = b"".join(newlines)
 
         def getfilectx(repo, memctx, f):
             return context.memfilectx(repo, memctx, f, data, False, False, None)
 
-        self.ui.status(_("updating tags\n"))
-        date = "%d 0" % int(time.mktime(time.gmtime()))
-        extra = {'branch': self.tagsbranch}
+        self.ui.status(_(b"updating tags\n"))
+        date = b"%d 0" % int(time.mktime(time.gmtime()))
+        extra = {b'branch': self.tagsbranch}
         ctx = context.memctx(
             self.repo,
             (tagparent, None),
-            "update tags",
-            [".hgtags"],
+            b"update tags",
+            [b".hgtags"],
             getfilectx,
-            "convert-repo",
+            b"convert-repo",
             date,
             extra,
         )
@@ -475,8 +477,8 @@
         try:
             wlock = self.repo.wlock()
             lock = self.repo.lock()
-            tr = self.repo.transaction('bookmark')
-            self.ui.status(_("updating bookmarks\n"))
+            tr = self.repo.transaction(b'bookmark')
+            self.ui.status(_(b"updating bookmarks\n"))
             destmarks = self.repo._bookmarks
             changes = [
                 (bookmark, nodemod.bin(updatedbookmark[bookmark]))
@@ -495,9 +497,9 @@
         if rev not in self.repo and self.clonebranches:
             raise error.Abort(
                 _(
-                    'revision %s not found in destination '
-                    'repository (lookups with clonebranches=true '
-                    'are not implemented)'
+                    b'revision %s not found in destination '
+                    b'repository (lookups with clonebranches=true '
+                    b'are not implemented)'
                 )
                 % rev
             )
@@ -507,9 +509,9 @@
 class mercurial_source(common.converter_source):
     def __init__(self, ui, repotype, path, revs=None):
         common.converter_source.__init__(self, ui, repotype, path, revs)
-        self.ignoreerrors = ui.configbool('convert', 'hg.ignoreerrors')
+        self.ignoreerrors = ui.configbool(b'convert', b'hg.ignoreerrors')
         self.ignored = set()
-        self.saverev = ui.configbool('convert', 'hg.saverev')
+        self.saverev = ui.configbool(b'convert', b'hg.saverev')
         try:
             self.repo = hg.repository(self.ui, path)
             # try to provoke an exception if this isn't really a hg
@@ -518,21 +520,21 @@
                 raise error.RepoError
         except error.RepoError:
             ui.traceback()
-            raise NoRepo(_("%s is not a local Mercurial repository") % path)
+            raise NoRepo(_(b"%s is not a local Mercurial repository") % path)
         self.lastrev = None
         self.lastctx = None
         self._changescache = None, None
         self.convertfp = None
         # Restrict converted revisions to startrev descendants
-        startnode = ui.config('convert', 'hg.startrev')
-        hgrevs = ui.config('convert', 'hg.revs')
+        startnode = ui.config(b'convert', b'hg.startrev')
+        hgrevs = ui.config(b'convert', b'hg.revs')
         if hgrevs is None:
             if startnode is not None:
                 try:
                     startnode = self.repo.lookup(startnode)
                 except error.RepoError:
                     raise error.Abort(
-                        _('%s is not a valid start revision') % startnode
+                        _(b'%s is not a valid start revision') % startnode
                     )
                 startrev = self.repo.changelog.rev(startnode)
                 children = {startnode: 1}
@@ -548,7 +550,10 @@
         else:
             if revs or startnode is not None:
                 raise error.Abort(
-                    _('hg.revs cannot be combined with ' 'hg.startrev or --rev')
+                    _(
+                        b'hg.revs cannot be combined with '
+                        b'hg.startrev or --rev'
+                    )
                 )
             nodes = set()
             parents = set()
@@ -635,7 +640,7 @@
                 if not self.ignoreerrors:
                     raise
                 self.ignored.add(name)
-                self.ui.warn(_('ignoring: %s\n') % e)
+                self.ui.warn(_(b'ignoring: %s\n') % e)
         return copies
 
     def getcommit(self, rev):
@@ -647,7 +652,7 @@
 
         return common.commit(
             author=ctx.user(),
-            date=dateutil.datestr(ctx.date(), '%Y-%m-%d %H:%M:%S %1%2'),
+            date=dateutil.datestr(ctx.date(), b'%Y-%m-%d %H:%M:%S %1%2'),
             desc=ctx.description(),
             rev=crev,
             parents=parents,
@@ -668,7 +673,7 @@
         tags = [
             t
             for t in self.repo.tagslist()
-            if self.repo.tagtype(t[0]) == 'global'
+            if self.repo.tagtype(t[0]) == b'global'
         ]
         return dict(
             [
@@ -696,15 +701,15 @@
 
     def converted(self, rev, destrev):
         if self.convertfp is None:
-            self.convertfp = open(self.repo.vfs.join('shamap'), 'ab')
-        self.convertfp.write(util.tonativeeol('%s %s\n' % (destrev, rev)))
+            self.convertfp = open(self.repo.vfs.join(b'shamap'), b'ab')
+        self.convertfp.write(util.tonativeeol(b'%s %s\n' % (destrev, rev)))
         self.convertfp.flush()
 
     def before(self):
-        self.ui.debug('run hg source pre-conversion action\n')
+        self.ui.debug(b'run hg source pre-conversion action\n')
 
     def after(self):
-        self.ui.debug('run hg source post-conversion action\n')
+        self.ui.debug(b'run hg source post-conversion action\n')
 
     def hasnativeorder(self):
         return True
@@ -721,6 +726,6 @@
     def getbookmarks(self):
         return bookmarks.listbookmarks(self.repo)
 
-    def checkrevformat(self, revstr, mapname='splicemap'):
+    def checkrevformat(self, revstr, mapname=b'splicemap'):
         """ Mercurial, revision string is a 40 byte hex """
         self.checkhexformat(revstr, mapname)
--- a/hgext/convert/monotone.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/convert/monotone.py	Sun Oct 06 09:48:39 2019 -0400
@@ -26,11 +26,11 @@
         if revs and len(revs) > 1:
             raise error.Abort(
                 _(
-                    'monotone source does not support specifying '
-                    'multiple revs'
+                    b'monotone source does not support specifying '
+                    b'multiple revs'
                 )
             )
-        common.commandline.__init__(self, ui, 'mtn')
+        common.commandline.__init__(self, ui, b'mtn')
 
         self.ui = ui
         self.path = path
@@ -38,17 +38,17 @@
         self.revs = revs
 
         norepo = common.NoRepo(
-            _("%s does not look like a monotone repository") % path
+            _(b"%s does not look like a monotone repository") % path
         )
-        if not os.path.exists(os.path.join(path, '_MTN')):
+        if not os.path.exists(os.path.join(path, b'_MTN')):
             # Could be a monotone repository (SQLite db file)
             try:
-                f = open(path, 'rb')
+                f = open(path, b'rb')
                 header = f.read(16)
                 f.close()
             except IOError:
-                header = ''
-            if header != 'SQLite format 3\x00':
+                header = b''
+            if header != b'SQLite format 3\x00':
                 raise norepo
 
         # regular expressions for parsing monotone output
@@ -58,24 +58,26 @@
         revision = br'\s+\[(\w+)\]\s*'
         lines = br'(?:.|\n)+'
 
-        self.dir_re = re.compile(space + "dir" + name)
-        self.file_re = re.compile(space + "file" + name + "content" + revision)
+        self.dir_re = re.compile(space + b"dir" + name)
+        self.file_re = re.compile(
+            space + b"file" + name + b"content" + revision
+        )
         self.add_file_re = re.compile(
-            space + "add_file" + name + "content" + revision
+            space + b"add_file" + name + b"content" + revision
         )
         self.patch_re = re.compile(
-            space + "patch" + name + "from" + revision + "to" + revision
+            space + b"patch" + name + b"from" + revision + b"to" + revision
         )
-        self.rename_re = re.compile(space + "rename" + name + "to" + name)
-        self.delete_re = re.compile(space + "delete" + name)
-        self.tag_re = re.compile(space + "tag" + name + "revision" + revision)
+        self.rename_re = re.compile(space + b"rename" + name + b"to" + name)
+        self.delete_re = re.compile(space + b"delete" + name)
+        self.tag_re = re.compile(space + b"tag" + name + b"revision" + revision)
         self.cert_re = re.compile(
-            lines + space + "name" + name + "value" + value
+            lines + space + b"name" + name + b"value" + value
         )
 
-        attr = space + "file" + lines + space + "attr" + space
+        attr = space + b"file" + lines + space + b"attr" + space
         self.attr_execute_re = re.compile(
-            attr + '"mtn:execute"' + space + '"true"'
+            attr + b'"mtn:execute"' + space + b'"true"'
         )
 
         # cached data
@@ -84,7 +86,7 @@
         self.files = None
         self.dirs = None
 
-        common.checktool('mtn', abort=False)
+        common.checktool(b'mtn', abort=False)
 
     def mtnrun(self, *args, **kwargs):
         if self.automatestdio:
@@ -94,27 +96,27 @@
 
     def mtnrunsingle(self, *args, **kwargs):
         kwargs[r'd'] = self.path
-        return self.run0('automate', *args, **kwargs)
+        return self.run0(b'automate', *args, **kwargs)
 
     def mtnrunstdio(self, *args, **kwargs):
         # Prepare the command in automate stdio format
         kwargs = pycompat.byteskwargs(kwargs)
         command = []
         for k, v in kwargs.iteritems():
-            command.append("%d:%s" % (len(k), k))
+            command.append(b"%d:%s" % (len(k), k))
             if v:
-                command.append("%d:%s" % (len(v), v))
+                command.append(b"%d:%s" % (len(v), v))
         if command:
-            command.insert(0, 'o')
-            command.append('e')
+            command.insert(0, b'o')
+            command.append(b'e')
 
-        command.append('l')
+        command.append(b'l')
         for arg in args:
-            command.append("%d:%s" % (len(arg), arg))
-        command.append('e')
-        command = ''.join(command)
+            command.append(b"%d:%s" % (len(arg), arg))
+        command.append(b'e')
+        command = b''.join(command)
 
-        self.ui.debug("mtn: sending '%s'\n" % command)
+        self.ui.debug(b"mtn: sending '%s'\n" % command)
         self.mtnwritefp.write(command)
         self.mtnwritefp.flush()
 
@@ -122,42 +124,44 @@
 
     def mtnstdioreadpacket(self):
         read = None
-        commandnbr = ''
-        while read != ':':
+        commandnbr = b''
+        while read != b':':
             read = self.mtnreadfp.read(1)
             if not read:
-                raise error.Abort(_('bad mtn packet - no end of commandnbr'))
+                raise error.Abort(_(b'bad mtn packet - no end of commandnbr'))
             commandnbr += read
         commandnbr = commandnbr[:-1]
 
         stream = self.mtnreadfp.read(1)
-        if stream not in 'mewptl':
-            raise error.Abort(_('bad mtn packet - bad stream type %s') % stream)
+        if stream not in b'mewptl':
+            raise error.Abort(
+                _(b'bad mtn packet - bad stream type %s') % stream
+            )
 
         read = self.mtnreadfp.read(1)
-        if read != ':':
-            raise error.Abort(_('bad mtn packet - no divider before size'))
+        if read != b':':
+            raise error.Abort(_(b'bad mtn packet - no divider before size'))
 
         read = None
-        lengthstr = ''
-        while read != ':':
+        lengthstr = b''
+        while read != b':':
             read = self.mtnreadfp.read(1)
             if not read:
-                raise error.Abort(_('bad mtn packet - no end of packet size'))
+                raise error.Abort(_(b'bad mtn packet - no end of packet size'))
             lengthstr += read
         try:
             length = pycompat.long(lengthstr[:-1])
         except TypeError:
             raise error.Abort(
-                _('bad mtn packet - bad packet size %s') % lengthstr
+                _(b'bad mtn packet - bad packet size %s') % lengthstr
             )
 
         read = self.mtnreadfp.read(length)
         if len(read) != length:
             raise error.Abort(
                 _(
-                    "bad mtn packet - unable to read full packet "
-                    "read %s of %s"
+                    b"bad mtn packet - unable to read full packet "
+                    b"read %s of %s"
                 )
                 % (len(read), length)
             )
@@ -169,33 +173,33 @@
         while True:
             commandnbr, stream, length, output = self.mtnstdioreadpacket()
             self.ui.debug(
-                'mtn: read packet %s:%s:%d\n' % (commandnbr, stream, length)
+                b'mtn: read packet %s:%s:%d\n' % (commandnbr, stream, length)
             )
 
-            if stream == 'l':
+            if stream == b'l':
                 # End of command
-                if output != '0':
+                if output != b'0':
                     raise error.Abort(
-                        _("mtn command '%s' returned %s") % (command, output)
+                        _(b"mtn command '%s' returned %s") % (command, output)
                     )
                 break
-            elif stream in 'ew':
+            elif stream in b'ew':
                 # Error, warning output
-                self.ui.warn(_('%s error:\n') % self.command)
+                self.ui.warn(_(b'%s error:\n') % self.command)
                 self.ui.warn(output)
-            elif stream == 'p':
+            elif stream == b'p':
                 # Progress messages
-                self.ui.debug('mtn: ' + output)
-            elif stream == 'm':
+                self.ui.debug(b'mtn: ' + output)
+            elif stream == b'm':
                 # Main stream - command output
                 retval.append(output)
 
-        return ''.join(retval)
+        return b''.join(retval)
 
     def mtnloadmanifest(self, rev):
         if self.manifest_rev == rev:
             return
-        self.manifest = self.mtnrun("get_manifest_of", rev).split("\n\n")
+        self.manifest = self.mtnrun(b"get_manifest_of", rev).split(b"\n\n")
         self.manifest_rev = rev
         self.files = {}
         self.dirs = {}
@@ -203,11 +207,11 @@
         for e in self.manifest:
             m = self.file_re.match(e)
             if m:
-                attr = ""
+                attr = b""
                 name = m.group(1)
                 node = m.group(2)
                 if self.attr_execute_re.match(e):
-                    attr += "x"
+                    attr += b"x"
                 self.files[name] = (node, attr)
             m = self.dir_re.match(e)
             if m:
@@ -224,12 +228,12 @@
 
     def mtngetcerts(self, rev):
         certs = {
-            "author": "<missing>",
-            "date": "<missing>",
-            "changelog": "<missing>",
-            "branch": "<missing>",
+            b"author": b"<missing>",
+            b"date": b"<missing>",
+            b"changelog": b"<missing>",
+            b"branch": b"<missing>",
         }
-        certlist = self.mtnrun("certs", rev)
+        certlist = self.mtnrun(b"certs", rev)
         # mtn < 0.45:
         #   key "test@selenic.com"
         # mtn >= 0.45:
@@ -239,28 +243,28 @@
             m = self.cert_re.match(e)
             if m:
                 name, value = m.groups()
-                value = value.replace(br'\"', '"')
-                value = value.replace(br'\\', '\\')
+                value = value.replace(br'\"', b'"')
+                value = value.replace(br'\\', b'\\')
                 certs[name] = value
         # Monotone may have subsecond dates: 2005-02-05T09:39:12.364306
         # and all times are stored in UTC
-        certs["date"] = certs["date"].split('.')[0] + " UTC"
+        certs[b"date"] = certs[b"date"].split(b'.')[0] + b" UTC"
         return certs
 
     # implement the converter_source interface:
 
     def getheads(self):
         if not self.revs:
-            return self.mtnrun("leaves").splitlines()
+            return self.mtnrun(b"leaves").splitlines()
         else:
             return self.revs
 
     def getchanges(self, rev, full):
         if full:
             raise error.Abort(
-                _("convert from monotone does not support " "--full")
+                _(b"convert from monotone does not support " b"--full")
             )
-        revision = self.mtnrun("get_revision", rev).split("\n\n")
+        revision = self.mtnrun(b"get_revision", rev).split(b"\n\n")
         files = {}
         ignoremove = {}
         renameddirs = []
@@ -298,7 +302,7 @@
             for tofile in self.files:
                 if tofile in ignoremove:
                     continue
-                if tofile.startswith(todir + '/'):
+                if tofile.startswith(todir + b'/'):
                     renamed[tofile] = fromdir + tofile[len(todir) :]
                     # Avoid chained moves like:
                     # d1(/a) => d3/d1(/a)
@@ -306,9 +310,9 @@
                     ignoremove[tofile] = 1
             for tofile, fromfile in renamed.items():
                 self.ui.debug(
-                    "copying file in renamed directory from '%s' to '%s'"
+                    b"copying file in renamed directory from '%s' to '%s'"
                     % (fromfile, tofile),
-                    '\n',
+                    b'\n',
                 )
                 files[tofile] = rev
                 copies[tofile] = fromfile
@@ -321,32 +325,32 @@
         if not self.mtnisfile(name, rev):
             return None, None
         try:
-            data = self.mtnrun("get_file_of", name, r=rev)
+            data = self.mtnrun(b"get_file_of", name, r=rev)
         except Exception:
             return None, None
         self.mtnloadmanifest(rev)
-        node, attr = self.files.get(name, (None, ""))
+        node, attr = self.files.get(name, (None, b""))
         return data, attr
 
     def getcommit(self, rev):
         extra = {}
         certs = self.mtngetcerts(rev)
-        if certs.get('suspend') == certs["branch"]:
-            extra['close'] = 1
-        dateformat = "%Y-%m-%dT%H:%M:%S"
+        if certs.get(b'suspend') == certs[b"branch"]:
+            extra[b'close'] = 1
+        dateformat = b"%Y-%m-%dT%H:%M:%S"
         return common.commit(
-            author=certs["author"],
-            date=dateutil.datestr(dateutil.strdate(certs["date"], dateformat)),
-            desc=certs["changelog"],
+            author=certs[b"author"],
+            date=dateutil.datestr(dateutil.strdate(certs[b"date"], dateformat)),
+            desc=certs[b"changelog"],
             rev=rev,
-            parents=self.mtnrun("parents", rev).splitlines(),
-            branch=certs["branch"],
+            parents=self.mtnrun(b"parents", rev).splitlines(),
+            branch=certs[b"branch"],
             extra=extra,
         )
 
     def gettags(self):
         tags = {}
-        for e in self.mtnrun("tags").split("\n\n"):
+        for e in self.mtnrun(b"tags").split(b"\n\n"):
             m = self.tag_re.match(e)
             if m:
                 tags[m.group(1)] = m.group(2)
@@ -360,42 +364,42 @@
     def before(self):
         # Check if we have a new enough version to use automate stdio
         try:
-            versionstr = self.mtnrunsingle("interface_version")
+            versionstr = self.mtnrunsingle(b"interface_version")
             version = float(versionstr)
         except Exception:
             raise error.Abort(
-                _("unable to determine mtn automate interface " "version")
+                _(b"unable to determine mtn automate interface " b"version")
             )
 
         if version >= 12.0:
             self.automatestdio = True
             self.ui.debug(
-                "mtn automate version %f - using automate stdio\n" % version
+                b"mtn automate version %f - using automate stdio\n" % version
             )
 
             # launch the long-running automate stdio process
             self.mtnwritefp, self.mtnreadfp = self._run2(
-                'automate', 'stdio', '-d', self.path
+                b'automate', b'stdio', b'-d', self.path
             )
             # read the headers
             read = self.mtnreadfp.readline()
-            if read != 'format-version: 2\n':
+            if read != b'format-version: 2\n':
                 raise error.Abort(
-                    _('mtn automate stdio header unexpected: %s') % read
+                    _(b'mtn automate stdio header unexpected: %s') % read
                 )
-            while read != '\n':
+            while read != b'\n':
                 read = self.mtnreadfp.readline()
                 if not read:
                     raise error.Abort(
                         _(
-                            "failed to reach end of mtn automate "
-                            "stdio headers"
+                            b"failed to reach end of mtn automate "
+                            b"stdio headers"
                         )
                     )
         else:
             self.ui.debug(
-                "mtn automate version %s - not using automate stdio "
-                "(automate >= 12.0 - mtn >= 0.46 is needed)\n" % version
+                b"mtn automate version %s - not using automate stdio "
+                b"(automate >= 12.0 - mtn >= 0.46 is needed)\n" % version
             )
 
     def after(self):
--- a/hgext/convert/p4.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/convert/p4.py	Sun Oct 06 09:48:39 2019 -0400
@@ -24,7 +24,7 @@
 
 
 def loaditer(f):
-    "Yield the dictionary objects generated by p4"
+    b"Yield the dictionary objects generated by p4"
     try:
         while True:
             d = marshal.load(f)
@@ -44,7 +44,12 @@
     >>> decodefilename(b'//Depot/Directory/%2525/%2523/%23%40.%2A')
     '//Depot/Directory/%25/%23/#@.*'
     """
-    replacements = [('%2A', '*'), ('%23', '#'), ('%40', '@'), ('%25', '%')]
+    replacements = [
+        (b'%2A', b'*'),
+        (b'%23', b'#'),
+        (b'%40', b'@'),
+        (b'%25', b'%'),
+    ]
     for k, v in replacements:
         filename = filename.replace(k, v)
     return filename
@@ -57,16 +62,16 @@
 
         super(p4_source, self).__init__(ui, repotype, path, revs=revs)
 
-        if "/" in path and not path.startswith('//'):
+        if b"/" in path and not path.startswith(b'//'):
             raise common.NoRepo(
-                _('%s does not look like a P4 repository') % path
+                _(b'%s does not look like a P4 repository') % path
             )
 
-        common.checktool('p4', abort=False)
+        common.checktool(b'p4', abort=False)
 
         self.revmap = {}
         self.encoding = self.ui.config(
-            'convert', 'p4.encoding', convcmd.orig_encoding
+            b'convert', b'p4.encoding', convcmd.orig_encoding
         )
         self.re_type = re.compile(
             br"([a-z]+)?(text|binary|symlink|apple|resource|unicode|utf\d+)"
@@ -80,7 +85,10 @@
 
         if revs and len(revs) > 1:
             raise error.Abort(
-                _("p4 source does not support specifying " "multiple revisions")
+                _(
+                    b"p4 source does not support specifying "
+                    b"multiple revisions"
+                )
             )
 
     def setrevmap(self, revmap):
@@ -97,18 +105,18 @@
         self.revmap = revmap
 
     def _parse_view(self, path):
-        "Read changes affecting the path"
-        cmd = 'p4 -G changes -s submitted %s' % procutil.shellquote(path)
-        stdout = procutil.popen(cmd, mode='rb')
+        b"Read changes affecting the path"
+        cmd = b'p4 -G changes -s submitted %s' % procutil.shellquote(path)
+        stdout = procutil.popen(cmd, mode=b'rb')
         p4changes = {}
         for d in loaditer(stdout):
-            c = d.get("change", None)
+            c = d.get(b"change", None)
             if c:
                 p4changes[c] = True
         return p4changes
 
     def _parse(self, ui, path):
-        "Prepare list of P4 filenames and revisions to import"
+        b"Prepare list of P4 filenames and revisions to import"
         p4changes = {}
         changeset = {}
         files_map = {}
@@ -117,29 +125,29 @@
         depotname = {}
         heads = []
 
-        ui.status(_('reading p4 views\n'))
+        ui.status(_(b'reading p4 views\n'))
 
         # read client spec or view
-        if "/" in path:
+        if b"/" in path:
             p4changes.update(self._parse_view(path))
-            if path.startswith("//") and path.endswith("/..."):
-                views = {path[:-3]: ""}
+            if path.startswith(b"//") and path.endswith(b"/..."):
+                views = {path[:-3]: b""}
             else:
-                views = {"//": ""}
+                views = {b"//": b""}
         else:
-            cmd = 'p4 -G client -o %s' % procutil.shellquote(path)
-            clientspec = marshal.load(procutil.popen(cmd, mode='rb'))
+            cmd = b'p4 -G client -o %s' % procutil.shellquote(path)
+            clientspec = marshal.load(procutil.popen(cmd, mode=b'rb'))
 
             views = {}
             for client in clientspec:
-                if client.startswith("View"):
+                if client.startswith(b"View"):
                     sview, cview = clientspec[client].split()
                     p4changes.update(self._parse_view(sview))
-                    if sview.endswith("...") and cview.endswith("..."):
+                    if sview.endswith(b"...") and cview.endswith(b"..."):
                         sview = sview[:-3]
                         cview = cview[:-3]
                     cview = cview[2:]
-                    cview = cview[cview.find("/") + 1 :]
+                    cview = cview[cview.find(b"/") + 1 :]
                     views[sview] = cview
 
         # list of changes that affect our source files
@@ -151,10 +159,10 @@
         vieworder.sort(key=len, reverse=True)
 
         # handle revision limiting
-        startrev = self.ui.config('convert', 'p4.startrev')
+        startrev = self.ui.config(b'convert', b'p4.startrev')
 
         # now read the full changelists to get the list of file revisions
-        ui.status(_('collecting p4 changelists\n'))
+        ui.status(_(b'collecting p4 changelists\n'))
         lastid = None
         for change in p4changes:
             if startrev and int(change) < int(startrev):
@@ -176,28 +184,28 @@
 
             descarr = c.desc.splitlines(True)
             if len(descarr) > 0:
-                shortdesc = descarr[0].rstrip('\r\n')
+                shortdesc = descarr[0].rstrip(b'\r\n')
             else:
-                shortdesc = '**empty changelist description**'
+                shortdesc = b'**empty changelist description**'
 
-            t = '%s %s' % (c.rev, repr(shortdesc)[1:-1])
-            ui.status(stringutil.ellipsis(t, 80) + '\n')
+            t = b'%s %s' % (c.rev, repr(shortdesc)[1:-1])
+            ui.status(stringutil.ellipsis(t, 80) + b'\n')
 
             files = []
             copies = {}
             copiedfiles = []
             i = 0
-            while ("depotFile%d" % i) in d and ("rev%d" % i) in d:
-                oldname = d["depotFile%d" % i]
+            while (b"depotFile%d" % i) in d and (b"rev%d" % i) in d:
+                oldname = d[b"depotFile%d" % i]
                 filename = None
                 for v in vieworder:
                     if oldname.lower().startswith(v.lower()):
                         filename = decodefilename(views[v] + oldname[len(v) :])
                         break
                 if filename:
-                    files.append((filename, d["rev%d" % i]))
+                    files.append((filename, d[b"rev%d" % i]))
                     depotname[filename] = oldname
-                    if d.get("action%d" % i) == "move/add":
+                    if d.get(b"action%d" % i) == b"move/add":
                         copiedfiles.append(filename)
                     localname[oldname] = filename
                 i += 1
@@ -206,23 +214,23 @@
             for filename in copiedfiles:
                 oldname = depotname[filename]
 
-                flcmd = 'p4 -G filelog %s' % procutil.shellquote(oldname)
-                flstdout = procutil.popen(flcmd, mode='rb')
+                flcmd = b'p4 -G filelog %s' % procutil.shellquote(oldname)
+                flstdout = procutil.popen(flcmd, mode=b'rb')
 
                 copiedfilename = None
                 for d in loaditer(flstdout):
                     copiedoldname = None
 
                     i = 0
-                    while ("change%d" % i) in d:
+                    while (b"change%d" % i) in d:
                         if (
-                            d["change%d" % i] == change
-                            and d["action%d" % i] == "move/add"
+                            d[b"change%d" % i] == change
+                            and d[b"action%d" % i] == b"move/add"
                         ):
                             j = 0
-                            while ("file%d,%d" % (i, j)) in d:
-                                if d["how%d,%d" % (i, j)] == "moved from":
-                                    copiedoldname = d["file%d,%d" % (i, j)]
+                            while (b"file%d,%d" % (i, j)) in d:
+                                if d[b"how%d,%d" % (i, j)] == b"moved from":
+                                    copiedoldname = d[b"file%d,%d" % (i, j)]
                                     break
                                 j += 1
                         i += 1
@@ -235,7 +243,7 @@
                     copies[filename] = copiedfilename
                 else:
                     ui.warn(
-                        _("cannot find source for copied file: %s@%s\n")
+                        _(b"cannot find source for copied file: %s@%s\n")
                         % (filename, change)
                     )
 
@@ -248,11 +256,11 @@
             heads = [lastid]
 
         return {
-            'changeset': changeset,
-            'files': files_map,
-            'copies': copies_map,
-            'heads': heads,
-            'depotname': depotname,
+            b'changeset': changeset,
+            b'files': files_map,
+            b'copies': copies_map,
+            b'heads': heads,
+            b'depotname': depotname,
         }
 
     @util.propertycache
@@ -261,74 +269,74 @@
 
     @util.propertycache
     def copies(self):
-        return self._parse_once['copies']
+        return self._parse_once[b'copies']
 
     @util.propertycache
     def files(self):
-        return self._parse_once['files']
+        return self._parse_once[b'files']
 
     @util.propertycache
     def changeset(self):
-        return self._parse_once['changeset']
+        return self._parse_once[b'changeset']
 
     @util.propertycache
     def heads(self):
-        return self._parse_once['heads']
+        return self._parse_once[b'heads']
 
     @util.propertycache
     def depotname(self):
-        return self._parse_once['depotname']
+        return self._parse_once[b'depotname']
 
     def getheads(self):
         return self.heads
 
     def getfile(self, name, rev):
-        cmd = 'p4 -G print %s' % procutil.shellquote(
-            "%s#%s" % (self.depotname[name], rev)
+        cmd = b'p4 -G print %s' % procutil.shellquote(
+            b"%s#%s" % (self.depotname[name], rev)
         )
 
         lasterror = None
         while True:
-            stdout = procutil.popen(cmd, mode='rb')
+            stdout = procutil.popen(cmd, mode=b'rb')
 
             mode = None
             contents = []
             keywords = None
 
             for d in loaditer(stdout):
-                code = d["code"]
-                data = d.get("data")
+                code = d[b"code"]
+                data = d.get(b"data")
 
-                if code == "error":
+                if code == b"error":
                     # if this is the first time an error happened,
                     # re-attempt getting the file
                     if not lasterror:
-                        lasterror = IOError(d["generic"], data)
+                        lasterror = IOError(d[b"generic"], data)
                         # this will exit the inner-most for-loop
                         break
                     else:
                         raise lasterror
 
-                elif code == "stat":
-                    action = d.get("action")
-                    if action in ["purge", "delete", "move/delete"]:
+                elif code == b"stat":
+                    action = d.get(b"action")
+                    if action in [b"purge", b"delete", b"move/delete"]:
                         return None, None
-                    p4type = self.re_type.match(d["type"])
+                    p4type = self.re_type.match(d[b"type"])
                     if p4type:
-                        mode = ""
-                        flags = (p4type.group(1) or "") + (
-                            p4type.group(3) or ""
+                        mode = b""
+                        flags = (p4type.group(1) or b"") + (
+                            p4type.group(3) or b""
                         )
-                        if "x" in flags:
-                            mode = "x"
-                        if p4type.group(2) == "symlink":
-                            mode = "l"
-                        if "ko" in flags:
+                        if b"x" in flags:
+                            mode = b"x"
+                        if p4type.group(2) == b"symlink":
+                            mode = b"l"
+                        if b"ko" in flags:
                             keywords = self.re_keywords_old
-                        elif "k" in flags:
+                        elif b"k" in flags:
                             keywords = self.re_keywords
 
-                elif code == "text" or code == "binary":
+                elif code == b"text" or code == b"binary":
                     contents.append(data)
 
                 lasterror = None
@@ -339,18 +347,18 @@
         if mode is None:
             return None, None
 
-        contents = ''.join(contents)
+        contents = b''.join(contents)
 
         if keywords:
-            contents = keywords.sub("$\\1$", contents)
-        if mode == "l" and contents.endswith("\n"):
+            contents = keywords.sub(b"$\\1$", contents)
+        if mode == b"l" and contents.endswith(b"\n"):
             contents = contents[:-1]
 
         return contents, mode
 
     def getchanges(self, rev, full):
         if full:
-            raise error.Abort(_("convert from p4 does not support --full"))
+            raise error.Abort(_(b"convert from p4 does not support --full"))
         return self.files[rev], self.copies[rev], set()
 
     def _construct_commit(self, obj, parents=None):
@@ -358,26 +366,26 @@
         Constructs a common.commit object from an unmarshalled
         `p4 describe` output
         """
-        desc = self.recode(obj.get("desc", ""))
-        date = (int(obj["time"]), 0)  # timezone not set
+        desc = self.recode(obj.get(b"desc", b""))
+        date = (int(obj[b"time"]), 0)  # timezone not set
         if parents is None:
             parents = []
 
         return common.commit(
-            author=self.recode(obj["user"]),
-            date=dateutil.datestr(date, '%Y-%m-%d %H:%M:%S %1%2'),
+            author=self.recode(obj[b"user"]),
+            date=dateutil.datestr(date, b'%Y-%m-%d %H:%M:%S %1%2'),
             parents=parents,
             desc=desc,
             branch=None,
-            rev=obj['change'],
-            extra={"p4": obj['change'], "convert_revision": obj['change']},
+            rev=obj[b'change'],
+            extra={b"p4": obj[b'change'], b"convert_revision": obj[b'change']},
         )
 
     def _fetch_revision(self, rev):
         """Return an output of `p4 describe` including author, commit date as
         a dictionary."""
-        cmd = "p4 -G describe -s %s" % rev
-        stdout = procutil.popen(cmd, mode='rb')
+        cmd = b"p4 -G describe -s %s" % rev
+        stdout = procutil.popen(cmd, mode=b'rb')
         return marshal.load(stdout)
 
     def getcommit(self, rev):
@@ -387,7 +395,7 @@
             d = self._fetch_revision(rev)
             return self._construct_commit(d, parents=None)
         raise error.Abort(
-            _("cannot find %s in the revmap or parsed changesets") % rev
+            _(b"cannot find %s in the revmap or parsed changesets") % rev
         )
 
     def gettags(self):
--- a/hgext/convert/subversion.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/convert/subversion.py	Sun Oct 06 09:48:39 2019 -0400
@@ -54,7 +54,7 @@
     import warnings
 
     warnings.filterwarnings(
-        'ignore', module='svn.core', category=DeprecationWarning
+        b'ignore', module=b'svn.core', category=DeprecationWarning
     )
     svn.core.SubversionException  # trigger import to catch error
 
@@ -80,16 +80,16 @@
     >>> revsplit(b'bad')
     ('', '', 0)
     """
-    parts = rev.rsplit('@', 1)
+    parts = rev.rsplit(b'@', 1)
     revnum = 0
     if len(parts) > 1:
         revnum = int(parts[1])
-    parts = parts[0].split('/', 1)
-    uuid = ''
-    mod = ''
-    if len(parts) > 1 and parts[0].startswith('svn:'):
+    parts = parts[0].split(b'/', 1)
+    uuid = b''
+    mod = b''
+    if len(parts) > 1 and parts[0].startswith(b'svn:'):
         uuid = parts[0][4:]
-        mod = '/' + parts[1]
+        mod = b'/' + parts[1]
     return uuid, mod, revnum
 
 
@@ -101,7 +101,7 @@
     # so we can extend it safely with new components. The "safe"
     # characters were taken from the "svn_uri__char_validity" table in
     # libsvn_subr/path.c.
-    return urlreq.quote(s, "!$&'()*+,-./:=@_~")
+    return urlreq.quote(s, b"!$&'()*+,-./:=@_~")
 
 
 def geturl(path):
@@ -113,11 +113,11 @@
     if os.path.isdir(path):
         path = os.path.normpath(os.path.abspath(path))
         if pycompat.iswindows:
-            path = '/' + util.normpath(path)
+            path = b'/' + util.normpath(path)
         # Module URL is later compared with the repository URL returned
         # by the svn API, which is UTF-8.
         path = encoding.tolocal(path)
-        path = 'file://%s' % quote(path)
+        path = b'file://%s' % quote(path)
     return svn.core.svn_path_canonicalize(path)
 
 
@@ -188,7 +188,7 @@
     """
     if svn is None:
         raise error.Abort(
-            _('debugsvnlog could not load Subversion python ' 'bindings')
+            _(b'debugsvnlog could not load Subversion python ' b'bindings')
         )
 
     args = decodeargs(ui.fin.read())
@@ -208,8 +208,8 @@
             except EOFError:
                 raise error.Abort(
                     _(
-                        'Mercurial failed to run itself, check'
-                        ' hg executable is in PATH'
+                        b'Mercurial failed to run itself, check'
+                        b' hg executable is in PATH'
                     )
                 )
             try:
@@ -217,7 +217,7 @@
             except (TypeError, ValueError):
                 if entry is None:
                     break
-                raise error.Abort(_("log stream exception '%s'") % entry)
+                raise error.Abort(_(b"log stream exception '%s'") % entry)
             yield entry
 
     def close(self):
@@ -270,7 +270,7 @@
 # looking for several svn-specific files and directories in the given
 # directory.
 def filecheck(ui, path, proto):
-    for x in ('locks', 'hooks', 'format', 'db'):
+    for x in (b'locks', b'hooks', b'format', b'db'):
         if not os.path.exists(os.path.join(path, x)):
             return False
     return True
@@ -282,16 +282,16 @@
 def httpcheck(ui, path, proto):
     try:
         opener = urlreq.buildopener()
-        rsp = opener.open('%s://%s/!svn/ver/0/.svn' % (proto, path), 'rb')
+        rsp = opener.open(b'%s://%s/!svn/ver/0/.svn' % (proto, path), b'rb')
         data = rsp.read()
     except urlerr.httperror as inst:
         if inst.code != 404:
             # Except for 404 we cannot know for sure this is not an svn repo
             ui.warn(
                 _(
-                    'svn: cannot probe remote repository, assume it could '
-                    'be a subversion repository. Use --source-type if you '
-                    'know better.\n'
+                    b'svn: cannot probe remote repository, assume it could '
+                    b'be a subversion repository. Use --source-type if you '
+                    b'know better.\n'
                 )
             )
             return True
@@ -299,38 +299,38 @@
     except Exception:
         # Could be urlerr.urlerror if the URL is invalid or anything else.
         return False
-    return '<m:human-readable errcode="160013">' in data
+    return b'<m:human-readable errcode="160013">' in data
 
 
 protomap = {
-    'http': httpcheck,
-    'https': httpcheck,
-    'file': filecheck,
+    b'http': httpcheck,
+    b'https': httpcheck,
+    b'file': filecheck,
 }
 
 
 def issvnurl(ui, url):
     try:
-        proto, path = url.split('://', 1)
-        if proto == 'file':
+        proto, path = url.split(b'://', 1)
+        if proto == b'file':
             if (
                 pycompat.iswindows
-                and path[:1] == '/'
+                and path[:1] == b'/'
                 and path[1:2].isalpha()
-                and path[2:6].lower() == '%3a/'
+                and path[2:6].lower() == b'%3a/'
             ):
-                path = path[:2] + ':/' + path[6:]
+                path = path[:2] + b':/' + path[6:]
             path = urlreq.url2pathname(path)
     except ValueError:
-        proto = 'file'
+        proto = b'file'
         path = os.path.abspath(url)
-    if proto == 'file':
+    if proto == b'file':
         path = util.pconvert(path)
     check = protomap.get(proto, lambda *args: False)
-    while '/' in path:
+    while b'/' in path:
         if check(ui, path, proto):
             return True
-        path = path.rsplit('/', 1)[0]
+        path = path.rsplit(b'/', 1)[0]
     return False
 
 
@@ -353,35 +353,35 @@
         super(svn_source, self).__init__(ui, repotype, url, revs=revs)
 
         if not (
-            url.startswith('svn://')
-            or url.startswith('svn+ssh://')
+            url.startswith(b'svn://')
+            or url.startswith(b'svn+ssh://')
             or (
                 os.path.exists(url)
-                and os.path.exists(os.path.join(url, '.svn'))
+                and os.path.exists(os.path.join(url, b'.svn'))
             )
             or issvnurl(ui, url)
         ):
             raise NoRepo(
-                _("%s does not look like a Subversion repository") % url
+                _(b"%s does not look like a Subversion repository") % url
             )
         if svn is None:
-            raise MissingTool(_('could not load Subversion python bindings'))
+            raise MissingTool(_(b'could not load Subversion python bindings'))
 
         try:
             version = svn.core.SVN_VER_MAJOR, svn.core.SVN_VER_MINOR
             if version < (1, 4):
                 raise MissingTool(
                     _(
-                        'Subversion python bindings %d.%d found, '
-                        '1.4 or later required'
+                        b'Subversion python bindings %d.%d found, '
+                        b'1.4 or later required'
                     )
                     % version
                 )
         except AttributeError:
             raise MissingTool(
                 _(
-                    'Subversion python bindings are too old, 1.4 '
-                    'or later required'
+                    b'Subversion python bindings are too old, 1.4 '
+                    b'or later required'
                 )
             )
 
@@ -391,14 +391,14 @@
         try:
             # Support file://path@rev syntax. Useful e.g. to convert
             # deleted branches.
-            at = url.rfind('@')
+            at = url.rfind(b'@')
             if at >= 0:
                 latest = int(url[at + 1 :])
                 url = url[:at]
         except ValueError:
             pass
         self.url = geturl(url)
-        self.encoding = 'UTF-8'  # Subversion is always nominal UTF-8
+        self.encoding = b'UTF-8'  # Subversion is always nominal UTF-8
         try:
             self.transport = transport.SvnRaTransport(url=self.url)
             self.ra = self.transport.ra
@@ -414,15 +414,15 @@
             self.uuid = svn.ra.get_uuid(self.ra)
         except svn.core.SubversionException:
             ui.traceback()
-            svnversion = '%d.%d.%d' % (
+            svnversion = b'%d.%d.%d' % (
                 svn.core.SVN_VER_MAJOR,
                 svn.core.SVN_VER_MINOR,
                 svn.core.SVN_VER_MICRO,
             )
             raise NoRepo(
                 _(
-                    "%s does not look like a Subversion repository "
-                    "to libsvn version %s"
+                    b"%s does not look like a Subversion repository "
+                    b"to libsvn version %s"
                 )
                 % (self.url, svnversion)
             )
@@ -431,29 +431,29 @@
             if len(revs) > 1:
                 raise error.Abort(
                     _(
-                        'subversion source does not support '
-                        'specifying multiple revisions'
+                        b'subversion source does not support '
+                        b'specifying multiple revisions'
                     )
                 )
             try:
                 latest = int(revs[0])
             except ValueError:
                 raise error.Abort(
-                    _('svn: revision %s is not an integer') % revs[0]
+                    _(b'svn: revision %s is not an integer') % revs[0]
                 )
 
-        trunkcfg = self.ui.config('convert', 'svn.trunk')
+        trunkcfg = self.ui.config(b'convert', b'svn.trunk')
         if trunkcfg is None:
-            trunkcfg = 'trunk'
-        self.trunkname = trunkcfg.strip('/')
-        self.startrev = self.ui.config('convert', 'svn.startrev')
+            trunkcfg = b'trunk'
+        self.trunkname = trunkcfg.strip(b'/')
+        self.startrev = self.ui.config(b'convert', b'svn.startrev')
         try:
             self.startrev = int(self.startrev)
             if self.startrev < 0:
                 self.startrev = 0
         except ValueError:
             raise error.Abort(
-                _('svn: start revision %s is not an integer') % self.startrev
+                _(b'svn: start revision %s is not an integer') % self.startrev
             )
 
         try:
@@ -461,12 +461,14 @@
         except SvnPathNotFound:
             self.head = None
         if not self.head:
-            raise error.Abort(_('no revision found in module %s') % self.module)
+            raise error.Abort(
+                _(b'no revision found in module %s') % self.module
+            )
         self.last_changed = self.revnum(self.head)
 
         self._changescache = (None, None)
 
-        if os.path.exists(os.path.join(url, '.svn/entries')):
+        if os.path.exists(os.path.join(url, b'.svn/entries')):
             self.wc = url
         else:
             self.wc = None
@@ -484,7 +486,7 @@
     def exists(self, path, optrev):
         try:
             svn.client.ls(
-                self.url.rstrip('/') + '/' + quote(path),
+                self.url.rstrip(b'/') + b'/' + quote(path),
                 optrev,
                 False,
                 self.ctx,
@@ -499,61 +501,62 @@
             return kind == svn.core.svn_node_dir
 
         def getcfgpath(name, rev):
-            cfgpath = self.ui.config('convert', 'svn.' + name)
-            if cfgpath is not None and cfgpath.strip() == '':
+            cfgpath = self.ui.config(b'convert', b'svn.' + name)
+            if cfgpath is not None and cfgpath.strip() == b'':
                 return None
-            path = (cfgpath or name).strip('/')
+            path = (cfgpath or name).strip(b'/')
             if not self.exists(path, rev):
-                if self.module.endswith(path) and name == 'trunk':
+                if self.module.endswith(path) and name == b'trunk':
                     # we are converting from inside this directory
                     return None
                 if cfgpath:
                     raise error.Abort(
-                        _('expected %s to be at %r, but not found')
+                        _(b'expected %s to be at %r, but not found')
                         % (name, path)
                     )
                 return None
-            self.ui.note(_('found %s at %r\n') % (name, path))
+            self.ui.note(_(b'found %s at %r\n') % (name, path))
             return path
 
         rev = optrev(self.last_changed)
-        oldmodule = ''
-        trunk = getcfgpath('trunk', rev)
-        self.tags = getcfgpath('tags', rev)
-        branches = getcfgpath('branches', rev)
+        oldmodule = b''
+        trunk = getcfgpath(b'trunk', rev)
+        self.tags = getcfgpath(b'tags', rev)
+        branches = getcfgpath(b'branches', rev)
 
         # If the project has a trunk or branches, we will extract heads
         # from them. We keep the project root otherwise.
         if trunk:
-            oldmodule = self.module or ''
-            self.module += '/' + trunk
+            oldmodule = self.module or b''
+            self.module += b'/' + trunk
             self.head = self.latest(self.module, self.last_changed)
             if not self.head:
                 raise error.Abort(
-                    _('no revision found in module %s') % self.module
+                    _(b'no revision found in module %s') % self.module
                 )
 
         # First head in the list is the module's head
         self.heads = [self.head]
         if self.tags is not None:
-            self.tags = '%s/%s' % (oldmodule, (self.tags or 'tags'))
+            self.tags = b'%s/%s' % (oldmodule, (self.tags or b'tags'))
 
         # Check if branches bring a few more heads to the list
         if branches:
-            rpath = self.url.strip('/')
+            rpath = self.url.strip(b'/')
             branchnames = svn.client.ls(
-                rpath + '/' + quote(branches), rev, False, self.ctx
+                rpath + b'/' + quote(branches), rev, False, self.ctx
             )
             for branch in sorted(branchnames):
-                module = '%s/%s/%s' % (oldmodule, branches, branch)
+                module = b'%s/%s/%s' % (oldmodule, branches, branch)
                 if not isdir(module, self.last_changed):
                     continue
                 brevid = self.latest(module, self.last_changed)
                 if not brevid:
-                    self.ui.note(_('ignoring empty branch %s\n') % branch)
+                    self.ui.note(_(b'ignoring empty branch %s\n') % branch)
                     continue
                 self.ui.note(
-                    _('found branch %s at %d\n') % (branch, self.revnum(brevid))
+                    _(b'found branch %s at %d\n')
+                    % (branch, self.revnum(brevid))
                 )
                 self.heads.append(brevid)
 
@@ -561,14 +564,14 @@
             if len(self.heads) > 1:
                 raise error.Abort(
                     _(
-                        'svn: start revision is not supported '
-                        'with more than one branch'
+                        b'svn: start revision is not supported '
+                        b'with more than one branch'
                     )
                 )
             revnum = self.revnum(self.heads[0])
             if revnum < self.startrev:
                 raise error.Abort(
-                    _('svn: no revision found after start revision %d')
+                    _(b'svn: no revision found after start revision %d')
                     % self.startrev
                 )
 
@@ -628,13 +631,13 @@
                 stop = revnum + 1
             self._fetch_revisions(revnum, stop)
             if rev not in self.commits:
-                raise error.Abort(_('svn: revision %s not found') % revnum)
+                raise error.Abort(_(b'svn: revision %s not found') % revnum)
         revcommit = self.commits[rev]
         # caller caches the result, so free it here to release memory
         del self.commits[rev]
         return revcommit
 
-    def checkrevformat(self, revstr, mapname='splicemap'):
+    def checkrevformat(self, revstr, mapname=b'splicemap'):
         """ fails if revision format does not match the correct format"""
         if not re.match(
             r'svn:[0-9a-f]{8,8}-[0-9a-f]{4,4}-'
@@ -643,12 +646,12 @@
             revstr,
         ):
             raise error.Abort(
-                _('%s entry %s is not a valid revision' ' identifier')
+                _(b'%s entry %s is not a valid revision' b' identifier')
                 % (mapname, revstr)
             )
 
     def numcommits(self):
-        return int(self.head.rsplit('@', 1)[1]) - self.startrev
+        return int(self.head.rsplit(b'@', 1)[1]) - self.startrev
 
     def gettags(self):
         tags = {}
@@ -689,7 +692,7 @@
                     srctagspath = copies.pop()[0]
 
                 for source, sourcerev, dest in copies:
-                    if not dest.startswith(tagspath + '/'):
+                    if not dest.startswith(tagspath + b'/'):
                         continue
                     for tag in pendings:
                         if tag[0].startswith(dest):
@@ -709,14 +712,14 @@
                 addeds = dict(
                     (p, e.copyfrom_path)
                     for p, e in origpaths.iteritems()
-                    if e.action == 'A' and e.copyfrom_path
+                    if e.action == b'A' and e.copyfrom_path
                 )
                 badroots = set()
                 for destroot in addeds:
                     for source, sourcerev, dest in pendings:
                         if not dest.startswith(
-                            destroot + '/'
-                        ) or source.startswith(addeds[destroot] + '/'):
+                            destroot + b'/'
+                        ) or source.startswith(addeds[destroot] + b'/'):
                             continue
                         badroots.add(destroot)
                         break
@@ -726,13 +729,13 @@
                         p
                         for p in pendings
                         if p[2] != badroot
-                        and not p[2].startswith(badroot + '/')
+                        and not p[2].startswith(badroot + b'/')
                     ]
 
                 # Tell tag renamings from tag creations
                 renamings = []
                 for source, sourcerev, dest in pendings:
-                    tagname = dest.split('/')[-1]
+                    tagname = dest.split(b'/')[-1]
                     if source.startswith(srctagspath):
                         renamings.append([source, sourcerev, tagname])
                         continue
@@ -761,18 +764,18 @@
             return
         if self.convertfp is None:
             self.convertfp = open(
-                os.path.join(self.wc, '.svn', 'hg-shamap'), 'ab'
+                os.path.join(self.wc, b'.svn', b'hg-shamap'), b'ab'
             )
         self.convertfp.write(
-            util.tonativeeol('%s %d\n' % (destrev, self.revnum(rev)))
+            util.tonativeeol(b'%s %d\n' % (destrev, self.revnum(rev)))
         )
         self.convertfp.flush()
 
     def revid(self, revnum, module=None):
-        return 'svn:%s%s@%s' % (self.uuid, module or self.module, revnum)
+        return b'svn:%s%s@%s' % (self.uuid, module or self.module, revnum)
 
     def revnum(self, rev):
-        return int(rev.split('@')[-1])
+        return int(rev.split(b'@')[-1])
 
     def latest(self, path, stop=None):
         """Find the latest revid affecting path, up to stop revision
@@ -800,7 +803,7 @@
                             continue
                         newpath = paths[p].copyfrom_path + path[len(p) :]
                         self.ui.debug(
-                            "branch renamed from %s to %s at %d\n"
+                            b"branch renamed from %s to %s at %d\n"
                             % (path, newpath, revnum)
                         )
                         path = newpath
@@ -813,20 +816,20 @@
 
         if not path.startswith(self.rootmodule):
             # Requests on foreign branches may be forbidden at server level
-            self.ui.debug('ignoring foreign branch %r\n' % path)
+            self.ui.debug(b'ignoring foreign branch %r\n' % path)
             return None
 
         if stop is None:
             stop = svn.ra.get_latest_revnum(self.ra)
         try:
-            prevmodule = self.reparent('')
-            dirent = svn.ra.stat(self.ra, path.strip('/'), stop)
+            prevmodule = self.reparent(b'')
+            dirent = svn.ra.stat(self.ra, path.strip(b'/'), stop)
             self.reparent(prevmodule)
         except svn.core.SubversionException:
             dirent = None
         if not dirent:
             raise SvnPathNotFound(
-                _('%s not found up to revision %d') % (path, stop)
+                _(b'%s not found up to revision %d') % (path, stop)
             )
 
         # stat() gives us the previous revision on this line of
@@ -843,11 +846,11 @@
             # the whole history.
             revnum, realpath = findchanges(path, stop)
             if revnum is None:
-                self.ui.debug('ignoring empty branch %r\n' % realpath)
+                self.ui.debug(b'ignoring empty branch %r\n' % realpath)
                 return None
 
         if not realpath.startswith(self.rootmodule):
-            self.ui.debug('ignoring foreign branch %r\n' % realpath)
+            self.ui.debug(b'ignoring foreign branch %r\n' % realpath)
             return None
         return self.revid(revnum, realpath)
 
@@ -858,8 +861,8 @@
         svnurl = self.baseurl + quote(module)
         prevmodule = self.prevmodule
         if prevmodule is None:
-            prevmodule = ''
-        self.ui.debug("reparent to %s\n" % svnurl)
+            prevmodule = b''
+        self.ui.debug(b"reparent to %s\n" % svnurl)
         svn.ra.reparent(self.ra, svnurl)
         self.prevmodule = module
         return prevmodule
@@ -874,7 +877,7 @@
             self.reparent(self.module)
 
         progress = self.ui.makeprogress(
-            _('scanning paths'), unit=_('paths'), total=len(paths)
+            _(b'scanning paths'), unit=_(b'paths'), total=len(paths)
         )
         for i, (path, ent) in enumerate(paths):
             progress.update(i, item=path)
@@ -894,37 +897,37 @@
                 if not copyfrom_path:
                     continue
                 self.ui.debug(
-                    "copied to %s from %s@%s\n"
+                    b"copied to %s from %s@%s\n"
                     % (entrypath, copyfrom_path, ent.copyfrom_rev)
                 )
                 copies[self.recode(entrypath)] = self.recode(copyfrom_path)
             elif kind == 0:  # gone, but had better be a deleted *file*
-                self.ui.debug("gone from %s\n" % ent.copyfrom_rev)
+                self.ui.debug(b"gone from %s\n" % ent.copyfrom_rev)
                 pmodule, prevnum = revsplit(parents[0])[1:]
-                parentpath = pmodule + "/" + entrypath
+                parentpath = pmodule + b"/" + entrypath
                 fromkind = self._checkpath(entrypath, prevnum, pmodule)
 
                 if fromkind == svn.core.svn_node_file:
                     removed.add(self.recode(entrypath))
                 elif fromkind == svn.core.svn_node_dir:
-                    oroot = parentpath.strip('/')
-                    nroot = path.strip('/')
+                    oroot = parentpath.strip(b'/')
+                    nroot = path.strip(b'/')
                     children = self._iterfiles(oroot, prevnum)
                     for childpath in children:
                         childpath = childpath.replace(oroot, nroot)
-                        childpath = self.getrelpath("/" + childpath, pmodule)
+                        childpath = self.getrelpath(b"/" + childpath, pmodule)
                         if childpath:
                             removed.add(self.recode(childpath))
                 else:
                     self.ui.debug(
-                        'unknown path in revision %d: %s\n' % (revnum, path)
+                        b'unknown path in revision %d: %s\n' % (revnum, path)
                     )
             elif kind == svn.core.svn_node_dir:
-                if ent.action == 'M':
+                if ent.action == b'M':
                     # If the directory just had a prop change,
                     # then we shouldn't need to look for its children.
                     continue
-                if ent.action == 'R' and parents:
+                if ent.action == b'R' and parents:
                     # If a directory is replacing a file, mark the previous
                     # file as deleted
                     pmodule, prevnum = revsplit(parents[0])[1:]
@@ -935,12 +938,12 @@
                         # We do not know what files were kept or removed,
                         # so mark them all as changed.
                         for childpath in self._iterfiles(pmodule, prevnum):
-                            childpath = self.getrelpath("/" + childpath)
+                            childpath = self.getrelpath(b"/" + childpath)
                             if childpath:
                                 changed.add(self.recode(childpath))
 
                 for childpath in self._iterfiles(path, revnum):
-                    childpath = self.getrelpath("/" + childpath)
+                    childpath = self.getrelpath(b"/" + childpath)
                     if childpath:
                         changed.add(self.recode(childpath))
 
@@ -956,12 +959,12 @@
                 if not copyfrompath:
                     continue
                 self.ui.debug(
-                    "mark %s came from %s:%d\n"
+                    b"mark %s came from %s:%d\n"
                     % (path, copyfrompath, ent.copyfrom_rev)
                 )
                 children = self._iterfiles(ent.copyfrom_path, ent.copyfrom_rev)
                 for childpath in children:
-                    childpath = self.getrelpath("/" + childpath, pmodule)
+                    childpath = self.getrelpath(b"/" + childpath, pmodule)
                     if not childpath:
                         continue
                     copytopath = path + childpath[len(copyfrompath) :]
@@ -983,7 +986,8 @@
             the revision is a branch root.
             """
             self.ui.debug(
-                "parsing revision %d (%d changes)\n" % (revnum, len(orig_paths))
+                b"parsing revision %d (%d changes)\n"
+                % (revnum, len(orig_paths))
             )
 
             branched = False
@@ -1012,11 +1016,11 @@
                         if prevnum >= self.startrev:
                             parents = [previd]
                             self.ui.note(
-                                _('found parent of branch %s at %d: %s\n')
+                                _(b'found parent of branch %s at %d: %s\n')
                                 % (self.module, prevnum, prevmodule)
                             )
                 else:
-                    self.ui.debug("no copyfrom path, don't know what to do.\n")
+                    self.ui.debug(b"no copyfrom path, don't know what to do.\n")
 
             paths = []
             # filter out unrelated paths
@@ -1028,22 +1032,24 @@
             # Example SVN datetime. Includes microseconds.
             # ISO-8601 conformant
             # '2007-01-04T17:35:00.902377Z'
-            date = dateutil.parsedate(date[:19] + " UTC", ["%Y-%m-%dT%H:%M:%S"])
-            if self.ui.configbool('convert', 'localtimezone'):
+            date = dateutil.parsedate(
+                date[:19] + b" UTC", [b"%Y-%m-%dT%H:%M:%S"]
+            )
+            if self.ui.configbool(b'convert', b'localtimezone'):
                 date = makedatetimestamp(date[0])
 
             if message:
                 log = self.recode(message)
             else:
-                log = ''
+                log = b''
 
             if author:
                 author = self.recode(author)
             else:
-                author = ''
+                author = b''
 
             try:
-                branch = self.module.split("/")[-1]
+                branch = self.module.split(b"/")[-1]
                 if branch == self.trunkname:
                     branch = None
             except IndexError:
@@ -1051,7 +1057,7 @@
 
             cset = commit(
                 author=author,
-                date=dateutil.datestr(date, '%Y-%m-%d %H:%M:%S %1%2'),
+                date=dateutil.datestr(date, b'%Y-%m-%d %H:%M:%S %1%2'),
                 desc=log,
                 parents=parents,
                 branch=branch,
@@ -1068,7 +1074,7 @@
             return cset, branched
 
         self.ui.note(
-            _('fetching revision log for "%s" from %d to %d\n')
+            _(b'fetching revision log for "%s" from %d to %d\n')
             % (self.module, from_revnum, to_revnum)
         )
 
@@ -1083,7 +1089,7 @@
                         lastonbranch = True
                         break
                     if not paths:
-                        self.ui.debug('revision %d has no entries\n' % revnum)
+                        self.ui.debug(b'revision %d has no entries\n' % revnum)
                         # If we ever leave the loop on an empty
                         # revision, do not try to get a parent branch
                         lastonbranch = lastonbranch or revnum == 0
@@ -1114,7 +1120,7 @@
             (inst, num) = xxx_todo_changeme.args
             if num == svn.core.SVN_ERR_FS_NO_SUCH_REVISION:
                 raise error.Abort(
-                    _('svn: branch has no revision %s') % to_revnum
+                    _(b'svn: branch has no revision %s') % to_revnum
                 )
             raise
 
@@ -1135,8 +1141,8 @@
             io.close()
             if isinstance(info, list):
                 info = info[-1]
-            mode = ("svn:executable" in info) and 'x' or ''
-            mode = ("svn:special" in info) and 'l' or mode
+            mode = (b"svn:executable" in info) and b'x' or b''
+            mode = (b"svn:special" in info) and b'l' or mode
         except svn.core.SubversionException as e:
             notfound = (
                 svn.core.SVN_ERR_FS_NOT_FOUND,
@@ -1145,20 +1151,20 @@
             if e.apr_err in notfound:  # File not found
                 return None, None
             raise
-        if mode == 'l':
-            link_prefix = "link "
+        if mode == b'l':
+            link_prefix = b"link "
             if data.startswith(link_prefix):
                 data = data[len(link_prefix) :]
         return data, mode
 
     def _iterfiles(self, path, revnum):
         """Enumerate all files in path at revnum, recursively."""
-        path = path.strip('/')
+        path = path.strip(b'/')
         pool = svn.core.Pool()
-        rpath = '/'.join([self.baseurl, quote(path)]).strip('/')
+        rpath = b'/'.join([self.baseurl, quote(path)]).strip(b'/')
         entries = svn.client.ls(rpath, optrev(revnum), True, self.ctx, pool)
         if path:
-            path += '/'
+            path += b'/'
         return (
             (path + p)
             for p, e in entries.iteritems()
@@ -1175,24 +1181,24 @@
         #   "/CMFPlone/branches/Plone-2_0-branch/tests/PloneTestCase.py"
         # that is to say "tests/PloneTestCase.py"
         if path.startswith(module):
-            relative = path.rstrip('/')[len(module) :]
-            if relative.startswith('/'):
+            relative = path.rstrip(b'/')[len(module) :]
+            if relative.startswith(b'/'):
                 return relative[1:]
-            elif relative == '':
+            elif relative == b'':
                 return relative
 
         # The path is outside our tracked tree...
-        self.ui.debug('%r is not under %r, ignoring\n' % (path, module))
+        self.ui.debug(b'%r is not under %r, ignoring\n' % (path, module))
         return None
 
     def _checkpath(self, path, revnum, module=None):
         if module is not None:
-            prevmodule = self.reparent('')
-            path = module + '/' + path
+            prevmodule = self.reparent(b'')
+            path = module + b'/' + path
         try:
             # ra.check_path does not like leading slashes very much; they lead
             # to PROPFIND subversion errors
-            return svn.ra.check_path(self.ra, path.strip('/'), revnum)
+            return svn.ra.check_path(self.ra, path.strip(b'/'), revnum)
         finally:
             if module is not None:
                 self.reparent(prevmodule)
@@ -1210,9 +1216,9 @@
         # supplied URL
         relpaths = []
         for p in paths:
-            if not p.startswith('/'):
-                p = self.module + '/' + p
-            relpaths.append(p.strip('/'))
+            if not p.startswith(b'/'):
+                p = self.module + b'/' + p
+            relpaths.append(p.strip(b'/'))
         args = [
             self.baseurl,
             relpaths,
@@ -1223,11 +1229,11 @@
             strict_node_history,
         ]
         # developer config: convert.svn.debugsvnlog
-        if not self.ui.configbool('convert', 'svn.debugsvnlog'):
+        if not self.ui.configbool(b'convert', b'svn.debugsvnlog'):
             return directlogstream(*args)
         arg = encodeargs(args)
         hgexe = procutil.hgexecutable()
-        cmd = '%s debugsvnlog' % procutil.shellquote(hgexe)
+        cmd = b'%s debugsvnlog' % procutil.shellquote(hgexe)
         stdin, stdout = procutil.popen2(procutil.quotecommand(cmd))
         stdin.write(arg)
         try:
@@ -1235,8 +1241,8 @@
         except IOError:
             raise error.Abort(
                 _(
-                    'Mercurial failed to run itself, check'
-                    ' hg executable is in PATH'
+                    b'Mercurial failed to run itself, check'
+                    b' hg executable is in PATH'
                 )
             )
         return logstream(stdout)
@@ -1272,18 +1278,18 @@
             os.chdir(self.cwd)
 
     def join(self, name):
-        return os.path.join(self.wc, '.svn', name)
+        return os.path.join(self.wc, b'.svn', name)
 
     def revmapfile(self):
-        return self.join('hg-shamap')
+        return self.join(b'hg-shamap')
 
     def authorfile(self):
-        return self.join('hg-authormap')
+        return self.join(b'hg-authormap')
 
     def __init__(self, ui, repotype, path):
 
         converter_sink.__init__(self, ui, repotype, path)
-        commandline.__init__(self, ui, 'svn')
+        commandline.__init__(self, ui, b'svn')
         self.delete = []
         self.setexec = []
         self.delexec = []
@@ -1292,51 +1298,53 @@
         self.cwd = encoding.getcwd()
 
         created = False
-        if os.path.isfile(os.path.join(path, '.svn', 'entries')):
+        if os.path.isfile(os.path.join(path, b'.svn', b'entries')):
             self.wc = os.path.realpath(path)
-            self.run0('update')
+            self.run0(b'update')
         else:
             if not re.search(br'^(file|http|https|svn|svn\+ssh)\://', path):
                 path = os.path.realpath(path)
                 if os.path.isdir(os.path.dirname(path)):
-                    if not os.path.exists(os.path.join(path, 'db', 'fs-type')):
+                    if not os.path.exists(
+                        os.path.join(path, b'db', b'fs-type')
+                    ):
                         ui.status(
-                            _("initializing svn repository '%s'\n")
+                            _(b"initializing svn repository '%s'\n")
                             % os.path.basename(path)
                         )
-                        commandline(ui, 'svnadmin').run0('create', path)
+                        commandline(ui, b'svnadmin').run0(b'create', path)
                         created = path
                     path = util.normpath(path)
-                    if not path.startswith('/'):
-                        path = '/' + path
-                    path = 'file://' + path
+                    if not path.startswith(b'/'):
+                        path = b'/' + path
+                    path = b'file://' + path
 
             wcpath = os.path.join(
-                encoding.getcwd(), os.path.basename(path) + '-wc'
+                encoding.getcwd(), os.path.basename(path) + b'-wc'
             )
             ui.status(
-                _("initializing svn working copy '%s'\n")
+                _(b"initializing svn working copy '%s'\n")
                 % os.path.basename(wcpath)
             )
-            self.run0('checkout', path, wcpath)
+            self.run0(b'checkout', path, wcpath)
 
             self.wc = wcpath
         self.opener = vfsmod.vfs(self.wc)
         self.wopener = vfsmod.vfs(self.wc)
-        self.childmap = mapfile(ui, self.join('hg-childmap'))
+        self.childmap = mapfile(ui, self.join(b'hg-childmap'))
         if util.checkexec(self.wc):
             self.is_exec = util.isexec
         else:
             self.is_exec = None
 
         if created:
-            hook = os.path.join(created, 'hooks', 'pre-revprop-change')
-            fp = open(hook, 'wb')
+            hook = os.path.join(created, b'hooks', b'pre-revprop-change')
+            fp = open(hook, b'wb')
             fp.write(pre_revprop_change)
             fp.close()
             util.setflags(hook, False, True)
 
-        output = self.run0('info')
+        output = self.run0(b'info')
         self.uuid = self.uuid_re.search(output).group(1).strip()
 
     def wjoin(self, *names):
@@ -1348,7 +1356,7 @@
         # already tracked entries, so we have to track and filter them
         # ourselves.
         m = set()
-        output = self.run0('ls', recursive=True, xml=True)
+        output = self.run0(b'ls', recursive=True, xml=True)
         doc = xml.dom.minidom.parseString(output)
         for e in doc.getElementsByTagName(r'entry'):
             for n in e.childNodes:
@@ -1367,7 +1375,7 @@
         return m
 
     def putfile(self, filename, flags, data):
-        if 'l' in flags:
+        if b'l' in flags:
             self.wopener.symlink(data, filename)
         else:
             try:
@@ -1387,12 +1395,12 @@
 
             if self.is_exec:
                 if wasexec:
-                    if 'x' not in flags:
+                    if b'x' not in flags:
                         self.delexec.append(filename)
                 else:
-                    if 'x' in flags:
+                    if b'x' in flags:
                         self.setexec.append(filename)
-                util.setflags(self.wjoin(filename), False, 'x' in flags)
+                util.setflags(self.wjoin(filename), False, b'x' in flags)
 
     def _copyfile(self, source, dest):
         # SVN's copy command pukes if the destination file exists, but
@@ -1402,13 +1410,13 @@
         exists = os.path.lexists(wdest)
         if exists:
             fd, tempname = pycompat.mkstemp(
-                prefix='hg-copy-', dir=os.path.dirname(wdest)
+                prefix=b'hg-copy-', dir=os.path.dirname(wdest)
             )
             os.close(fd)
             os.unlink(tempname)
             os.rename(wdest, tempname)
         try:
-            self.run0('copy', source, dest)
+            self.run0(b'copy', source, dest)
         finally:
             self.manifest.add(dest)
             if exists:
@@ -1424,7 +1432,7 @@
             if os.path.isdir(self.wjoin(f)):
                 dirs.add(f)
             i = len(f)
-            for i in iter(lambda: f.rfind('/', 0, i), -1):
+            for i in iter(lambda: f.rfind(b'/', 0, i), -1):
                 dirs.add(f[:i])
         return dirs
 
@@ -1434,21 +1442,21 @@
         ]
         if add_dirs:
             self.manifest.update(add_dirs)
-            self.xargs(add_dirs, 'add', non_recursive=True, quiet=True)
+            self.xargs(add_dirs, b'add', non_recursive=True, quiet=True)
         return add_dirs
 
     def add_files(self, files):
         files = [f for f in files if f not in self.manifest]
         if files:
             self.manifest.update(files)
-            self.xargs(files, 'add', quiet=True)
+            self.xargs(files, b'add', quiet=True)
         return files
 
     def addchild(self, parent, child):
         self.childmap[parent] = child
 
     def revid(self, rev):
-        return "svn:%s@%s" % (self.uuid, rev)
+        return b"svn:%s@%s" % (self.uuid, rev)
 
     def putcommit(
         self, files, copies, parents, commit, source, revmap, full, cleanp2
@@ -1480,49 +1488,49 @@
                 self._copyfile(s, d)
             self.copies = []
         if self.delete:
-            self.xargs(self.delete, 'delete')
+            self.xargs(self.delete, b'delete')
             for f in self.delete:
                 self.manifest.remove(f)
             self.delete = []
         entries.update(self.add_files(files.difference(entries)))
         if self.delexec:
-            self.xargs(self.delexec, 'propdel', 'svn:executable')
+            self.xargs(self.delexec, b'propdel', b'svn:executable')
             self.delexec = []
         if self.setexec:
-            self.xargs(self.setexec, 'propset', 'svn:executable', '*')
+            self.xargs(self.setexec, b'propset', b'svn:executable', b'*')
             self.setexec = []
 
-        fd, messagefile = pycompat.mkstemp(prefix='hg-convert-')
+        fd, messagefile = pycompat.mkstemp(prefix=b'hg-convert-')
         fp = os.fdopen(fd, r'wb')
         fp.write(util.tonativeeol(commit.desc))
         fp.close()
         try:
             output = self.run0(
-                'commit',
+                b'commit',
                 username=stringutil.shortuser(commit.author),
                 file=messagefile,
-                encoding='utf-8',
+                encoding=b'utf-8',
             )
             try:
                 rev = self.commit_re.search(output).group(1)
             except AttributeError:
                 if not files:
-                    return parents[0] if parents else 'None'
-                self.ui.warn(_('unexpected svn output:\n'))
+                    return parents[0] if parents else b'None'
+                self.ui.warn(_(b'unexpected svn output:\n'))
                 self.ui.warn(output)
-                raise error.Abort(_('unable to cope with svn output'))
+                raise error.Abort(_(b'unable to cope with svn output'))
             if commit.rev:
                 self.run(
-                    'propset',
-                    'hg:convert-rev',
+                    b'propset',
+                    b'hg:convert-rev',
                     commit.rev,
                     revprop=True,
                     revision=rev,
                 )
-            if commit.branch and commit.branch != 'default':
+            if commit.branch and commit.branch != b'default':
                 self.run(
-                    'propset',
-                    'hg:convert-branch',
+                    b'propset',
+                    b'hg:convert-branch',
                     commit.branch,
                     revprop=True,
                     revision=rev,
@@ -1534,7 +1542,7 @@
             os.unlink(messagefile)
 
     def puttags(self, tags):
-        self.ui.warn(_('writing Subversion tags is not yet implemented\n'))
+        self.ui.warn(_(b'writing Subversion tags is not yet implemented\n'))
         return None, None
 
     def hascommitfrommap(self, rev):
@@ -1549,8 +1557,8 @@
             return True
         raise error.Abort(
             _(
-                'splice map revision %s not found in subversion '
-                'child map (revision lookups are not implemented)'
+                b'splice map revision %s not found in subversion '
+                b'child map (revision lookups are not implemented)'
             )
             % rev
         )
--- a/hgext/convert/transport.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/convert/transport.py	Sun Oct 06 09:48:39 2019 -0400
@@ -54,13 +54,13 @@
     )
     if getprovider:
         # Available in svn >= 1.6
-        for name in ('gnome_keyring', 'keychain', 'kwallet', 'windows'):
-            for type in ('simple', 'ssl_client_cert_pw', 'ssl_server_trust'):
+        for name in (b'gnome_keyring', b'keychain', b'kwallet', b'windows'):
+            for type in (b'simple', b'ssl_client_cert_pw', b'ssl_server_trust'):
                 p = getprovider(name, type, pool)
                 if p:
                     providers.append(p)
     else:
-        if util.safehasattr(svn.client, 'get_windows_simple_provider'):
+        if util.safehasattr(svn.client, b'get_windows_simple_provider'):
             providers.append(svn.client.get_windows_simple_provider(pool))
 
     return svn.core.svn_auth_open(providers, pool)
@@ -75,14 +75,14 @@
     Open an ra connection to a Subversion repository.
     """
 
-    def __init__(self, url="", ra=None):
+    def __init__(self, url=b"", ra=None):
         self.pool = Pool()
         self.svn_url = url
-        self.username = ''
-        self.password = ''
+        self.username = b''
+        self.password = b''
 
         # Only Subversion 1.4 has reparent()
-        if ra is None or not util.safehasattr(svn.ra, 'reparent'):
+        if ra is None or not util.safehasattr(svn.ra, b'reparent'):
             self.client = svn.client.create_context(self.pool)
             ab = _create_auth_baton(self.pool)
             self.client.auth_baton = ab
--- a/hgext/eol.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/eol.py	Sun Oct 06 09:48:39 2019 -0400
@@ -112,41 +112,41 @@
 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
 # be specifying the version(s) of Mercurial they are tested with, or
 # leave the attribute unspecified.
-testedwith = 'ships-with-hg-core'
+testedwith = b'ships-with-hg-core'
 
 configtable = {}
 configitem = registrar.configitem(configtable)
 
 configitem(
-    'eol', 'fix-trailing-newline', default=False,
+    b'eol', b'fix-trailing-newline', default=False,
 )
 configitem(
-    'eol', 'native', default=pycompat.oslinesep,
+    b'eol', b'native', default=pycompat.oslinesep,
 )
 configitem(
-    'eol', 'only-consistent', default=True,
+    b'eol', b'only-consistent', default=True,
 )
 
 # Matches a lone LF, i.e., one that is not part of CRLF.
-singlelf = re.compile('(^|[^\r])\n')
+singlelf = re.compile(b'(^|[^\r])\n')
 
 
 def inconsistenteol(data):
-    return '\r\n' in data and singlelf.search(data)
+    return b'\r\n' in data and singlelf.search(data)
 
 
 def tolf(s, params, ui, **kwargs):
     """Filter to convert to LF EOLs."""
     if stringutil.binary(s):
         return s
-    if ui.configbool('eol', 'only-consistent') and inconsistenteol(s):
+    if ui.configbool(b'eol', b'only-consistent') and inconsistenteol(s):
         return s
     if (
-        ui.configbool('eol', 'fix-trailing-newline')
+        ui.configbool(b'eol', b'fix-trailing-newline')
         and s
-        and not s.endswith('\n')
+        and not s.endswith(b'\n')
     ):
-        s = s + '\n'
+        s = s + b'\n'
     return util.tolf(s)
 
 
@@ -154,14 +154,14 @@
     """Filter to convert to CRLF EOLs."""
     if stringutil.binary(s):
         return s
-    if ui.configbool('eol', 'only-consistent') and inconsistenteol(s):
+    if ui.configbool(b'eol', b'only-consistent') and inconsistenteol(s):
         return s
     if (
-        ui.configbool('eol', 'fix-trailing-newline')
+        ui.configbool(b'eol', b'fix-trailing-newline')
         and s
-        and not s.endswith('\n')
+        and not s.endswith(b'\n')
     ):
-        s = s + '\n'
+        s = s + b'\n'
     return util.tocrlf(s)
 
 
@@ -171,60 +171,68 @@
 
 
 filters = {
-    'to-lf': tolf,
-    'to-crlf': tocrlf,
-    'is-binary': isbinary,
+    b'to-lf': tolf,
+    b'to-crlf': tocrlf,
+    b'is-binary': isbinary,
     # The following provide backwards compatibility with win32text
-    'cleverencode:': tolf,
-    'cleverdecode:': tocrlf,
+    b'cleverencode:': tolf,
+    b'cleverdecode:': tocrlf,
 }
 
 
 class eolfile(object):
     def __init__(self, ui, root, data):
-        self._decode = {'LF': 'to-lf', 'CRLF': 'to-crlf', 'BIN': 'is-binary'}
-        self._encode = {'LF': 'to-lf', 'CRLF': 'to-crlf', 'BIN': 'is-binary'}
+        self._decode = {
+            b'LF': b'to-lf',
+            b'CRLF': b'to-crlf',
+            b'BIN': b'is-binary',
+        }
+        self._encode = {
+            b'LF': b'to-lf',
+            b'CRLF': b'to-crlf',
+            b'BIN': b'is-binary',
+        }
 
         self.cfg = config.config()
         # Our files should not be touched. The pattern must be
         # inserted first to override a '** = native' pattern.
-        self.cfg.set('patterns', '.hg*', 'BIN', 'eol')
+        self.cfg.set(b'patterns', b'.hg*', b'BIN', b'eol')
         # We can then parse the user's patterns.
-        self.cfg.parse('.hgeol', data)
+        self.cfg.parse(b'.hgeol', data)
 
-        isrepolf = self.cfg.get('repository', 'native') != 'CRLF'
-        self._encode['NATIVE'] = isrepolf and 'to-lf' or 'to-crlf'
-        iswdlf = ui.config('eol', 'native') in ('LF', '\n')
-        self._decode['NATIVE'] = iswdlf and 'to-lf' or 'to-crlf'
+        isrepolf = self.cfg.get(b'repository', b'native') != b'CRLF'
+        self._encode[b'NATIVE'] = isrepolf and b'to-lf' or b'to-crlf'
+        iswdlf = ui.config(b'eol', b'native') in (b'LF', b'\n')
+        self._decode[b'NATIVE'] = iswdlf and b'to-lf' or b'to-crlf'
 
         include = []
         exclude = []
         self.patterns = []
-        for pattern, style in self.cfg.items('patterns'):
+        for pattern, style in self.cfg.items(b'patterns'):
             key = style.upper()
-            if key == 'BIN':
+            if key == b'BIN':
                 exclude.append(pattern)
             else:
                 include.append(pattern)
-            m = match.match(root, '', [pattern])
+            m = match.match(root, b'', [pattern])
             self.patterns.append((pattern, key, m))
         # This will match the files that we need to check
         # for inconsistent newlines.
-        self.match = match.match(root, '', [], include, exclude)
+        self.match = match.match(root, b'', [], include, exclude)
 
     def copytoui(self, ui):
         for pattern, key, m in self.patterns:
             try:
-                ui.setconfig('decode', pattern, self._decode[key], 'eol')
-                ui.setconfig('encode', pattern, self._encode[key], 'eol')
+                ui.setconfig(b'decode', pattern, self._decode[key], b'eol')
+                ui.setconfig(b'encode', pattern, self._encode[key], b'eol')
             except KeyError:
                 ui.warn(
-                    _("ignoring unknown EOL style '%s' from %s\n")
-                    % (key, self.cfg.source('patterns', pattern))
+                    _(b"ignoring unknown EOL style '%s' from %s\n")
+                    % (key, self.cfg.source(b'patterns', pattern))
                 )
         # eol.only-consistent can be specified in ~/.hgrc or .hgeol
-        for k, v in self.cfg.items('eol'):
-            ui.setconfig('eol', k, v, 'eol')
+        for k, v in self.cfg.items(b'eol'):
+            ui.setconfig(b'eol', k, v, b'eol')
 
     def checkrev(self, repo, ctx, files):
         failed = []
@@ -237,9 +245,9 @@
                 target = self._encode[key]
                 data = ctx[f].data()
                 if (
-                    target == "to-lf"
-                    and "\r\n" in data
-                    or target == "to-crlf"
+                    target == b"to-lf"
+                    and b"\r\n" in data
+                    or target == b"to-crlf"
                     and singlelf.search(data)
                 ):
                     failed.append((f, target, bytes(ctx)))
@@ -254,15 +262,18 @@
                 if node is None:
                     # Cannot use workingctx.data() since it would load
                     # and cache the filters before we configure them.
-                    data = repo.wvfs('.hgeol').read()
+                    data = repo.wvfs(b'.hgeol').read()
                 else:
-                    data = repo[node]['.hgeol'].data()
+                    data = repo[node][b'.hgeol'].data()
                 return eolfile(ui, repo.root, data)
             except (IOError, LookupError):
                 pass
     except errormod.ParseError as inst:
         ui.warn(
-            _("warning: ignoring .hgeol file due to parse error " "at %s: %s\n")
+            _(
+                b"warning: ignoring .hgeol file due to parse error "
+                b"at %s: %s\n"
+            )
             % (inst.args[1], inst.args[0])
         )
     return None
@@ -276,10 +287,10 @@
     never loaded. This function ensures the extension is enabled when running
     hooks.
     """
-    if 'eol' in ui._knownconfig:
+    if b'eol' in ui._knownconfig:
         return
-    ui.setconfig('extensions', 'eol', '', source='internal')
-    extensions.loadall(ui, ['eol'])
+    ui.setconfig(b'extensions', b'eol', b'', source=b'internal')
+    extensions.loadall(ui, [b'eol'])
 
 
 def _checkhook(ui, repo, node, headsonly):
@@ -302,14 +313,16 @@
             failed.extend(eol.checkrev(repo, ctx, files))
 
     if failed:
-        eols = {'to-lf': 'CRLF', 'to-crlf': 'LF'}
+        eols = {b'to-lf': b'CRLF', b'to-crlf': b'LF'}
         msgs = []
         for f, target, node in sorted(failed):
             msgs.append(
-                _("  %s in %s should not have %s line endings")
+                _(b"  %s in %s should not have %s line endings")
                 % (f, node, eols[target])
             )
-        raise errormod.Abort(_("end-of-line check failed:\n") + "\n".join(msgs))
+        raise errormod.Abort(
+            _(b"end-of-line check failed:\n") + b"\n".join(msgs)
+        )
 
 
 def checkallhook(ui, repo, node, hooktype, **kwargs):
@@ -333,16 +346,16 @@
 
 
 def uisetup(ui):
-    ui.setconfig('hooks', 'preupdate.eol', preupdate, 'eol')
+    ui.setconfig(b'hooks', b'preupdate.eol', preupdate, b'eol')
 
 
 def extsetup(ui):
     try:
-        extensions.find('win32text')
+        extensions.find(b'win32text')
         ui.warn(
             _(
-                "the eol extension is incompatible with the "
-                "win32text extension\n"
+                b"the eol extension is incompatible with the "
+                b"win32text extension\n"
             )
         )
     except KeyError:
@@ -357,7 +370,7 @@
     for name, fn in filters.iteritems():
         repo.adddatafilter(name, fn)
 
-    ui.setconfig('patch', 'eol', 'auto', 'eol')
+    ui.setconfig(b'patch', b'eol', b'auto', b'eol')
 
     class eolrepo(repo.__class__):
         def loadeol(self, nodes):
@@ -368,37 +381,37 @@
             return eol.match
 
         def _hgcleardirstate(self):
-            self._eolmatch = self.loadeol([None, 'tip'])
+            self._eolmatch = self.loadeol([None, b'tip'])
             if not self._eolmatch:
                 self._eolmatch = util.never
                 return
 
             oldeol = None
             try:
-                cachemtime = os.path.getmtime(self.vfs.join("eol.cache"))
+                cachemtime = os.path.getmtime(self.vfs.join(b"eol.cache"))
             except OSError:
                 cachemtime = 0
             else:
-                olddata = self.vfs.read("eol.cache")
+                olddata = self.vfs.read(b"eol.cache")
                 if olddata:
                     oldeol = eolfile(self.ui, self.root, olddata)
 
             try:
-                eolmtime = os.path.getmtime(self.wjoin(".hgeol"))
+                eolmtime = os.path.getmtime(self.wjoin(b".hgeol"))
             except OSError:
                 eolmtime = 0
 
             if eolmtime > cachemtime:
-                self.ui.debug("eol: detected change in .hgeol\n")
+                self.ui.debug(b"eol: detected change in .hgeol\n")
 
-                hgeoldata = self.wvfs.read('.hgeol')
+                hgeoldata = self.wvfs.read(b'.hgeol')
                 neweol = eolfile(self.ui, self.root, hgeoldata)
 
                 wlock = None
                 try:
                     wlock = self.wlock()
                     for f in self.dirstate:
-                        if self.dirstate[f] != 'n':
+                        if self.dirstate[f] != b'n':
                             continue
                         if oldeol is not None:
                             if not oldeol.match(f) and not neweol.match(f):
@@ -419,7 +432,7 @@
                         # the new .hgeol file specifies a different filter
                         self.dirstate.normallookup(f)
                     # Write the cache to update mtime and cache .hgeol
-                    with self.vfs("eol.cache", "w") as f:
+                    with self.vfs(b"eol.cache", b"w") as f:
                         f.write(hgeoldata)
                 except errormod.LockUnavailable:
                     # If we cannot lock the repository and clear the
@@ -447,7 +460,7 @@
                     continue
                 if inconsistenteol(data):
                     raise errormod.Abort(
-                        _("inconsistent newline style " "in %s\n") % f
+                        _(b"inconsistent newline style " b"in %s\n") % f
                     )
             return super(eolrepo, self).commitctx(ctx, error, origctx)
 
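A note on the eol.py hunks above: the config keys, the filter table, and the
lone-LF regex are byteified in one sweep because on Python 3 a pattern
compiled from bytes only matches bytes, so the literal and the file data it
scans have to change together. A minimal sketch of the regex's behavior
(illustrative only, not part of the patch):

    import re
    singlelf = re.compile(b'(^|[^\r])\n')   # same literal as in eol.py
    assert singlelf.search(b'mixed\r\nand\n')     # finds the lone LF
    assert not singlelf.search(b'crlf only\r\n')  # CRLF alone never matches
    # matching this bytes pattern against a str raises TypeError on Python 3
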
--- a/hgext/extdiff.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/extdiff.py	Sun Oct 06 09:48:39 2019 -0400
@@ -118,26 +118,26 @@
 configitem = registrar.configitem(configtable)
 
 configitem(
-    'extdiff', br'opts\..*', default='', generic=True,
+    b'extdiff', br'opts\..*', default=b'', generic=True,
 )
 
 configitem(
-    'extdiff', br'gui\..*', generic=True,
+    b'extdiff', br'gui\..*', generic=True,
 )
 
 configitem(
-    'diff-tools', br'.*\.diffargs$', default=None, generic=True,
+    b'diff-tools', br'.*\.diffargs$', default=None, generic=True,
 )
 
 configitem(
-    'diff-tools', br'.*\.gui$', generic=True,
+    b'diff-tools', br'.*\.gui$', generic=True,
 )
 
 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
 # be specifying the version(s) of Mercurial they are tested with, or
 # leave the attribute unspecified.
-testedwith = 'ships-with-hg-core'
+testedwith = b'ships-with-hg-core'
 
 
 def snapshot(ui, repo, files, node, tmproot, listsubrepos):
@@ -145,40 +145,40 @@
     if not using snapshot, -I/-X does not work and recursive diff
     in tools like kdiff3 and meld displays too many files.'''
     dirname = os.path.basename(repo.root)
-    if dirname == "":
-        dirname = "root"
+    if dirname == b"":
+        dirname = b"root"
     if node is not None:
-        dirname = '%s.%s' % (dirname, short(node))
+        dirname = b'%s.%s' % (dirname, short(node))
     base = os.path.join(tmproot, dirname)
     os.mkdir(base)
     fnsandstat = []
 
     if node is not None:
         ui.note(
-            _('making snapshot of %d files from rev %s\n')
+            _(b'making snapshot of %d files from rev %s\n')
             % (len(files), short(node))
         )
     else:
         ui.note(
-            _('making snapshot of %d files from working directory\n')
+            _(b'making snapshot of %d files from working directory\n')
             % (len(files))
         )
 
     if files:
-        repo.ui.setconfig("ui", "archivemeta", False)
+        repo.ui.setconfig(b"ui", b"archivemeta", False)
 
         archival.archive(
             repo,
             base,
             node,
-            'files',
+            b'files',
             match=scmutil.matchfiles(repo, files),
             subrepos=listsubrepos,
         )
 
         for fn in sorted(files):
             wfn = util.pconvert(fn)
-            ui.note('  %s\n' % wfn)
+            ui.note(b'  %s\n' % wfn)
 
             if node is None:
                 dest = os.path.join(base, wfn)
@@ -202,20 +202,20 @@
     # When not operating in 3-way mode, an empty string is
     # returned for parent2
     replace = {
-        'parent': parent1,
-        'parent1': parent1,
-        'parent2': parent2,
-        'plabel1': plabel1,
-        'plabel2': plabel2,
-        'child': child,
-        'clabel': clabel,
-        'root': repo_root,
+        b'parent': parent1,
+        b'parent1': parent1,
+        b'parent2': parent2,
+        b'plabel1': plabel1,
+        b'plabel2': plabel2,
+        b'child': child,
+        b'clabel': clabel,
+        b'root': repo_root,
     }
 
     def quote(match):
         pre = match.group(2)
         key = match.group(3)
-        if not do3way and key == 'parent2':
+        if not do3way and key == b'parent2':
             return pre
         return pre + procutil.shellquote(replace[key])
 
@@ -225,7 +225,7 @@
         br'\$(parent2|parent1?|child|plabel1|plabel2|clabel|root)\1'
     )
     if not do3way and not re.search(regex, cmdline):
-        cmdline += ' $parent1 $child'
+        cmdline += b' $parent1 $child'
     return re.sub(regex, quote, cmdline)
 
 
@@ -273,8 +273,8 @@
         if not os.path.isfile(path1a):
             path1a = os.devnull
 
-        path1b = ''
-        label1b = ''
+        path1b = b''
+        label1b = b''
         if do3way:
             path1b = os.path.join(tmproot, dir1b, commonfile)
             label1b = commonfile + rev1b
@@ -286,24 +286,24 @@
 
         if confirm:
             # Prompt before showing this diff
-            difffiles = _('diff %s (%d of %d)') % (
+            difffiles = _(b'diff %s (%d of %d)') % (
                 commonfile,
                 idx + 1,
                 totalfiles,
             )
             responses = _(
-                '[Yns?]'
-                '$$ &Yes, show diff'
-                '$$ &No, skip this diff'
-                '$$ &Skip remaining diffs'
-                '$$ &? (display help)'
+                b'[Yns?]'
+                b'$$ &Yes, show diff'
+                b'$$ &No, skip this diff'
+                b'$$ &Skip remaining diffs'
+                b'$$ &? (display help)'
             )
-            r = ui.promptchoice('%s %s' % (difffiles, responses))
+            r = ui.promptchoice(b'%s %s' % (difffiles, responses))
             if r == 3:  # ?
                 while r == 3:
                     for c, t in ui.extractchoices(responses)[1]:
-                        ui.write('%s - %s\n' % (c, encoding.lower(t)))
-                    r = ui.promptchoice('%s %s' % (difffiles, responses))
+                        ui.write(b'%s - %s\n' % (c, encoding.lower(t)))
+                    r = ui.promptchoice(b'%s %s' % (difffiles, responses))
             if r == 0:  # yes
                 pass
             elif r == 1:  # no
@@ -331,22 +331,22 @@
             # as we know, the tool doesn't have a GUI, in which case
             # we can't run multiple CLI programs at the same time.
             ui.debug(
-                'running %r in %s\n' % (pycompat.bytestr(curcmdline), tmproot)
+                b'running %r in %s\n' % (pycompat.bytestr(curcmdline), tmproot)
             )
-            ui.system(curcmdline, cwd=tmproot, blockedtag='extdiff')
+            ui.system(curcmdline, cwd=tmproot, blockedtag=b'extdiff')
         else:
             # Run the comparison program but don't wait, as we're
             # going to rapid-fire each file diff and then wait on
             # the whole group.
             ui.debug(
-                'running %r in %s (backgrounded)\n'
+                b'running %r in %s (backgrounded)\n'
                 % (pycompat.bytestr(curcmdline), tmproot)
             )
             proc = _systembackground(curcmdline, cwd=tmproot)
             waitprocs.append(proc)
 
     if waitprocs:
-        with ui.timeblockedsection('extdiff'):
+        with ui.timeblockedsection(b'extdiff'):
             for proc in waitprocs:
                 proc.wait()
 
@@ -360,12 +360,12 @@
     - just invoke the diff for a single file in the working dir
     '''
 
-    revs = opts.get('rev')
-    change = opts.get('change')
-    do3way = '$parent2' in cmdline
+    revs = opts.get(b'rev')
+    change = opts.get(b'change')
+    do3way = b'$parent2' in cmdline
 
     if revs and change:
-        msg = _('cannot specify --rev and --change at the same time')
+        msg = _(b'cannot specify --rev and --change at the same time')
         raise error.Abort(msg)
     elif change:
         ctx2 = scmutil.revsingle(repo, change, None)
@@ -377,8 +377,8 @@
         else:
             ctx1b = repo[nullid]
 
-    perfile = opts.get('per_file')
-    confirm = opts.get('confirm')
+    perfile = opts.get(b'per_file')
+    confirm = opts.get(b'confirm')
 
     node1a = ctx1a.node()
     node1b = ctx1b.node()
@@ -389,17 +389,17 @@
         if node1b == nullid:
             do3way = False
 
-    subrepos = opts.get('subrepos')
+    subrepos = opts.get(b'subrepos')
 
     matcher = scmutil.match(repo[node2], pats, opts)
 
-    if opts.get('patch'):
+    if opts.get(b'patch'):
         if subrepos:
-            raise error.Abort(_('--patch cannot be used with --subrepos'))
+            raise error.Abort(_(b'--patch cannot be used with --subrepos'))
         if perfile:
-            raise error.Abort(_('--patch cannot be used with --per-file'))
+            raise error.Abort(_(b'--patch cannot be used with --per-file'))
         if node2 is None:
-            raise error.Abort(_('--patch requires two revisions'))
+            raise error.Abort(_(b'--patch requires two revisions'))
     else:
         mod_a, add_a, rem_a = map(
             set, repo.status(node1a, node2, matcher, listsubrepos=subrepos)[:3]
@@ -416,33 +416,33 @@
         if not common:
             return 0
 
-    tmproot = pycompat.mkdtemp(prefix='extdiff.')
+    tmproot = pycompat.mkdtemp(prefix=b'extdiff.')
     try:
-        if not opts.get('patch'):
+        if not opts.get(b'patch'):
             # Always make a copy of node1a (and node1b, if applicable)
             dir1a_files = mod_a | rem_a | ((mod_b | add_b) - add_a)
             dir1a = snapshot(ui, repo, dir1a_files, node1a, tmproot, subrepos)[
                 0
             ]
-            rev1a = '@%d' % repo[node1a].rev()
+            rev1a = b'@%d' % repo[node1a].rev()
             if do3way:
                 dir1b_files = mod_b | rem_b | ((mod_a | add_a) - add_b)
                 dir1b = snapshot(
                     ui, repo, dir1b_files, node1b, tmproot, subrepos
                 )[0]
-                rev1b = '@%d' % repo[node1b].rev()
+                rev1b = b'@%d' % repo[node1b].rev()
             else:
                 dir1b = None
-                rev1b = ''
+                rev1b = b''
 
             fnsandstat = []
 
             # If node2 is not the wc or there is >1 change, copy it
-            dir2root = ''
-            rev2 = ''
+            dir2root = b''
+            rev2 = b''
             if node2:
                 dir2 = snapshot(ui, repo, modadd, node2, tmproot, subrepos)[0]
-                rev2 = '@%d' % repo[node2].rev()
+                rev2 = b'@%d' % repo[node2].rev()
             elif len(common) > 1:
                 # we only actually need to get the files to copy back to
                 # the working dir in this case (because the other cases
@@ -453,7 +453,7 @@
                 )
             else:
                 # This lets the diff tool open the changed file directly
-                dir2 = ''
+                dir2 = b''
                 dir2root = repo.root
 
             label1a = rev1a
@@ -476,8 +476,8 @@
                 dir2 = os.path.join(dir2root, dir2, common_file)
                 label2 = common_file + rev2
         else:
-            template = 'hg-%h.patch'
-            with formatter.nullformatter(ui, 'extdiff', {}) as fm:
+            template = b'hg-%h.patch'
+            with formatter.nullformatter(ui, b'extdiff', {}) as fm:
                 cmdutil.export(
                     repo,
                     [repo[node1a].rev(), repo[node2].rev()],
@@ -507,9 +507,9 @@
                 clabel=label2,
             )
             ui.debug(
-                'running %r in %s\n' % (pycompat.bytestr(cmdline), tmproot)
+                b'running %r in %s\n' % (pycompat.bytestr(cmdline), tmproot)
             )
-            ui.system(cmdline, cwd=tmproot, blockedtag='extdiff')
+            ui.system(cmdline, cwd=tmproot, blockedtag=b'extdiff')
         else:
             # Run the external tool once for each pair of files
             _runperfilediff(
@@ -545,35 +545,41 @@
                 or (cpstat.st_mode & 0o100) != (st.st_mode & 0o100)
             ):
                 ui.debug(
-                    'file changed while diffing. '
-                    'Overwriting: %s (src: %s)\n' % (working_fn, copy_fn)
+                    b'file changed while diffing. '
+                    b'Overwriting: %s (src: %s)\n' % (working_fn, copy_fn)
                 )
                 util.copyfile(copy_fn, working_fn)
 
         return 1
     finally:
-        ui.note(_('cleaning up temp directory\n'))
+        ui.note(_(b'cleaning up temp directory\n'))
         shutil.rmtree(tmproot)
 
 
 extdiffopts = (
     [
-        ('o', 'option', [], _('pass option to comparison program'), _('OPT')),
-        ('r', 'rev', [], _('revision'), _('REV')),
-        ('c', 'change', '', _('change made by revision'), _('REV')),
         (
-            '',
-            'per-file',
+            b'o',
+            b'option',
+            [],
+            _(b'pass option to comparison program'),
+            _(b'OPT'),
+        ),
+        (b'r', b'rev', [], _(b'revision'), _(b'REV')),
+        (b'c', b'change', b'', _(b'change made by revision'), _(b'REV')),
+        (
+            b'',
+            b'per-file',
             False,
-            _('compare each file instead of revision snapshots'),
+            _(b'compare each file instead of revision snapshots'),
         ),
         (
-            '',
-            'confirm',
+            b'',
+            b'confirm',
             False,
-            _('prompt user before each external program invocation'),
+            _(b'prompt user before each external program invocation'),
         ),
-        ('', 'patch', None, _('compare patches for two revisions')),
+        (b'', b'patch', None, _(b'compare patches for two revisions')),
     ]
     + cmdutil.walkopts
     + cmdutil.subrepoopts
@@ -581,10 +587,10 @@
 
 
 @command(
-    'extdiff',
-    [('p', 'program', '', _('comparison program to run'), _('CMD')),]
+    b'extdiff',
+    [(b'p', b'program', b'', _(b'comparison program to run'), _(b'CMD')),]
     + extdiffopts,
-    _('hg extdiff [OPT]... [FILE]...'),
+    _(b'hg extdiff [OPT]... [FILE]...'),
     helpcategory=command.CATEGORY_FILE_CONTENTS,
     inferrepo=True,
 )
@@ -620,12 +626,12 @@
     the external program. It is ignored if --per-file isn't specified.
     '''
     opts = pycompat.byteskwargs(opts)
-    program = opts.get('program')
-    option = opts.get('option')
+    program = opts.get(b'program')
+    option = opts.get(b'option')
     if not program:
-        program = 'diff'
-        option = option or ['-Npru']
-    cmdline = ' '.join(map(procutil.shellquote, [program] + option))
+        program = b'diff'
+        option = option or [b'-Npru']
+    cmdline = b' '.join(map(procutil.shellquote, [program] + option))
     return dodiff(ui, repo, cmdline, pats, opts)
 
 
@@ -655,29 +661,29 @@
 
     def __call__(self, ui, repo, *pats, **opts):
         opts = pycompat.byteskwargs(opts)
-        options = ' '.join(map(procutil.shellquote, opts['option']))
+        options = b' '.join(map(procutil.shellquote, opts[b'option']))
         if options:
-            options = ' ' + options
+            options = b' ' + options
         return dodiff(
             ui, repo, self._cmdline + options, pats, opts, guitool=self._isgui
         )
 
 
 def uisetup(ui):
-    for cmd, path in ui.configitems('extdiff'):
+    for cmd, path in ui.configitems(b'extdiff'):
         path = util.expandpath(path)
-        if cmd.startswith('cmd.'):
+        if cmd.startswith(b'cmd.'):
             cmd = cmd[4:]
             if not path:
                 path = procutil.findexe(cmd)
                 if path is None:
                     path = filemerge.findexternaltool(ui, cmd) or cmd
-            diffopts = ui.config('extdiff', 'opts.' + cmd)
+            diffopts = ui.config(b'extdiff', b'opts.' + cmd)
             cmdline = procutil.shellquote(path)
             if diffopts:
-                cmdline += ' ' + diffopts
-            isgui = ui.configbool('extdiff', 'gui.' + cmd)
-        elif cmd.startswith('opts.') or cmd.startswith('gui.'):
+                cmdline += b' ' + diffopts
+            isgui = ui.configbool(b'extdiff', b'gui.' + cmd)
+        elif cmd.startswith(b'opts.') or cmd.startswith(b'gui.'):
             continue
         else:
             if path:
@@ -691,21 +697,21 @@
                     path = filemerge.findexternaltool(ui, cmd) or cmd
                 cmdline = procutil.shellquote(path)
                 diffopts = False
-            isgui = ui.configbool('extdiff', 'gui.' + cmd)
+            isgui = ui.configbool(b'extdiff', b'gui.' + cmd)
         # look for diff arguments in [diff-tools] then [merge-tools]
         if not diffopts:
-            key = cmd + '.diffargs'
-            for section in ('diff-tools', 'merge-tools'):
+            key = cmd + b'.diffargs'
+            for section in (b'diff-tools', b'merge-tools'):
                 args = ui.config(section, key)
                 if args:
-                    cmdline += ' ' + args
+                    cmdline += b' ' + args
                     if isgui is None:
-                        isgui = ui.configbool(section, cmd + '.gui') or False
+                        isgui = ui.configbool(section, cmd + b'.gui') or False
                     break
         command(
             cmd,
             extdiffopts[:],
-            _('hg %s [OPTION]... [FILE]...') % cmd,
+            _(b'hg %s [OPTION]... [FILE]...') % cmd,
             helpcategory=command.CATEGORY_FILE_CONTENTS,
             inferrepo=True,
         )(savedcmd(path, cmdline, isgui))
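
In the extdiff hunks above, quote() and re.sub() expand $parent1, $child and
friends into shell-quoted snapshot paths before the external tool is invoked.
A simplified sketch of that substitution (the real regex also tracks
surrounding quotes and more variables; the paths below are made up):

    import re
    replace = {b'parent1': b'/tmp/extdiff.abc/repo.1234',
               b'child': b'/tmp/extdiff.abc/repo'}
    def quote(m):
        # stand-in for procutil.shellquote
        return b"'" + replace[m.group(1)] + b"'"
    print(re.sub(br'\$(parent1|child)', quote, b'kdiff3 $parent1 $child'))
    # b"kdiff3 '/tmp/extdiff.abc/repo.1234' '/tmp/extdiff.abc/repo'"
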
--- a/hgext/factotum.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/factotum.py	Sun Oct 06 09:48:39 2019 -0400
@@ -69,44 +69,44 @@
 configitem = registrar.configitem(configtable)
 
 configitem(
-    'factotum', 'executable', default='/bin/auth/factotum',
+    b'factotum', b'executable', default=b'/bin/auth/factotum',
 )
 configitem(
-    'factotum', 'mountpoint', default='/mnt/factotum',
+    b'factotum', b'mountpoint', default=b'/mnt/factotum',
 )
 configitem(
-    'factotum', 'service', default='hg',
+    b'factotum', b'service', default=b'hg',
 )
 
 
 def auth_getkey(self, params):
     if not self.ui.interactive():
-        raise error.Abort(_('factotum not interactive'))
-    if 'user=' not in params:
-        params = '%s user?' % params
-    params = '%s !password?' % params
-    os.system(procutil.tonativestr("%s -g '%s'" % (_executable, params)))
+        raise error.Abort(_(b'factotum not interactive'))
+    if b'user=' not in params:
+        params = b'%s user?' % params
+    params = b'%s !password?' % params
+    os.system(procutil.tonativestr(b"%s -g '%s'" % (_executable, params)))
 
 
 def auth_getuserpasswd(self, getkey, params):
-    params = 'proto=pass %s' % params
+    params = b'proto=pass %s' % params
     while True:
-        fd = os.open('%s/rpc' % _mountpoint, os.O_RDWR)
+        fd = os.open(b'%s/rpc' % _mountpoint, os.O_RDWR)
         try:
-            os.write(fd, 'start %s' % params)
+            os.write(fd, b'start %s' % params)
             l = os.read(fd, ERRMAX).split()
-            if l[0] == 'ok':
-                os.write(fd, 'read')
+            if l[0] == b'ok':
+                os.write(fd, b'read')
                 status, user, passwd = os.read(fd, ERRMAX).split(None, 2)
-                if status == 'ok':
-                    if passwd.startswith("'"):
-                        if passwd.endswith("'"):
-                            passwd = passwd[1:-1].replace("''", "'")
+                if status == b'ok':
+                    if passwd.startswith(b"'"):
+                        if passwd.endswith(b"'"):
+                            passwd = passwd[1:-1].replace(b"''", b"'")
                         else:
-                            raise error.Abort(_('malformed password string'))
+                            raise error.Abort(_(b'malformed password string'))
                     return (user, passwd)
         except (OSError, IOError):
-            raise error.Abort(_('factotum not responding'))
+            raise error.Abort(_(b'factotum not responding'))
         finally:
             os.close(fd)
         getkey(self, params)
@@ -127,18 +127,18 @@
         self._writedebug(user, passwd)
         return (user, passwd)
 
-    prefix = ''
+    prefix = b''
     res = httpconnection.readauthforuri(self.ui, authuri, user)
     if res:
         _, auth = res
-        prefix = auth.get('prefix')
-        user, passwd = auth.get('username'), auth.get('password')
+        prefix = auth.get(b'prefix')
+        user, passwd = auth.get(b'username'), auth.get(b'password')
     if not user or not passwd:
         if not prefix:
-            prefix = realm.split(' ')[0].lower()
-        params = 'service=%s prefix=%s' % (_service, prefix)
+            prefix = realm.split(b' ')[0].lower()
+        params = b'service=%s prefix=%s' % (_service, prefix)
         if user:
-            params = '%s user=%s' % (params, user)
+            params = b'%s user=%s' % (params, user)
         user, passwd = auth_getuserpasswd(self, auth_getkey, params)
 
     self.add_password(realm, authuri, user, passwd)
@@ -148,8 +148,8 @@
 
 def uisetup(ui):
     global _executable
-    _executable = ui.config('factotum', 'executable')
+    _executable = ui.config(b'factotum', b'executable')
     global _mountpoint
-    _mountpoint = ui.config('factotum', 'mountpoint')
+    _mountpoint = ui.config(b'factotum', b'mountpoint')
     global _service
-    _service = ui.config('factotum', 'service')
+    _service = ui.config(b'factotum', b'service')
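
factotum speaks to the Plan 9 agent over a raw file descriptor, and the
os.read/os.write API is one of the places where Python 3 forces the
byteification outright: os.write() accepts only bytes. A tiny illustration
using a pipe in place of the real /mnt/factotum/rpc file:

    import os
    r, w = os.pipe()
    os.write(w, b'start proto=pass service=hg')  # passing a str raises TypeError
    print(os.read(r, 64))                        # b'start proto=pass service=hg'
    os.close(r)
    os.close(w)
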
--- a/hgext/fastannotate/__init__.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/fastannotate/__init__.py	Sun Oct 06 09:48:39 2019 -0400
@@ -119,56 +119,56 @@
 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
 # be specifying the version(s) of Mercurial they are tested with, or
 # leave the attribute unspecified.
-testedwith = 'ships-with-hg-core'
+testedwith = b'ships-with-hg-core'
 
 cmdtable = commands.cmdtable
 
 configtable = {}
 configitem = registrar.configitem(configtable)
 
-configitem('fastannotate', 'modes', default=['fastannotate'])
-configitem('fastannotate', 'server', default=False)
-configitem('fastannotate', 'client', default=False)
-configitem('fastannotate', 'unfilteredrepo', default=True)
-configitem('fastannotate', 'defaultformat', default=['number'])
-configitem('fastannotate', 'perfhack', default=False)
-configitem('fastannotate', 'mainbranch')
-configitem('fastannotate', 'forcetext', default=True)
-configitem('fastannotate', 'forcefollow', default=True)
-configitem('fastannotate', 'clientfetchthreshold', default=10)
-configitem('fastannotate', 'serverbuildondemand', default=True)
-configitem('fastannotate', 'remotepath', default='default')
+configitem(b'fastannotate', b'modes', default=[b'fastannotate'])
+configitem(b'fastannotate', b'server', default=False)
+configitem(b'fastannotate', b'client', default=False)
+configitem(b'fastannotate', b'unfilteredrepo', default=True)
+configitem(b'fastannotate', b'defaultformat', default=[b'number'])
+configitem(b'fastannotate', b'perfhack', default=False)
+configitem(b'fastannotate', b'mainbranch')
+configitem(b'fastannotate', b'forcetext', default=True)
+configitem(b'fastannotate', b'forcefollow', default=True)
+configitem(b'fastannotate', b'clientfetchthreshold', default=10)
+configitem(b'fastannotate', b'serverbuildondemand', default=True)
+configitem(b'fastannotate', b'remotepath', default=b'default')
 
 
 def uisetup(ui):
-    modes = set(ui.configlist('fastannotate', 'modes'))
-    if 'fctx' in modes:
-        modes.discard('hgweb')
+    modes = set(ui.configlist(b'fastannotate', b'modes'))
+    if b'fctx' in modes:
+        modes.discard(b'hgweb')
     for name in modes:
-        if name == 'fastannotate':
+        if name == b'fastannotate':
             commands.registercommand()
-        elif name == 'hgweb':
+        elif name == b'hgweb':
             from . import support
 
             support.replacehgwebannotate()
-        elif name == 'fctx':
+        elif name == b'fctx':
             from . import support
 
             support.replacefctxannotate()
             commands.wrapdefault()
         else:
-            raise hgerror.Abort(_('fastannotate: invalid mode: %s') % name)
+            raise hgerror.Abort(_(b'fastannotate: invalid mode: %s') % name)
 
-    if ui.configbool('fastannotate', 'server'):
+    if ui.configbool(b'fastannotate', b'server'):
         protocol.serveruisetup(ui)
 
 
 def extsetup(ui):
     # fastannotate has its own locking, without depending on repo lock
     # TODO: avoid mutating this unless the specific repo has it enabled
-    localrepo.localrepository._wlockfreeprefix.add('fastannotate/')
+    localrepo.localrepository._wlockfreeprefix.add(b'fastannotate/')
 
 
 def reposetup(ui, repo):
-    if ui.configbool('fastannotate', 'client'):
+    if ui.configbool(b'fastannotate', b'client'):
         protocol.clientreposetup(ui, repo)
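
In uisetup() above, enabling the 'fctx' mode discards 'hgweb', presumably
because replacing fctx.annotate already covers the path hgweb's annotate
goes through. The set arithmetic, with a hypothetical config value:

    modes = {b'fastannotate', b'fctx', b'hgweb'}
    if b'fctx' in modes:
        modes.discard(b'hgweb')
    print(sorted(modes))   # [b'fastannotate', b'fctx']
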
--- a/hgext/fastannotate/commands.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/fastannotate/commands.py	Sun Oct 06 09:48:39 2019 -0400
@@ -34,7 +34,7 @@
 
 def _matchpaths(repo, rev, pats, opts, aopts=facontext.defaultopts):
     """generate paths matching given patterns"""
-    perfhack = repo.ui.configbool('fastannotate', 'perfhack')
+    perfhack = repo.ui.configbool(b'fastannotate', b'perfhack')
 
     # disable perfhack if:
     # a) any walkopt is used
@@ -44,8 +44,8 @@
         # cwd related to reporoot
         reporoot = os.path.dirname(repo.path)
         reldir = os.path.relpath(encoding.getcwd(), reporoot)
-        if reldir == '.':
-            reldir = ''
+        if reldir == b'.':
+            reldir = b''
         if any(opts.get(o[1]) for o in commands.walkopts):  # a)
             perfhack = False
         else:  # b)
@@ -56,7 +56,7 @@
             # disable perfhack on '..' since it allows escaping from the repo
             if any(
                 (
-                    '..' in f
+                    b'..' in f
                     or not os.path.isfile(
                         facontext.pathhelper(repo, f, aopts).linelogpath
                     )
@@ -73,7 +73,7 @@
     else:
 
         def bad(x, y):
-            raise error.Abort("%s: %s" % (x, y))
+            raise error.Abort(b"%s: %s" % (x, y))
 
         ctx = scmutil.revsingle(repo, rev)
         m = scmutil.match(ctx, pats, opts, badfn=bad)
@@ -83,42 +83,57 @@
 
 fastannotatecommandargs = {
     r'options': [
-        ('r', 'rev', '.', _('annotate the specified revision'), _('REV')),
-        ('u', 'user', None, _('list the author (long with -v)')),
-        ('f', 'file', None, _('list the filename')),
-        ('d', 'date', None, _('list the date (short with -q)')),
-        ('n', 'number', None, _('list the revision number (default)')),
-        ('c', 'changeset', None, _('list the changeset')),
+        (b'r', b'rev', b'.', _(b'annotate the specified revision'), _(b'REV')),
+        (b'u', b'user', None, _(b'list the author (long with -v)')),
+        (b'f', b'file', None, _(b'list the filename')),
+        (b'd', b'date', None, _(b'list the date (short with -q)')),
+        (b'n', b'number', None, _(b'list the revision number (default)')),
+        (b'c', b'changeset', None, _(b'list the changeset')),
+        (
+            b'l',
+            b'line-number',
+            None,
+            _(b'show line number at the first ' b'appearance'),
+        ),
         (
-            'l',
-            'line-number',
+            b'e',
+            b'deleted',
             None,
-            _('show line number at the first ' 'appearance'),
+            _(b'show deleted lines (slow) (EXPERIMENTAL)'),
         ),
-        ('e', 'deleted', None, _('show deleted lines (slow) (EXPERIMENTAL)')),
-        ('', 'no-content', None, _('do not show file content (EXPERIMENTAL)')),
-        ('', 'no-follow', None, _("don't follow copies and renames")),
         (
-            '',
-            'linear',
+            b'',
+            b'no-content',
+            None,
+            _(b'do not show file content (EXPERIMENTAL)'),
+        ),
+        (b'', b'no-follow', None, _(b"don't follow copies and renames")),
+        (
+            b'',
+            b'linear',
             None,
             _(
-                'enforce linear history, ignore second parent '
-                'of merges (EXPERIMENTAL)'
+                b'enforce linear history, ignore second parent '
+                b'of merges (EXPERIMENTAL)'
             ),
         ),
-        ('', 'long-hash', None, _('show long changeset hash (EXPERIMENTAL)')),
         (
-            '',
-            'rebuild',
+            b'',
+            b'long-hash',
             None,
-            _('rebuild cache even if it exists ' '(EXPERIMENTAL)'),
+            _(b'show long changeset hash (EXPERIMENTAL)'),
+        ),
+        (
+            b'',
+            b'rebuild',
+            None,
+            _(b'rebuild cache even if it exists ' b'(EXPERIMENTAL)'),
         ),
     ]
     + commands.diffwsopts
     + commands.walkopts
     + commands.formatteropts,
-    r'synopsis': _('[-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...'),
+    r'synopsis': _(b'[-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...'),
     r'inferrepo': True,
 }
 
@@ -155,52 +170,55 @@
         affecting results are used.
     """
     if not pats:
-        raise error.Abort(_('at least one filename or pattern is required'))
+        raise error.Abort(_(b'at least one filename or pattern is required'))
 
     # performance hack: filtered repo can be slow. unfilter by default.
-    if ui.configbool('fastannotate', 'unfilteredrepo'):
+    if ui.configbool(b'fastannotate', b'unfilteredrepo'):
         repo = repo.unfiltered()
 
     opts = pycompat.byteskwargs(opts)
 
-    rev = opts.get('rev', '.')
-    rebuild = opts.get('rebuild', False)
+    rev = opts.get(b'rev', b'.')
+    rebuild = opts.get(b'rebuild', False)
 
     diffopts = patch.difffeatureopts(
-        ui, opts, section='annotate', whitespace=True
+        ui, opts, section=b'annotate', whitespace=True
     )
     aopts = facontext.annotateopts(
         diffopts=diffopts,
-        followmerge=not opts.get('linear', False),
-        followrename=not opts.get('no_follow', False),
+        followmerge=not opts.get(b'linear', False),
+        followrename=not opts.get(b'no_follow', False),
     )
 
     if not any(
-        opts.get(s) for s in ['user', 'date', 'file', 'number', 'changeset']
+        opts.get(s)
+        for s in [b'user', b'date', b'file', b'number', b'changeset']
     ):
         # default 'number' for compatibility. But fastannotate is more
         # efficient with "changeset", "line-number" and "no-content".
-        for name in ui.configlist('fastannotate', 'defaultformat', ['number']):
+        for name in ui.configlist(
+            b'fastannotate', b'defaultformat', [b'number']
+        ):
             opts[name] = True
 
-    ui.pager('fastannotate')
-    template = opts.get('template')
-    if template == 'json':
+    ui.pager(b'fastannotate')
+    template = opts.get(b'template')
+    if template == b'json':
         formatter = faformatter.jsonformatter(ui, repo, opts)
     else:
         formatter = faformatter.defaultformatter(ui, repo, opts)
-    showdeleted = opts.get('deleted', False)
-    showlines = not bool(opts.get('no_content'))
-    showpath = opts.get('file', False)
+    showdeleted = opts.get(b'deleted', False)
+    showlines = not bool(opts.get(b'no_content'))
+    showpath = opts.get(b'file', False)
 
     # find the head of the main (master) branch
-    master = ui.config('fastannotate', 'mainbranch') or rev
+    master = ui.config(b'fastannotate', b'mainbranch') or rev
 
     # paths will be used for prefetching and the real annotating
     paths = list(_matchpaths(repo, rev, pats, opts, aopts))
 
     # for client, prefetch from the server
-    if util.safehasattr(repo, 'prefetchfastannotate'):
+    if util.safehasattr(repo, b'prefetchfastannotate'):
         repo.prefetchfastannotate(paths)
 
     for path in paths:
@@ -238,7 +256,7 @@
 
 _newopts = set()
 _knownopts = {
-    opt[1].replace('-', '_')
+    opt[1].replace(b'-', b'_')
     for opt in (fastannotatecommandargs[r'options'] + commands.globalopts)
 }
 
@@ -246,16 +264,16 @@
 def _annotatewrapper(orig, ui, repo, *pats, **opts):
     """used by wrapdefault"""
     # we need this hack until the obsstore has 0.0 seconds perf impact
-    if ui.configbool('fastannotate', 'unfilteredrepo'):
+    if ui.configbool(b'fastannotate', b'unfilteredrepo'):
         repo = repo.unfiltered()
 
     # treat the file as text (skip the isbinary check)
-    if ui.configbool('fastannotate', 'forcetext'):
+    if ui.configbool(b'fastannotate', b'forcetext'):
         opts[r'text'] = True
 
     # check if we need to do prefetch (client-side)
     rev = opts.get(r'rev')
-    if util.safehasattr(repo, 'prefetchfastannotate') and rev is not None:
+    if util.safehasattr(repo, b'prefetchfastannotate') and rev is not None:
         paths = list(_matchpaths(repo, rev, pats, pycompat.byteskwargs(opts)))
         repo.prefetchfastannotate(paths)
 
@@ -264,20 +282,20 @@
 
 def registercommand():
     """register the fastannotate command"""
-    name = 'fastannotate|fastblame|fa'
+    name = b'fastannotate|fastblame|fa'
     command(name, helpbasic=True, **fastannotatecommandargs)(fastannotate)
 
 
 def wrapdefault():
     """wrap the default annotate command, to be aware of the protocol"""
-    extensions.wrapcommand(commands.table, 'annotate', _annotatewrapper)
+    extensions.wrapcommand(commands.table, b'annotate', _annotatewrapper)
 
 
 @command(
-    'debugbuildannotatecache',
-    [('r', 'rev', '', _('build up to the specific revision'), _('REV'))]
+    b'debugbuildannotatecache',
+    [(b'r', b'rev', b'', _(b'build up to the specific revision'), _(b'REV'))]
     + commands.walkopts,
-    _('[-r REV] FILE...'),
+    _(b'[-r REV] FILE...'),
 )
 def debugbuildannotatecache(ui, repo, *pats, **opts):
     """incrementally build fastannotate cache up to REV for specified files
@@ -291,25 +309,25 @@
     options and lives in '.hg/fastannotate/default'.
     """
     opts = pycompat.byteskwargs(opts)
-    rev = opts.get('REV') or ui.config('fastannotate', 'mainbranch')
+    rev = opts.get(b'REV') or ui.config(b'fastannotate', b'mainbranch')
     if not rev:
         raise error.Abort(
-            _('you need to provide a revision'),
-            hint=_('set fastannotate.mainbranch or use --rev'),
+            _(b'you need to provide a revision'),
+            hint=_(b'set fastannotate.mainbranch or use --rev'),
         )
-    if ui.configbool('fastannotate', 'unfilteredrepo'):
+    if ui.configbool(b'fastannotate', b'unfilteredrepo'):
         repo = repo.unfiltered()
     ctx = scmutil.revsingle(repo, rev)
     m = scmutil.match(ctx, pats, opts)
     paths = list(ctx.walk(m))
-    if util.safehasattr(repo, 'prefetchfastannotate'):
+    if util.safehasattr(repo, b'prefetchfastannotate'):
         # client
-        if opts.get('REV'):
-            raise error.Abort(_('--rev cannot be used for client'))
+        if opts.get(b'REV'):
+            raise error.Abort(_(b'--rev cannot be used for client'))
         repo.prefetchfastannotate(paths)
     else:
         # server, or full repo
-        progress = ui.makeprogress(_('building'), total=len(paths))
+        progress = ui.makeprogress(_(b'building'), total=len(paths))
         for i, path in enumerate(paths):
             progress.update(i)
             with facontext.annotatecontext(repo, path) as actx:
@@ -321,7 +339,7 @@
                     # the cache is broken (could happen with renaming so the
                     # file history gets invalidated). rebuild and try again.
                     ui.debug(
-                        'fastannotate: %s: rebuilding broken cache\n' % path
+                        b'fastannotate: %s: rebuilding broken cache\n' % path
                     )
                     actx.rebuild()
                     try:
@@ -331,8 +349,8 @@
                         # cache for other files.
                         ui.warn(
                             _(
-                                'fastannotate: %s: failed to '
-                                'build cache: %r\n'
+                                b'fastannotate: %s: failed to '
+                                b'build cache: %r\n'
                             )
                             % (path, ex)
                         )
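
The _knownopts set built above normalizes registered flag names
(b'line-number') into the keys that appear in the opts dict
(b'line_number'). Illustrated with two of the options registered earlier:

    opts = [(b'l', b'line-number', None, b''), (b'', b'no-follow', None, b'')]
    known = {o[1].replace(b'-', b'_') for o in opts}
    print(sorted(known))   # [b'line_number', b'no_follow']
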
--- a/hgext/fastannotate/context.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/fastannotate/context.py	Sun Oct 06 09:48:39 2019 -0400
@@ -52,7 +52,7 @@
     # renamed filectx won't have a filelog yet, so set it
     # from the cache to save time
     for p in pl:
-        if not '_filelog' in p.__dict__:
+        if not b'_filelog' in p.__dict__:
             p._filelog = _getflog(f._repo, p.path())
 
     return pl
@@ -62,8 +62,8 @@
 # so it takes a fctx instead of a pair of text and fctx.
 def _decorate(fctx):
     text = fctx.data()
-    linecount = text.count('\n')
-    if text and not text.endswith('\n'):
+    linecount = text.count(b'\n')
+    if text and not text.endswith(b'\n'):
         linecount += 1
     return ([(fctx, i) for i in pycompat.xrange(linecount)], text)
 
@@ -75,7 +75,7 @@
     for (a1, a2, b1, b2), t in blocks:
         # Changed blocks ('!') or blocks made only of blank lines ('~')
         # belong to the child.
-        if t == '=':
+        if t == b'=':
             child[0][b1:b2] = parent[0][a1:a2]
     return child
 
@@ -119,7 +119,7 @@
             fctx = repo.filectx(path, changeid=ctx.rev())
     else:
         fctx = ctx[path]
-        if adjustctx == 'linkrev':
+        if adjustctx == b'linkrev':
             introrev = fctx.linkrev()
         else:
             introrev = fctx.introrev()
@@ -132,10 +132,10 @@
 # like mercurial.store.encodedir, but use linelog suffixes: .m, .l, .lock
 def encodedir(path):
     return (
-        path.replace('.hg/', '.hg.hg/')
-        .replace('.l/', '.l.hg/')
-        .replace('.m/', '.m.hg/')
-        .replace('.lock/', '.lock.hg/')
+        path.replace(b'.hg/', b'.hg.hg/')
+        .replace(b'.l/', b'.l.hg/')
+        .replace(b'.m/', b'.m.hg/')
+        .replace(b'.lock/', b'.lock.hg/')
     )
 
 
@@ -157,9 +157,9 @@
     """
 
     defaults = {
-        'diffopts': None,
-        'followrename': True,
-        'followmerge': True,
+        b'diffopts': None,
+        b'followrename': True,
+        b'followmerge': True,
     }
 
     def __init__(self, **opts):
@@ -170,17 +170,17 @@
     @util.propertycache
     def shortstr(self):
         """represent opts in a short string, suitable for a directory name"""
-        result = ''
+        result = b''
         if not self.followrename:
-            result += 'r0'
+            result += b'r0'
         if not self.followmerge:
-            result += 'm0'
+            result += b'm0'
         if self.diffopts is not None:
             assert isinstance(self.diffopts, mdiff.diffopts)
             diffopthash = hashdiffopts(self.diffopts)
             if diffopthash != _defaultdiffopthash:
-                result += 'i' + diffopthash
-        return result or 'default'
+                result += b'i' + diffopthash
+        return result or b'default'
 
 
 defaultopts = annotateopts()
@@ -206,7 +206,7 @@
     def linelog(self):
         if self._linelog is None:
             if os.path.exists(self.linelogpath):
-                with open(self.linelogpath, 'rb') as f:
+                with open(self.linelogpath, b'rb') as f:
                     try:
                         self._linelog = linelogmod.linelog.fromdata(f.read())
                     except linelogmod.LineLogError:
@@ -226,7 +226,7 @@
             self._revmap.flush()
             self._revmap = None
         if self._linelog is not None:
-            with open(self.linelogpath, 'wb') as f:
+            with open(self.linelogpath, b'wb') as f:
                 f.write(self._linelog.encode())
             self._linelog = None
 
@@ -308,11 +308,11 @@
         if directly:
             if self.ui.debugflag:
                 self.ui.debug(
-                    'fastannotate: %s: using fast path '
-                    '(resolved fctx: %s)\n'
+                    b'fastannotate: %s: using fast path '
+                    b'(resolved fctx: %s)\n'
                     % (
                         self.path,
-                        stringutil.pprint(util.safehasattr(revfctx, 'node')),
+                        stringutil.pprint(util.safehasattr(revfctx, b'node')),
                     )
                 )
             return self.annotatedirectly(revfctx, showpath, showlines)
@@ -356,8 +356,8 @@
         if masterfctx:
             if masterfctx.rev() is None:
                 raise error.Abort(
-                    _('cannot update linelog to wdir()'),
-                    hint=_('set fastannotate.mainbranch'),
+                    _(b'cannot update linelog to wdir()'),
+                    hint=_(b'set fastannotate.mainbranch'),
                 )
             initvisit.append(masterfctx)
         visit = initvisit[:]
@@ -403,13 +403,13 @@
         if self.ui.debugflag:
             if newmainbranch:
                 self.ui.debug(
-                    'fastannotate: %s: %d new changesets in the main'
-                    ' branch\n' % (self.path, len(newmainbranch))
+                    b'fastannotate: %s: %d new changesets in the main'
+                    b' branch\n' % (self.path, len(newmainbranch))
                 )
             elif not hist:  # no joints, no updates
                 self.ui.debug(
-                    'fastannotate: %s: linelog cannot help in '
-                    'annotating this revision\n' % self.path
+                    b'fastannotate: %s: linelog cannot help in '
+                    b'annotating this revision\n' % self.path
                 )
 
         # prepare annotateresult so we can update linelog incrementally
@@ -418,7 +418,7 @@
         # 3rd DFS does the actual annotate
         visit = initvisit[:]
         progress = self.ui.makeprogress(
-            'building cache', total=len(newmainbranch)
+            b'building cache', total=len(newmainbranch)
         )
         while visit:
             f = visit[-1]
@@ -463,7 +463,7 @@
                 if len(pl) == 2 and self.opts.followmerge:  # merge
                     bannotated = curr[0]
                 if blocks is None:  # no parents, add an empty one
-                    blocks = list(self._diffblocks('', curr[1]))
+                    blocks = list(self._diffblocks(b'', curr[1]))
                 self._appendrev(f, blocks, bannotated)
             elif showpath:  # not append linelog, but we need to record path
                 self._node2path[f.node()] = f.path()
@@ -490,7 +490,7 @@
             if hsh is not None and (hsh, self.path) in self.revmap:
                 f = hsh
         if f is None:
-            adjustctx = 'linkrev' if self._perfhack else True
+            adjustctx = b'linkrev' if self._perfhack else True
             f = self._resolvefctx(rev, adjustctx=adjustctx, resolverev=True)
             result = f in self.revmap
             if not result and self._perfhack:
@@ -511,7 +511,7 @@
         # find a chain from rev to anything in the mainbranch
         if revfctx not in self.revmap:
             chain = [revfctx]
-            a = ''
+            a = b''
             while True:
                 f = chain[-1]
                 pl = self._parentfunc(f)
@@ -589,8 +589,8 @@
                         hsh = annotateresult[idxs[0]][0]
                         if self.ui.debugflag:
                             self.ui.debug(
-                                'fastannotate: reading %s line #%d '
-                                'to resolve lines %r\n'
+                                b'fastannotate: reading %s line #%d '
+                                b'to resolve lines %r\n'
                                 % (node.short(hsh), linenum, idxs)
                             )
                         fctx = self._resolvefctx(hsh, revmap.rev2path(rev))
@@ -603,14 +603,15 @@
 
             # run the annotate and the lines should match to the file content
             self.ui.debug(
-                'fastannotate: annotate %s to resolve lines\n' % node.short(hsh)
+                b'fastannotate: annotate %s to resolve lines\n'
+                % node.short(hsh)
             )
             linelog.annotate(rev)
             fctx = self._resolvefctx(hsh, revmap.rev2path(rev))
             annotated = linelog.annotateresult
             lines = mdiff.splitnewlines(fctx.data())
             if len(lines) != len(annotated):
-                raise faerror.CorruptedFileError('unexpected annotated lines')
+                raise faerror.CorruptedFileError(b'unexpected annotated lines')
             # resolve lines from the annotate result
             for i, line in enumerate(lines):
                 k = annotated[i]
@@ -633,11 +634,11 @@
         llrev = self.revmap.hsh2rev(hsh)
         if not llrev:
             raise faerror.CorruptedFileError(
-                '%s is not in revmap' % node.hex(hsh)
+                b'%s is not in revmap' % node.hex(hsh)
             )
         if (self.revmap.rev2flag(llrev) & revmapmod.sidebranchflag) != 0:
             raise faerror.CorruptedFileError(
-                '%s is not in revmap mainbranch' % node.hex(hsh)
+                b'%s is not in revmap mainbranch' % node.hex(hsh)
             )
         self.linelog.annotate(llrev)
         result = [
@@ -677,7 +678,7 @@
             """(fctx) -> int"""
             # f should not be a linelog revision
             if isinstance(f, int):
-                raise error.ProgrammingError('f should not be an int')
+                raise error.ProgrammingError(b'f should not be an int')
             # f is a fctx, allocate linelog rev on demand
             hsh = f.node()
             rev = revmap.hsh2rev(hsh)
@@ -690,7 +691,7 @@
         siderevmap = {}  # node: int
         if bannotated is not None:
             for (a1, a2, b1, b2), op in blocks:
-                if op != '=':
+                if op != b'=':
                     # f could be either linelog rev, or fctx.
                     siderevs += [
                         f
@@ -708,7 +709,7 @@
         siderevmap[fctx] = llrev
 
         for (a1, a2, b1, b2), op in reversed(blocks):
-            if op == '=':
+            if op == b'=':
                 continue
             if bannotated is None:
                 linelog.replacelines(llrev, a1, a2, b1, b2)
@@ -760,7 +761,7 @@
 
     @util.propertycache
     def _perfhack(self):
-        return self.ui.configbool('fastannotate', 'perfhack')
+        return self.ui.configbool(b'fastannotate', b'perfhack')
 
     def _resolvefctx(self, rev, path=None, **kwds):
         return resolvefctx(self.repo, rev, (path or self.path), **kwds)
@@ -781,7 +782,7 @@
     def __init__(self, repo, path, opts=defaultopts):
         # different options use different directories
         self._vfspath = os.path.join(
-            'fastannotate', opts.shortstr, encodedir(path)
+            b'fastannotate', opts.shortstr, encodedir(path)
         )
         self._repo = repo
 
@@ -791,14 +792,14 @@
 
     @property
     def linelogpath(self):
-        return self._repo.vfs.join(self._vfspath + '.l')
+        return self._repo.vfs.join(self._vfspath + b'.l')
 
     def lock(self):
-        return lockmod.lock(self._repo.vfs, self._vfspath + '.lock')
+        return lockmod.lock(self._repo.vfs, self._vfspath + b'.lock')
 
     @property
     def revmappath(self):
-        return self._repo.vfs.join(self._vfspath + '.m')
+        return self._repo.vfs.join(self._vfspath + b'.m')
 
 
 @contextlib.contextmanager
@@ -831,7 +832,7 @@
     except Exception:
         if actx is not None:
             actx.rebuild()
-        repo.ui.debug('fastannotate: %s: cache broken and deleted\n' % path)
+        repo.ui.debug(b'fastannotate: %s: cache broken and deleted\n' % path)
         raise
     finally:
         if actx is not None:
@@ -844,7 +845,7 @@
     """
     repo = fctx._repo
     path = fctx._path
-    if repo.ui.configbool('fastannotate', 'forcefollow', True):
+    if repo.ui.configbool(b'fastannotate', b'forcefollow', True):
         follow = True
     aopts = annotateopts(diffopts=diffopts, followrename=follow)
     return annotatecontext(repo, path, aopts, rebuild)
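
encodedir() above escapes directory components so a tracked path ending in
.l/, .m/ or .lock/ cannot collide with the cache files that fastannotate
stores under those suffixes. Its effect on a made-up path:

    path = b'foo.l/bar.hg/baz'
    path = path.replace(b'.hg/', b'.hg.hg/').replace(b'.l/', b'.l.hg/')
    print(path)   # b'foo.l.hg/bar.hg.hg/baz'
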
--- a/hgext/fastannotate/formatter.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/fastannotate/formatter.py	Sun Oct 06 09:48:39 2019 -0400
@@ -33,35 +33,35 @@
         hexfunc = self._hexfunc
 
         # special handling of working copy "changeset" and "rev" functions
-        if self.opts.get('rev') == 'wdir()':
+        if self.opts.get(b'rev') == b'wdir()':
             orig = hexfunc
             hexfunc = lambda x: None if x is None else orig(x)
-            wnode = hexfunc(repo['.'].node()) + '+'
-            wrev = '%d' % repo['.'].rev()
-            wrevpad = ''
-            if not opts.get('changeset'):  # only show + if changeset is hidden
-                wrev += '+'
-                wrevpad = ' '
-            revenc = lambda x: wrev if x is None else ('%d' % x) + wrevpad
+            wnode = hexfunc(repo[b'.'].node()) + b'+'
+            wrev = b'%d' % repo[b'.'].rev()
+            wrevpad = b''
+            if not opts.get(b'changeset'):  # only show + if changeset is hidden
+                wrev += b'+'
+                wrevpad = b' '
+            revenc = lambda x: wrev if x is None else (b'%d' % x) + wrevpad
 
             def csetenc(x):
                 if x is None:
                     return wnode
-                return pycompat.bytestr(x) + ' '
+                return pycompat.bytestr(x) + b' '
 
         else:
             revenc = csetenc = pycompat.bytestr
 
         # opt name, separator, raw value (for json/plain), encoder (for plain)
         opmap = [
-            ('user', ' ', lambda x: getctx(x).user(), ui.shortuser),
-            ('number', ' ', lambda x: getctx(x).rev(), revenc),
-            ('changeset', ' ', lambda x: hexfunc(x[0]), csetenc),
-            ('date', ' ', lambda x: getctx(x).date(), datefunc),
-            ('file', ' ', lambda x: x[2], pycompat.bytestr),
-            ('line_number', ':', lambda x: x[1] + 1, pycompat.bytestr),
+            (b'user', b' ', lambda x: getctx(x).user(), ui.shortuser),
+            (b'number', b' ', lambda x: getctx(x).rev(), revenc),
+            (b'changeset', b' ', lambda x: hexfunc(x[0]), csetenc),
+            (b'date', b' ', lambda x: getctx(x).date(), datefunc),
+            (b'file', b' ', lambda x: x[2], pycompat.bytestr),
+            (b'line_number', b':', lambda x: x[1] + 1, pycompat.bytestr),
         ]
-        fieldnamemap = {'number': 'rev', 'changeset': 'node'}
+        fieldnamemap = {b'number': b'rev', b'changeset': b'node'}
         funcmap = [
             (get, sep, fieldnamemap.get(op, op), enc)
             for op, sep, get, enc in opmap
@@ -69,7 +69,7 @@
         ]
         # no separator for first column
         funcmap[0] = list(funcmap[0])
-        funcmap[0][1] = ''
+        funcmap[0][1] = b''
         self.funcmap = funcmap
 
     def write(self, annotatedresult, lines=None, existinglines=None):
@@ -83,39 +83,39 @@
         for f, sep, name, enc in self.funcmap:
             l = [enc(f(x)) for x in annotatedresult]
             pieces.append(l)
-            if name in ['node', 'date']:  # node and date have fixed size
+            if name in [b'node', b'date']:  # node and date have fixed size
                 l = l[:1]
             widths = pycompat.maplist(encoding.colwidth, set(l))
             maxwidth = max(widths) if widths else 0
             maxwidths.append(maxwidth)
 
         # buffered output
-        result = ''
+        result = b''
         for i in pycompat.xrange(len(annotatedresult)):
             for j, p in enumerate(pieces):
                 sep = self.funcmap[j][1]
-                padding = ' ' * (maxwidths[j] - len(p[i]))
+                padding = b' ' * (maxwidths[j] - len(p[i]))
                 result += sep + padding + p[i]
             if lines:
                 if existinglines is None:
-                    result += ': ' + lines[i]
+                    result += b': ' + lines[i]
                 else:  # extra formatting showing whether a line exists
                     key = (annotatedresult[i][0], annotatedresult[i][1])
                     if key in existinglines:
-                        result += ':  ' + lines[i]
+                        result += b':  ' + lines[i]
                     else:
-                        result += ': ' + self.ui.label(
-                            '-' + lines[i], 'diff.deleted'
+                        result += b': ' + self.ui.label(
+                            b'-' + lines[i], b'diff.deleted'
                         )
 
-            if result[-1:] != '\n':
-                result += '\n'
+            if result[-1:] != b'\n':
+                result += b'\n'
 
         self.ui.write(result)
 
     @util.propertycache
     def _hexfunc(self):
-        if self.ui.debugflag or self.opts.get('long_hash'):
+        if self.ui.debugflag or self.opts.get(b'long_hash'):
             return node.hex
         else:
             return node.short
@@ -127,7 +127,7 @@
 class jsonformatter(defaultformatter):
     def __init__(self, ui, repo, opts):
         super(jsonformatter, self).__init__(ui, repo, opts)
-        self.ui.write('[')
+        self.ui.write(b'[')
         self.needcomma = False
 
     def write(self, annotatedresult, lines=None, existinglines=None):
@@ -139,23 +139,23 @@
             for f, sep, name, enc in self.funcmap
         ]
         if lines is not None:
-            pieces.append(('line', lines))
+            pieces.append((b'line', lines))
         pieces.sort()
 
-        seps = [','] * len(pieces[:-1]) + ['']
+        seps = [b','] * len(pieces[:-1]) + [b'']
 
-        result = ''
+        result = b''
         lasti = len(annotatedresult) - 1
         for i in pycompat.xrange(len(annotatedresult)):
-            result += '\n {\n'
+            result += b'\n {\n'
             for j, p in enumerate(pieces):
                 k, vs = p
-                result += '  "%s": %s%s\n' % (
+                result += b'  "%s": %s%s\n' % (
                     k,
                     templatefilters.json(vs[i], paranoid=False),
                     seps[j],
                 )
-            result += ' }%s' % ('' if i == lasti else ',')
+            result += b' }%s' % (b'' if i == lasti else b',')
         if lasti >= 0:
             self.needcomma = True
 
@@ -163,7 +163,7 @@
 
     def _writecomma(self):
         if self.needcomma:
-            self.ui.write(',')
+            self.ui.write(b',')
             self.needcomma = False
 
     @util.propertycache
@@ -171,4 +171,4 @@
         return node.hex
 
     def end(self):
-        self.ui.write('\n]\n')
+        self.ui.write(b'\n]\n')
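
The formatter.py churn is mechanical, but it encodes a real Python 3
constraint: the output buffer is bytes end to end, a stray str literal in
the chain raises TypeError, and indexing bytes yields integers, which is
why the code compares the one-byte slice result[-1:] rather than
result[-1]. A standalone illustration, independent of Mercurial:

    buf = b'ab'
    assert buf[-1] == 0x62     # indexing bytes yields an int on Python 3
    assert buf[-1:] == b'b'    # slicing yields bytes, comparable to b'\n'
    try:
        buf += ': '            # mixing a str into a bytes buffer...
    except TypeError:
        pass                   # ...is exactly what the b'' prefixes prevent
    buf += b': ok\n'           # bytes + bytes works fine
    buf += b'%d\n' % 42        # bytes %-formatting exists since Python 3.5
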
--- a/hgext/fastannotate/protocol.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/fastannotate/protocol.py	Sun Oct 06 09:48:39 2019 -0400
@@ -25,12 +25,12 @@
 
 def _getmaster(ui):
     """get the mainbranch, and enforce it is set"""
-    master = ui.config('fastannotate', 'mainbranch')
+    master = ui.config(b'fastannotate', b'mainbranch')
     if not master:
         raise error.Abort(
             _(
-                'fastannotate.mainbranch is required '
-                'for both the client and the server'
+                b'fastannotate.mainbranch is required '
+                b'for both the client and the server'
             )
         )
     return master
@@ -41,7 +41,7 @@
 
 def _capabilities(orig, repo, proto):
     result = orig(repo, proto)
-    result.append('getannotate')
+    result.append(b'getannotate')
     return result
 
 
@@ -49,9 +49,9 @@
     # output:
     #   FILE := vfspath + '\0' + str(size) + '\0' + content
     #   OUTPUT := '' | FILE + OUTPUT
-    result = ''
+    result = b''
     buildondemand = repo.ui.configbool(
-        'fastannotate', 'serverbuildondemand', True
+        b'fastannotate', b'serverbuildondemand', True
     )
     with context.annotatecontext(repo, path) as actx:
         if buildondemand:
@@ -80,25 +80,25 @@
             for p in [actx.revmappath, actx.linelogpath]:
                 if not os.path.exists(p):
                     continue
-                with open(p, 'rb') as f:
+                with open(p, b'rb') as f:
                     content = f.read()
-                vfsbaselen = len(repo.vfs.base + '/')
+                vfsbaselen = len(repo.vfs.base + b'/')
                 relpath = p[vfsbaselen:]
-                result += '%s\0%d\0%s' % (relpath, len(content), content)
+                result += b'%s\0%d\0%s' % (relpath, len(content), content)
     return result
 
 
 def _registerwireprotocommand():
-    if 'getannotate' in wireprotov1server.commands:
+    if b'getannotate' in wireprotov1server.commands:
         return
-    wireprotov1server.wireprotocommand('getannotate', 'path lastnode')(
+    wireprotov1server.wireprotocommand(b'getannotate', b'path lastnode')(
         _getannotate
     )
 
 
 def serveruisetup(ui):
     _registerwireprotocommand()
-    extensions.wrapfunction(wireprotov1server, '_capabilities', _capabilities)
+    extensions.wrapfunction(wireprotov1server, b'_capabilities', _capabilities)
 
 
 # client-side
@@ -109,15 +109,15 @@
     i = 0
     l = len(payload) - 1
     state = 0  # 0: vfspath, 1: size
-    vfspath = size = ''
+    vfspath = size = b''
     while i < l:
         ch = payload[i : i + 1]
-        if ch == '\0':
+        if ch == b'\0':
             if state == 1:
                 result[vfspath] = payload[i + 1 : i + 1 + int(size)]
                 i += int(size)
                 state = 0
-                vfspath = size = ''
+                vfspath = size = b''
             elif state == 0:
                 state = 1
         else:
@@ -133,11 +133,11 @@
     class fastannotatepeer(peer.__class__):
         @wireprotov1peer.batchable
         def getannotate(self, path, lastnode=None):
-            if not self.capable('getannotate'):
-                ui.warn(_('remote peer cannot provide annotate cache\n'))
+            if not self.capable(b'getannotate'):
+                ui.warn(_(b'remote peer cannot provide annotate cache\n'))
                 yield None, None
             else:
-                args = {'path': path, 'lastnode': lastnode or ''}
+                args = {b'path': path, b'lastnode': lastnode or b''}
                 f = wireprotov1peer.future()
                 yield args, f
                 yield _parseresponse(f.value)
@@ -150,7 +150,7 @@
     ui = repo.ui
 
     remotepath = ui.expandpath(
-        ui.config('fastannotate', 'remotepath', 'default')
+        ui.config(b'fastannotate', b'remotepath', b'default')
     )
     peer = hg.peer(ui, {}, remotepath)
 
@@ -175,11 +175,12 @@
     ui = repo.ui
     results = []
     with peer.commandexecutor() as batcher:
-        ui.debug('fastannotate: requesting %d files\n' % len(paths))
+        ui.debug(b'fastannotate: requesting %d files\n' % len(paths))
         for p in paths:
             results.append(
                 batcher.callcommand(
-                    'getannotate', {'path': p, 'lastnode': lastnodemap.get(p)}
+                    b'getannotate',
+                    {b'path': p, b'lastnode': lastnodemap.get(p)},
                 )
             )
 
@@ -189,19 +190,21 @@
             r = {util.pconvert(p): v for p, v in r.iteritems()}
             for path in sorted(r):
                 # ignore malicious paths
-                if not path.startswith('fastannotate/') or '/../' in (
-                    path + '/'
+                if not path.startswith(b'fastannotate/') or b'/../' in (
+                    path + b'/'
                 ):
-                    ui.debug('fastannotate: ignored malicious path %s\n' % path)
+                    ui.debug(
+                        b'fastannotate: ignored malicious path %s\n' % path
+                    )
                     continue
                 content = r[path]
                 if ui.debugflag:
                     ui.debug(
-                        'fastannotate: writing %d bytes to %s\n'
+                        b'fastannotate: writing %d bytes to %s\n'
                         % (len(content), path)
                     )
                 repo.vfs.makedirs(os.path.dirname(path))
-                with repo.vfs(path, 'wb') as f:
+                with repo.vfs(path, b'wb') as f:
                     f.write(content)
 
 
@@ -209,7 +212,7 @@
     """return a subset of paths whose history is long and need to fetch linelog
     from the server. works with remotefilelog and non-remotefilelog repos.
     """
-    threshold = repo.ui.configint('fastannotate', 'clientfetchthreshold', 10)
+    threshold = repo.ui.configint(b'fastannotate', b'clientfetchthreshold', 10)
     if threshold <= 0:
         return paths
 
@@ -240,7 +243,7 @@
                     clientfetch(self, needupdatepaths, lastnodemap, peer)
             except Exception as ex:
                 # could be a non-writable directory or similar; not fatal
-                self.ui.debug('fastannotate: prefetch failed: %r\n' % ex)
+                self.ui.debug(b'fastannotate: prefetch failed: %r\n' % ex)
 
     repo.__class__ = fastannotaterepo
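
The framing documented in _getannotate above is plain length-prefixed
concatenation: FILE := vfspath + '\0' + str(size) + '\0' + content,
repeated. The client-side _parseresponse walks it byte by byte; a compact
sketch of the same framing using index() instead (the helper name is
invented):

    def parsefiles(payload):
        files = {}
        i = 0
        while i < len(payload):
            nul1 = payload.index(b'\0', i)          # end of vfspath
            nul2 = payload.index(b'\0', nul1 + 1)   # end of decimal size
            size = int(payload[nul1 + 1 : nul2])
            files[payload[i:nul1]] = payload[nul2 + 1 : nul2 + 1 + size]
            i = nul2 + 1 + size
        return files

    assert parsefiles(b'a/b\x005\x00hello') == {b'a/b': b'hello'}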
 
--- a/hgext/fastannotate/revmap.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/fastannotate/revmap.py	Sun Oct 06 09:48:39 2019 -0400
@@ -70,7 +70,7 @@
         # since rename does not happen frequently, do not store path for every
         # revision. self._renamerevs can be used for bisecting.
         self._renamerevs = [0]
-        self._renamepaths = ['']
+        self._renamepaths = [b'']
         self._lastmaxrev = -1
         if path:
             if os.path.exists(path):
@@ -98,9 +98,13 @@
         if flush is True, incrementally update the file.
         """
         if hsh in self._hsh2rev:
-            raise error.CorruptedFileError('%r is in revmap already' % hex(hsh))
+            raise error.CorruptedFileError(
+                b'%r is in revmap already' % hex(hsh)
+            )
         if len(hsh) != _hshlen:
-            raise hgerror.ProgrammingError('hsh must be %d-char long' % _hshlen)
+            raise hgerror.ProgrammingError(
+                b'hsh must be %d-char long' % _hshlen
+            )
         idx = len(self._rev2hsh)
         flag = 0
         if sidebranch:
@@ -149,7 +153,7 @@
         self._rev2hsh = [None]
         self._rev2flag = [None]
         self._hsh2rev = {}
-        self._rev2path = ['']
+        self._rev2path = [b'']
         self._lastmaxrev = -1
         if flush:
             self.flush()
@@ -159,12 +163,12 @@
         if not self.path:
             return
         if self._lastmaxrev == -1:  # write the entire file
-            with open(self.path, 'wb') as f:
+            with open(self.path, b'wb') as f:
                 f.write(self.HEADER)
                 for i in pycompat.xrange(1, len(self._rev2hsh)):
                     self._writerev(i, f)
         else:  # append incrementally
-            with open(self.path, 'ab') as f:
+            with open(self.path, b'ab') as f:
                 for i in pycompat.xrange(
                     self._lastmaxrev + 1, len(self._rev2hsh)
                 ):
@@ -179,7 +183,7 @@
         # which is faster than both LOAD_CONST and LOAD_GLOBAL.
         flaglen = 1
         hshlen = _hshlen
-        with open(self.path, 'rb') as f:
+        with open(self.path, b'rb') as f:
             if f.read(len(self.HEADER)) != self.HEADER:
                 raise error.CorruptedFileError()
             self.clear(flush=False)
@@ -205,23 +209,23 @@
         """append a revision data to file"""
         flag = self._rev2flag[rev]
         hsh = self._rev2hsh[rev]
-        f.write(struct.pack('B', flag))
+        f.write(struct.pack(b'B', flag))
         if flag & renameflag:
             path = self.rev2path(rev)
             if path is None:
-                raise error.CorruptedFileError('cannot find path for %s' % rev)
+                raise error.CorruptedFileError(b'cannot find path for %s' % rev)
             f.write(path + b'\0')
         f.write(hsh)
 
     @staticmethod
     def _readcstr(f):
         """read a C-language-like '\0'-terminated string"""
-        buf = ''
+        buf = b''
         while True:
             ch = f.read(1)
             if not ch:  # unexpected eof
                 raise error.CorruptedFileError()
-            if ch == '\0':
+            if ch == b'\0':
                 break
             buf += ch
         return buf
@@ -249,7 +253,7 @@
     """
     hsh = None
     try:
-        with open(path, 'rb') as f:
+        with open(path, b'rb') as f:
             f.seek(-_hshlen, io.SEEK_END)
             if f.tell() > len(revmap.HEADER):
                 hsh = f.read(_hshlen)
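
revmap.py's on-disk format, as _writerev above lays it out, is one record
per revision: a single flag byte, an optional NUL-terminated rename path
(when the rename bit is set), then the raw node hash. A writer sketch; the
bit value of renameflag and the 20-byte hash length are assumptions here
(both are defined earlier in the file), and packrecord itself is invented:

    import struct

    renameflag = 1   # assumed value of the bit tested by `flag & renameflag`
    _hshlen = 20     # assumed: length of a raw SHA-1 node

    def packrecord(flag, hsh, path=None):
        assert len(hsh) == _hshlen
        rec = struct.pack(b'B', flag)   # struct accepts bytes formats on py3
        if flag & renameflag:
            rec += path + b'\0'         # same NUL terminator _readcstr expects
        return rec + hsh
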
--- a/hgext/fastannotate/support.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/fastannotate/support.py	Sun Oct 06 09:48:39 2019 -0400
@@ -64,7 +64,7 @@
 
 def _getmaster(fctx):
     """(fctx) -> str"""
-    return fctx._repo.ui.config('fastannotate', 'mainbranch') or 'default'
+    return fctx._repo.ui.config(b'fastannotate', b'mainbranch') or b'default'
 
 
 def _doannotate(fctx, follow=True, diffopts=None):
@@ -83,7 +83,7 @@
         except Exception:
             ac.rebuild()  # try rebuild once
             fctx._repo.ui.debug(
-                'fastannotate: %s: rebuilding broken cache\n' % fctx._path
+                b'fastannotate: %s: rebuilding broken cache\n' % fctx._path
             )
             try:
                 annotated, contents = ac.annotate(
@@ -98,7 +98,7 @@
 
 def _hgwebannotate(orig, fctx, ui):
     diffopts = patch.difffeatureopts(
-        ui, untrusted=True, section='annotate', whitespace=True
+        ui, untrusted=True, section=b'annotate', whitespace=True
     )
     return _doannotate(fctx, diffopts=diffopts)
 
@@ -115,7 +115,7 @@
         return _doannotate(self, follow, diffopts)
     except Exception as ex:
         self._repo.ui.debug(
-            'fastannotate: falling back to the vanilla ' 'annotate: %r\n' % ex
+            b'fastannotate: falling back to the vanilla ' b'annotate: %r\n' % ex
         )
         return orig(self, follow=follow, skiprevs=skiprevs, diffopts=diffopts)
 
@@ -130,8 +130,8 @@
 
 
 def replacehgwebannotate():
-    extensions.wrapfunction(hgweb.webutil, 'annotate', _hgwebannotate)
+    extensions.wrapfunction(hgweb.webutil, b'annotate', _hgwebannotate)
 
 
 def replacefctxannotate():
-    extensions.wrapfunction(hgcontext.basefilectx, 'annotate', _fctxannotate)
+    extensions.wrapfunction(hgcontext.basefilectx, b'annotate', _fctxannotate)
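
support.py hooks fastannotate in via extensions.wrapfunction, whose
convention is visible in _fctxannotate above: the replacement receives the
original callable as its first argument and can defer to it. A generic
sketch of that fallback shape (both function names here are placeholders):

    def fastpath(self, *args, **kwargs):   # stand-in for the fast path
        raise NotImplementedError

    def wrapped(orig, self, *args, **kwargs):
        try:
            return fastpath(self, *args, **kwargs)
        except Exception:
            # mirror the fallback above: any error defers to the vanilla code
            return orig(self, *args, **kwargs)
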
--- a/hgext/fetch.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/fetch.py	Sun Oct 06 09:48:39 2019 -0400
@@ -30,30 +30,30 @@
 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
 # be specifying the version(s) of Mercurial they are tested with, or
 # leave the attribute unspecified.
-testedwith = 'ships-with-hg-core'
+testedwith = b'ships-with-hg-core'
 
 
 @command(
-    'fetch',
+    b'fetch',
     [
         (
-            'r',
-            'rev',
+            b'r',
+            b'rev',
             [],
-            _('a specific revision you would like to pull'),
-            _('REV'),
+            _(b'a specific revision you would like to pull'),
+            _(b'REV'),
         ),
-        ('', 'edit', None, _('invoke editor on commit messages')),
-        ('', 'force-editor', None, _('edit commit message (DEPRECATED)')),
-        ('', 'switch-parent', None, _('switch parents when merging')),
+        (b'', b'edit', None, _(b'invoke editor on commit messages')),
+        (b'', b'force-editor', None, _(b'edit commit message (DEPRECATED)')),
+        (b'', b'switch-parent', None, _(b'switch parents when merging')),
     ]
     + cmdutil.commitopts
     + cmdutil.commitopts2
     + cmdutil.remoteopts,
-    _('hg fetch [SOURCE]'),
+    _(b'hg fetch [SOURCE]'),
     helpcategory=command.CATEGORY_REMOTE_REPO_MANAGEMENT,
 )
-def fetch(ui, repo, source='default', **opts):
+def fetch(ui, repo, source=b'default', **opts):
     '''pull changes from a remote repository, merge new changes if needed.
 
     This finds all changes from the repository at the specified path
@@ -74,9 +74,9 @@
     '''
 
     opts = pycompat.byteskwargs(opts)
-    date = opts.get('date')
+    date = opts.get(b'date')
     if date:
-        opts['date'] = dateutil.parsedate(date)
+        opts[b'date'] = dateutil.parsedate(date)
 
     parent = repo.dirstate.p1()
     branch = repo.dirstate.branch()
@@ -86,8 +86,8 @@
         branchnode = None
     if parent != branchnode:
         raise error.Abort(
-            _('working directory not at branch tip'),
-            hint=_("use 'hg update' to check out branch tip"),
+            _(b'working directory not at branch tip'),
+            hint=_(b"use 'hg update' to check out branch tip"),
         )
 
     wlock = lock = None
@@ -102,23 +102,23 @@
         if len(bheads) > 1:
             raise error.Abort(
                 _(
-                    'multiple heads in this branch '
-                    '(use "hg heads ." and "hg merge" to merge)'
+                    b'multiple heads in this branch '
+                    b'(use "hg heads ." and "hg merge" to merge)'
                 )
             )
 
         other = hg.peer(repo, opts, ui.expandpath(source))
         ui.status(
-            _('pulling from %s\n') % util.hidepassword(ui.expandpath(source))
+            _(b'pulling from %s\n') % util.hidepassword(ui.expandpath(source))
         )
         revs = None
-        if opts['rev']:
+        if opts[b'rev']:
             try:
-                revs = [other.lookup(rev) for rev in opts['rev']]
+                revs = [other.lookup(rev) for rev in opts[b'rev']]
             except error.CapabilityError:
                 err = _(
-                    "other repository doesn't support revision lookup, "
-                    "so a rev cannot be specified."
+                    b"other repository doesn't support revision lookup, "
+                    b"so a rev cannot be specified."
                 )
                 raise error.Abort(err)
 
@@ -146,8 +146,8 @@
         if len(newheads) > 1:
             ui.status(
                 _(
-                    'not merging with %d other new branch heads '
-                    '(use "hg heads ." and "hg merge" to merge them)\n'
+                    b'not merging with %d other new branch heads '
+                    b'(use "hg heads ." and "hg merge" to merge them)\n'
                 )
                 % (len(newheads) - 1)
             )
@@ -162,17 +162,17 @@
             # By default, we consider the repository we're pulling
             # *from* as authoritative, so we merge our changes into
             # theirs.
-            if opts['switch_parent']:
+            if opts[b'switch_parent']:
                 firstparent, secondparent = newparent, newheads[0]
             else:
                 firstparent, secondparent = newheads[0], newparent
                 ui.status(
-                    _('updating to %d:%s\n')
+                    _(b'updating to %d:%s\n')
                     % (repo.changelog.rev(firstparent), short(firstparent))
                 )
             hg.clean(repo, firstparent)
             ui.status(
-                _('merging with %d:%s\n')
+                _(b'merging with %d:%s\n')
                 % (repo.changelog.rev(secondparent), short(secondparent))
             )
             err = hg.merge(repo, secondparent, remind=False)
@@ -180,13 +180,15 @@
         if not err:
             # we don't translate commit messages
             message = cmdutil.logmessage(ui, opts) or (
-                'Automated merge with %s' % util.removeauth(other.url())
+                b'Automated merge with %s' % util.removeauth(other.url())
             )
-            editopt = opts.get('edit') or opts.get('force_editor')
-            editor = cmdutil.getcommiteditor(edit=editopt, editform='fetch')
-            n = repo.commit(message, opts['user'], opts['date'], editor=editor)
+            editopt = opts.get(b'edit') or opts.get(b'force_editor')
+            editor = cmdutil.getcommiteditor(edit=editopt, editform=b'fetch')
+            n = repo.commit(
+                message, opts[b'user'], opts[b'date'], editor=editor
+            )
             ui.status(
-                _('new changeset %d:%s merges remote changes ' 'with local\n')
+                _(b'new changeset %d:%s merges remote changes ' b'with local\n')
                 % (repo.changelog.rev(n), short(n))
             )
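
fetch.py shows the standard kwargs dance for byteified commands: **opts
arrives with str keys on Python 3, pycompat.byteskwargs converts the keys
back to bytes for internal use, and pycompat.strkwargs converts them the
other way before **-expansion (as in the repo.hook calls elsewhere in this
patch). Minimal stand-ins for the two helpers, assuming ASCII option names;
the real implementations live in mercurial/pycompat.py:

    def byteskwargs(dic):
        return {k.encode('latin-1'): v for k, v in dic.items()}

    def strkwargs(dic):
        return {k.decode('latin-1'): v for k, v in dic.items()}

    opts = byteskwargs({'rev': [], 'switch_parent': None})
    assert opts[b'rev'] == []
    assert strkwargs(opts) == {'rev': [], 'switch_parent': None}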
 
--- a/hgext/fix.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/fix.py	Sun Oct 06 09:48:39 2019 -0400
@@ -157,7 +157,7 @@
 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
 # be specifying the version(s) of Mercurial they are tested with, or
 # leave the attribute unspecified.
-testedwith = 'ships-with-hg-core'
+testedwith = b'ships-with-hg-core'
 
 cmdtable = {}
 command = registrar.command(cmdtable)
@@ -167,61 +167,61 @@
 
 # Register the suboptions allowed for each configured fixer, and default values.
 FIXER_ATTRS = {
-    'command': None,
-    'linerange': None,
-    'pattern': None,
-    'priority': 0,
-    'metadata': 'false',
-    'skipclean': 'true',
-    'enabled': 'true',
+    b'command': None,
+    b'linerange': None,
+    b'pattern': None,
+    b'priority': 0,
+    b'metadata': b'false',
+    b'skipclean': b'true',
+    b'enabled': b'true',
 }
 
 for key, default in FIXER_ATTRS.items():
-    configitem('fix', '.*(:%s)?' % key, default=default, generic=True)
+    configitem(b'fix', b'.*(:%s)?' % key, default=default, generic=True)
 
 # A good default size allows most source code files to be fixed, but avoids
 # letting fixer tools choke on huge inputs, which could be surprising to the
 # user.
-configitem('fix', 'maxfilesize', default='2MB')
+configitem(b'fix', b'maxfilesize', default=b'2MB')
 
 # Allow fix commands to exit non-zero if an executed fixer tool exits non-zero.
 # This helps users write shell scripts that stop when a fixer tool signals a
 # problem.
-configitem('fix', 'failure', default='continue')
+configitem(b'fix', b'failure', default=b'continue')
 
 
 def checktoolfailureaction(ui, message, hint=None):
     """Abort with 'message' if fix.failure=abort"""
-    action = ui.config('fix', 'failure')
-    if action not in ('continue', 'abort'):
+    action = ui.config(b'fix', b'failure')
+    if action not in (b'continue', b'abort'):
         raise error.Abort(
-            _('unknown fix.failure action: %s') % (action,),
-            hint=_('use "continue" or "abort"'),
+            _(b'unknown fix.failure action: %s') % (action,),
+            hint=_(b'use "continue" or "abort"'),
         )
-    if action == 'abort':
+    if action == b'abort':
         raise error.Abort(message, hint=hint)
 
 
-allopt = ('', 'all', False, _('fix all non-public non-obsolete revisions'))
+allopt = (b'', b'all', False, _(b'fix all non-public non-obsolete revisions'))
 baseopt = (
-    '',
-    'base',
+    b'',
+    b'base',
     [],
     _(
-        'revisions to diff against (overrides automatic '
-        'selection, and applies to every revision being '
-        'fixed)'
+        b'revisions to diff against (overrides automatic '
+        b'selection, and applies to every revision being '
+        b'fixed)'
     ),
-    _('REV'),
+    _(b'REV'),
 )
-revopt = ('r', 'rev', [], _('revisions to fix'), _('REV'))
-wdiropt = ('w', 'working-dir', False, _('fix the working directory'))
-wholeopt = ('', 'whole', False, _('always fix every line of a file'))
-usage = _('[OPTION]... [FILE]...')
+revopt = (b'r', b'rev', [], _(b'revisions to fix'), _(b'REV'))
+wdiropt = (b'w', b'working-dir', False, _(b'fix the working directory'))
+wholeopt = (b'', b'whole', False, _(b'always fix every line of a file'))
+usage = _(b'[OPTION]... [FILE]...')
 
 
 @command(
-    'fix',
+    b'fix',
     [allopt, baseopt, revopt, wdiropt, wholeopt],
     usage,
     helpcategory=command.CATEGORY_FILE_CONTENTS,
@@ -250,12 +250,12 @@
     override this default behavior, though it is not usually desirable to do so.
     """
     opts = pycompat.byteskwargs(opts)
-    if opts['all']:
-        if opts['rev']:
-            raise error.Abort(_('cannot specify both "--rev" and "--all"'))
-        opts['rev'] = ['not public() and not obsolete()']
-        opts['working_dir'] = True
-    with repo.wlock(), repo.lock(), repo.transaction('fix'):
+    if opts[b'all']:
+        if opts[b'rev']:
+            raise error.Abort(_(b'cannot specify both "--rev" and "--all"'))
+        opts[b'rev'] = [b'not public() and not obsolete()']
+        opts[b'working_dir'] = True
+    with repo.wlock(), repo.lock(), repo.transaction(b'fix'):
         revstofix = getrevstofix(ui, repo, opts)
         basectxs = getbasectxs(repo, opts, revstofix)
         workqueue, numitems = getworkqueue(
@@ -297,7 +297,7 @@
         wdirwritten = False
         commitorder = sorted(revstofix, reverse=True)
         with ui.makeprogress(
-            topic=_('fixing'), unit=_('files'), total=sum(numitems.values())
+            topic=_(b'fixing'), unit=_(b'files'), total=sum(numitems.values())
         ) as progress:
             for rev, path, filerevmetadata, newdata in results:
                 progress.increment(item=path)
@@ -306,12 +306,12 @@
                 if newdata is not None:
                     filedata[rev][path] = newdata
                     hookargs = {
-                        'rev': rev,
-                        'path': path,
-                        'metadata': filerevmetadata,
+                        b'rev': rev,
+                        b'path': path,
+                        b'metadata': filerevmetadata,
                     }
                     repo.hook(
-                        'postfixfile',
+                        b'postfixfile',
                         throw=False,
                         **pycompat.strkwargs(hookargs)
                     )
@@ -332,11 +332,11 @@
 
         cleanup(repo, replacements, wdirwritten)
         hookargs = {
-            'replacements': replacements,
-            'wdirwritten': wdirwritten,
-            'metadata': aggregatemetadata,
+            b'replacements': replacements,
+            b'wdirwritten': wdirwritten,
+            b'metadata': aggregatemetadata,
         }
-        repo.hook('postfix', throw=True, **pycompat.strkwargs(hookargs))
+        repo.hook(b'postfix', throw=True, **pycompat.strkwargs(hookargs))
 
 
 def cleanup(repo, replacements, wdirwritten):
@@ -353,7 +353,7 @@
     effects of the command, though we choose not to output anything here.
     """
     replacements = {prec: [succ] for prec, succ in replacements.iteritems()}
-    scmutil.cleanupnodes(repo, replacements, 'fix', fixphase=True)
+    scmutil.cleanupnodes(repo, replacements, b'fix', fixphase=True)
 
 
 def getworkqueue(ui, repo, pats, opts, revstofix, basectxs):
@@ -375,7 +375,7 @@
     """
     workqueue = []
     numitems = collections.defaultdict(int)
-    maxfilesize = ui.configbytes('fix', 'maxfilesize')
+    maxfilesize = ui.configbytes(b'fix', b'maxfilesize')
     for rev in sorted(revstofix):
         fixctx = repo[rev]
         match = scmutil.match(fixctx, pats, opts)
@@ -387,7 +387,7 @@
                 continue
             if fctx.size() > maxfilesize:
                 ui.warn(
-                    _('ignoring file larger than %s: %s\n')
+                    _(b'ignoring file larger than %s: %s\n')
                     % (util.bytecount(maxfilesize), path)
                 )
                 continue
@@ -398,29 +398,29 @@
 
 def getrevstofix(ui, repo, opts):
     """Returns the set of revision numbers that should be fixed"""
-    revs = set(scmutil.revrange(repo, opts['rev']))
+    revs = set(scmutil.revrange(repo, opts[b'rev']))
     for rev in revs:
         checkfixablectx(ui, repo, repo[rev])
     if revs:
         cmdutil.checkunfinished(repo)
         checknodescendants(repo, revs)
-    if opts.get('working_dir'):
+    if opts.get(b'working_dir'):
         revs.add(wdirrev)
         if list(merge.mergestate.read(repo).unresolved()):
-            raise error.Abort('unresolved conflicts', hint="use 'hg resolve'")
+            raise error.Abort(b'unresolved conflicts', hint=b"use 'hg resolve'")
     if not revs:
         raise error.Abort(
-            'no changesets specified', hint='use --rev or --working-dir'
+            b'no changesets specified', hint=b'use --rev or --working-dir'
         )
     return revs
 
 
 def checknodescendants(repo, revs):
     if not obsolete.isenabled(repo, obsolete.allowunstableopt) and repo.revs(
-        '(%ld::) - (%ld)', revs, revs
+        b'(%ld::) - (%ld)', revs, revs
     ):
         raise error.Abort(
-            _('can only fix a changeset together ' 'with all its descendants')
+            _(b'can only fix a changeset together ' b'with all its descendants')
         )
 
 
@@ -428,15 +428,18 @@
     """Aborts if the revision shouldn't be replaced with a fixed one."""
     if not ctx.mutable():
         raise error.Abort(
-            'can\'t fix immutable changeset %s' % (scmutil.formatchangeid(ctx),)
+            b'can\'t fix immutable changeset %s'
+            % (scmutil.formatchangeid(ctx),)
         )
     if ctx.obsolete():
         # It would be better to actually check if the revision has a successor.
         allowdivergence = ui.configbool(
-            'experimental', 'evolution.allowdivergence'
+            b'experimental', b'evolution.allowdivergence'
         )
         if not allowdivergence:
-            raise error.Abort('fixing obsolete revision could cause divergence')
+            raise error.Abort(
+                b'fixing obsolete revision could cause divergence'
+            )
 
 
 def pathstofix(ui, repo, pats, opts, match, basectxs, fixctx):
@@ -473,10 +476,10 @@
     Another way to understand this is that we exclude line ranges that are
     common to the file in all base contexts.
     """
-    if opts.get('whole'):
+    if opts.get(b'whole'):
         # Return a range containing all lines. Rely on the diff implementation's
         # idea of how many lines are in the file, instead of reimplementing it.
-        return difflineranges('', content2)
+        return difflineranges(b'', content2)
 
     rangeslist = []
     for basectx in basectxs:
@@ -484,7 +487,7 @@
         if basepath in basectx:
             content1 = basectx[basepath].data()
         else:
-            content1 = ''
+            content1 = b''
         rangeslist.extend(difflineranges(content1, content2))
     return unionranges(rangeslist)
 
@@ -566,7 +569,7 @@
     ranges = []
     for lines, kind in mdiff.allblocks(content1, content2):
         firstline, lastline = lines[2:4]
-        if kind == '!' and firstline != lastline:
+        if kind == b'!' and firstline != lastline:
             ranges.append((firstline + 1, lastline))
     return ranges
 
@@ -581,8 +584,8 @@
     """
     # The --base flag overrides the usual logic, and we give every revision
     # exactly the set of baserevs that the user specified.
-    if opts.get('base'):
-        baserevs = set(scmutil.revrange(repo, opts.get('base')))
+    if opts.get(b'base'):
+        baserevs = set(scmutil.revrange(repo, opts.get(b'base')))
         if not baserevs:
             baserevs = {nullrev}
         basectxs = {repo[rev] for rev in baserevs}
@@ -621,7 +624,7 @@
             command = fixer.command(ui, path, ranges)
             if command is None:
                 continue
-            ui.debug('subprocess: %s\n' % (command,))
+            ui.debug(b'subprocess: %s\n' % (command,))
             proc = subprocess.Popen(
                 procutil.tonativestr(command),
                 shell=True,
@@ -636,11 +639,11 @@
             newerdata = stdout
             if fixer.shouldoutputmetadata():
                 try:
-                    metadatajson, newerdata = stdout.split('\0', 1)
+                    metadatajson, newerdata = stdout.split(b'\0', 1)
                     metadata[fixername] = json.loads(metadatajson)
                 except ValueError:
                     ui.warn(
-                        _('ignored invalid output from fixer tool: %s\n')
+                        _(b'ignored invalid output from fixer tool: %s\n')
                         % (fixername,)
                     )
                     continue
@@ -650,14 +653,14 @@
                 newdata = newerdata
             else:
                 if not stderr:
-                    message = _('exited with status %d\n') % (proc.returncode,)
+                    message = _(b'exited with status %d\n') % (proc.returncode,)
                     showstderr(ui, fixctx.rev(), fixername, message)
                 checktoolfailureaction(
                     ui,
-                    _('no fixes will be applied'),
+                    _(b'no fixes will be applied'),
                     hint=_(
-                        'use --config fix.failure=continue to apply any '
-                        'successful fixes anyway'
+                        b'use --config fix.failure=continue to apply any '
+                        b'successful fixes anyway'
                     ),
                 )
     return metadata, newdata
@@ -671,14 +674,14 @@
     space and would tend to be included in the error message if they were
     relevant.
     """
-    for line in re.split('[\r\n]+', stderr):
+    for line in re.split(b'[\r\n]+', stderr):
         if line:
-            ui.warn('[')
+            ui.warn(b'[')
             if rev is None:
-                ui.warn(_('wdir'), label='evolve.rev')
+                ui.warn(_(b'wdir'), label=b'evolve.rev')
             else:
-                ui.warn((str(rev)), label='evolve.rev')
-            ui.warn('] %s: %s\n' % (fixername, line))
+                ui.warn((str(rev)), label=b'evolve.rev')
+            ui.warn(b'] %s: %s\n' % (fixername, line))
 
 
 def writeworkingdir(repo, ctx, filedata, replacements):
@@ -694,7 +697,7 @@
     for path, data in filedata.iteritems():
         fctx = ctx[path]
         fctx.write(data, fctx.flags())
-        if repo.dirstate[path] == 'n':
+        if repo.dirstate[path] == b'n':
             repo.dirstate.normallookup(path)
 
     oldparentnodes = repo.dirstate.parents()
@@ -757,7 +760,7 @@
         )
 
     extra = ctx.extra().copy()
-    extra['fix_source'] = ctx.hex()
+    extra[b'fix_source'] = ctx.hex()
 
     memctx = context.memctx(
         repo,
@@ -774,7 +777,7 @@
     sucnode = memctx.commit()
     prenode = ctx.node()
     if prenode == sucnode:
-        ui.debug('node %s already existed\n' % (ctx.hex()))
+        ui.debug(b'node %s already existed\n' % (ctx.hex()))
     else:
         replacements[ctx.node()] = sucnode
 
@@ -788,11 +791,11 @@
     fixers = {}
     for name in fixernames(ui):
         fixers[name] = Fixer()
-        attrs = ui.configsuboptions('fix', name)[1]
+        attrs = ui.configsuboptions(b'fix', name)[1]
         for key, default in FIXER_ATTRS.items():
             setattr(
                 fixers[name],
-                pycompat.sysstr('_' + key),
+                pycompat.sysstr(b'_' + key),
                 attrs.get(key, default),
             )
         fixers[name]._priority = int(fixers[name]._priority)
@@ -805,11 +808,11 @@
         # default.
         if fixers[name]._pattern is None:
             ui.warn(
-                _('fixer tool has no pattern configuration: %s\n') % (name,)
+                _(b'fixer tool has no pattern configuration: %s\n') % (name,)
             )
             del fixers[name]
         elif not fixers[name]._enabled:
-            ui.debug('ignoring disabled fixer tool: %s\n' % (name,))
+            ui.debug(b'ignoring disabled fixer tool: %s\n' % (name,))
             del fixers[name]
     return collections.OrderedDict(
         sorted(fixers.items(), key=lambda item: item[1]._priority, reverse=True)
@@ -819,9 +822,9 @@
 def fixernames(ui):
     """Returns the names of [fix] config options that have suboptions"""
     names = set()
-    for k, v in ui.configitems('fix'):
-        if ':' in k:
-            names.add(k.split(':', 1)[0])
+    for k, v in ui.configitems(b'fix'):
+        if b':' in k:
+            names.add(k.split(b':', 1)[0])
     return names
 
 
@@ -849,7 +852,7 @@
             expand(
                 ui,
                 self._command,
-                {'rootpath': path, 'basename': os.path.basename(path)},
+                {b'rootpath': path, b'basename': os.path.basename(path)},
             )
         ]
         if self._linerange:
@@ -858,6 +861,8 @@
                 return None
             for first, last in ranges:
                 parts.append(
-                    expand(ui, self._linerange, {'first': first, 'last': last})
+                    expand(
+                        ui, self._linerange, {b'first': first, b'last': last}
+                    )
                 )
-        return ' '.join(parts)
+        return b' '.join(parts)
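
The {first}/{last} expansion at the end of fix.py above is fed by
difflineranges (built on mdiff.allblocks, keeping only '!' blocks) and
unionranges. A plausible sketch of the merging step, assuming 1-indexed,
inclusive (first, last) pairs where overlapping or adjacent ranges
collapse; the real unionranges in fix.py may differ in detail:

    def unionranges(rangeslist):
        merged = []
        for first, last in sorted(rangeslist):
            if merged and first <= merged[-1][1] + 1:
                # overlaps or touches the previous range: extend it
                merged[-1] = (merged[-1][0], max(merged[-1][1], last))
            else:
                merged.append((first, last))
        return merged

    assert unionranges([(1, 4), (3, 6), (10, 10)]) == [(1, 6), (10, 10)]
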
--- a/hgext/fsmonitor/__init__.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/fsmonitor/__init__.py	Sun Oct 06 09:48:39 2019 -0400
@@ -143,60 +143,60 @@
 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
 # be specifying the version(s) of Mercurial they are tested with, or
 # leave the attribute unspecified.
-testedwith = 'ships-with-hg-core'
+testedwith = b'ships-with-hg-core'
 
 configtable = {}
 configitem = registrar.configitem(configtable)
 
 configitem(
-    'fsmonitor', 'mode', default='on',
+    b'fsmonitor', b'mode', default=b'on',
 )
 configitem(
-    'fsmonitor', 'walk_on_invalidate', default=False,
+    b'fsmonitor', b'walk_on_invalidate', default=False,
 )
 configitem(
-    'fsmonitor', 'timeout', default='2',
+    b'fsmonitor', b'timeout', default=b'2',
 )
 configitem(
-    'fsmonitor', 'blacklistusers', default=list,
+    b'fsmonitor', b'blacklistusers', default=list,
 )
 configitem(
-    'fsmonitor', 'watchman_exe', default='watchman',
+    b'fsmonitor', b'watchman_exe', default=b'watchman',
 )
 configitem(
-    'fsmonitor', 'verbose', default=True, experimental=True,
+    b'fsmonitor', b'verbose', default=True, experimental=True,
 )
 configitem(
-    'experimental', 'fsmonitor.transaction_notify', default=False,
+    b'experimental', b'fsmonitor.transaction_notify', default=False,
 )
 
 # This extension is incompatible with the following blacklisted extensions
 # and will disable itself when encountering one of these:
-_blacklist = ['largefiles', 'eol']
+_blacklist = [b'largefiles', b'eol']
 
 
 def debuginstall(ui, fm):
     fm.write(
-        "fsmonitor-watchman",
-        _("fsmonitor checking for watchman binary... (%s)\n"),
-        ui.configpath("fsmonitor", "watchman_exe"),
+        b"fsmonitor-watchman",
+        _(b"fsmonitor checking for watchman binary... (%s)\n"),
+        ui.configpath(b"fsmonitor", b"watchman_exe"),
     )
     root = tempfile.mkdtemp()
     c = watchmanclient.client(ui, root)
     err = None
     try:
-        v = c.command("version")
+        v = c.command(b"version")
         fm.write(
-            "fsmonitor-watchman-version",
-            _(" watchman binary version %s\n"),
-            v["version"],
+            b"fsmonitor-watchman-version",
+            _(b" watchman binary version %s\n"),
+            v[b"version"],
         )
     except watchmanclient.Unavailable as e:
         err = str(e)
     fm.condwrite(
         err,
-        "fsmonitor-watchman-error",
-        _(" watchman binary missing or broken: %s\n"),
+        b"fsmonitor-watchman-error",
+        _(b" watchman binary missing or broken: %s\n"),
         err,
     )
     return 1 if err else 0
@@ -206,16 +206,16 @@
     """Exception handler for Watchman interaction exceptions"""
     if isinstance(ex, watchmanclient.Unavailable):
         # experimental config: fsmonitor.verbose
-        if ex.warn and ui.configbool('fsmonitor', 'verbose'):
-            if 'illegal_fstypes' not in str(ex):
-                ui.warn(str(ex) + '\n')
+        if ex.warn and ui.configbool(b'fsmonitor', b'verbose'):
+            if b'illegal_fstypes' not in str(ex):
+                ui.warn(str(ex) + b'\n')
         if ex.invalidate:
             state.invalidate()
         # experimental config: fsmonitor.verbose
-        if ui.configbool('fsmonitor', 'verbose'):
-            ui.log('fsmonitor', 'Watchman unavailable: %s\n', ex.msg)
+        if ui.configbool(b'fsmonitor', b'verbose'):
+            ui.log(b'fsmonitor', b'Watchman unavailable: %s\n', ex.msg)
     else:
-        ui.log('fsmonitor', 'Watchman exception: %s\n', ex)
+        ui.log(b'fsmonitor', b'Watchman exception: %s\n', ex)
 
 
 def _hashignore(ignore):
@@ -245,7 +245,7 @@
     try:
         decoded = path.decode(_watchmanencoding)
     except UnicodeDecodeError as e:
-        raise error.Abort(str(e), hint='watchman encoding error')
+        raise error.Abort(str(e), hint=b'watchman encoding error')
 
     try:
         encoded = decoded.encode(_fsencoding, 'strict')
@@ -263,34 +263,34 @@
     subset of files.'''
 
     def bail(reason):
-        self._ui.debug('fsmonitor: fallback to core status, %s\n' % reason)
+        self._ui.debug(b'fsmonitor: fallback to core status, %s\n' % reason)
         return orig(match, subrepos, unknown, ignored, full=True)
 
     if full:
-        return bail('full rewalk requested')
+        return bail(b'full rewalk requested')
     if ignored:
-        return bail('listing ignored files')
+        return bail(b'listing ignored files')
     if not self._watchmanclient.available():
-        return bail('client unavailable')
+        return bail(b'client unavailable')
     state = self._fsmonitorstate
     clock, ignorehash, notefiles = state.get()
     if not clock:
         if state.walk_on_invalidate:
-            return bail('no clock')
+            return bail(b'no clock')
         # Initial NULL clock value, see
         # https://facebook.github.io/watchman/docs/clockspec.html
-        clock = 'c:0:0'
+        clock = b'c:0:0'
         notefiles = []
 
     ignore = self._ignore
     dirignore = self._dirignore
     if unknown:
-        if _hashignore(ignore) != ignorehash and clock != 'c:0:0':
+        if _hashignore(ignore) != ignorehash and clock != b'c:0:0':
             # ignore list changed -- can't rely on Watchman state any more
             if state.walk_on_invalidate:
-                return bail('ignore rules changed')
+                return bail(b'ignore rules changed')
             notefiles = []
-            clock = 'c:0:0'
+            clock = b'c:0:0'
     else:
         # always ignore
         ignore = util.always
@@ -299,7 +299,7 @@
     matchfn = match.matchfn
     matchalways = match.always()
     dmap = self._map
-    if util.safehasattr(dmap, '_map'):
+    if util.safehasattr(dmap, b'_map'):
         # for better performance, directly access the inner dirstate map if the
         # standard dirstate implementation is in use.
         dmap = dmap._map
@@ -339,7 +339,7 @@
     if not work and (exact or skipstep3):
         for s in subrepos:
             del results[s]
-        del results['.hg']
+        del results[b'.hg']
         return results
 
     # step 2: query Watchman
@@ -349,30 +349,34 @@
         # overheads while transferring the data
         self._watchmanclient.settimeout(state.timeout + 0.1)
         result = self._watchmanclient.command(
-            'query',
+            b'query',
             {
-                'fields': ['mode', 'mtime', 'size', 'exists', 'name'],
-                'since': clock,
-                'expression': [
-                    'not',
-                    ['anyof', ['dirname', '.hg'], ['name', '.hg', 'wholename']],
+                b'fields': [b'mode', b'mtime', b'size', b'exists', b'name'],
+                b'since': clock,
+                b'expression': [
+                    b'not',
+                    [
+                        b'anyof',
+                        [b'dirname', b'.hg'],
+                        [b'name', b'.hg', b'wholename'],
+                    ],
                 ],
-                'sync_timeout': int(state.timeout * 1000),
-                'empty_on_fresh_instance': state.walk_on_invalidate,
+                b'sync_timeout': int(state.timeout * 1000),
+                b'empty_on_fresh_instance': state.walk_on_invalidate,
             },
         )
     except Exception as ex:
         _handleunavailable(self._ui, state, ex)
         self._watchmanclient.clearconnection()
-        return bail('exception during run')
+        return bail(b'exception during run')
     else:
         # We need to propagate the last observed clock up so that we
         # can use it for our next query
-        state.setlastclock(result['clock'])
-        if result['is_fresh_instance']:
+        state.setlastclock(result[b'clock'])
+        if result[b'is_fresh_instance']:
             if state.walk_on_invalidate:
                 state.invalidate()
-                return bail('fresh instance')
+                return bail(b'fresh instance')
             fresh_instance = True
             # Ignore any prior notable files from the state info
             notefiles = []
@@ -382,7 +386,7 @@
     if normalize:
         foldmap = dict((normcase(k), k) for k in results)
 
-    switch_slashes = pycompat.ossep == '\\'
+    switch_slashes = pycompat.ossep == b'\\'
     # The order of the results is, strictly speaking, undefined.
     # For case changes on a case insensitive filesystem we may receive
     # two entries, one with exists=True and another with exists=False.
@@ -390,22 +394,22 @@
     # as being happens-after the exists=False entries due to the way that
     # Watchman tracks files.  We use this property to reconcile deletes
     # for name case changes.
-    for entry in result['files']:
-        fname = entry['name']
+    for entry in result[b'files']:
+        fname = entry[b'name']
         if _fixencoding:
             fname = _watchmantofsencoding(fname)
         if switch_slashes:
-            fname = fname.replace('\\', '/')
+            fname = fname.replace(b'\\', b'/')
         if normalize:
             normed = normcase(fname)
             fname = normalize(fname, True, True)
             foldmap[normed] = fname
-        fmode = entry['mode']
-        fexists = entry['exists']
+        fmode = entry[b'mode']
+        fexists = entry[b'exists']
         kind = getkind(fmode)
 
-        if '/.hg/' in fname or fname.endswith('/.hg'):
-            return bail('nested-repo-detected')
+        if b'/.hg/' in fname or fname.endswith(b'/.hg'):
+            return bail(b'nested-repo-detected')
 
         if not fexists:
             # if marked as deleted and we don't already have a change
@@ -488,14 +492,14 @@
 
     for s in subrepos:
         del results[s]
-    del results['.hg']
+    del results[b'.hg']
     return results
 
 
 def overridestatus(
     orig,
     self,
-    node1='.',
+    node1=b'.',
     node2=None,
     match=None,
     ignored=False,
@@ -509,22 +513,22 @@
 
     def _cmpsets(l1, l2):
         try:
-            if 'FSMONITOR_LOG_FILE' in encoding.environ:
-                fn = encoding.environ['FSMONITOR_LOG_FILE']
-                f = open(fn, 'wb')
+            if b'FSMONITOR_LOG_FILE' in encoding.environ:
+                fn = encoding.environ[b'FSMONITOR_LOG_FILE']
+                f = open(fn, b'wb')
             else:
-                fn = 'fsmonitorfail.log'
-                f = self.vfs.open(fn, 'wb')
+                fn = b'fsmonitorfail.log'
+                f = self.vfs.open(fn, b'wb')
         except (IOError, OSError):
-            self.ui.warn(_('warning: unable to write to %s\n') % fn)
+            self.ui.warn(_(b'warning: unable to write to %s\n') % fn)
             return
 
         try:
             for i, (s1, s2) in enumerate(zip(l1, l2)):
                 if set(s1) != set(s2):
-                    f.write('sets at position %d are unequal\n' % i)
-                    f.write('watchman returned: %s\n' % s1)
-                    f.write('stat returned: %s\n' % s2)
+                    f.write(b'sets at position %d are unequal\n' % i)
+                    f.write(b'watchman returned: %s\n' % s1)
+                    f.write(b'stat returned: %s\n' % s2)
         finally:
             f.close()
 
@@ -538,7 +542,7 @@
         ctx2 = self[node2]
 
     working = ctx2.rev() is None
-    parentworking = working and ctx1 == self['.']
+    parentworking = working and ctx1 == self[b'.']
     match = match or matchmod.always()
 
     # Maybe we can use this opportunity to update Watchman's state.
@@ -552,7 +556,7 @@
         parentworking
         and match.always()
         and not isinstance(ctx2, (context.workingcommitctx, context.memctx))
-        and 'HG_PENDING' not in encoding.environ
+        and b'HG_PENDING' not in encoding.environ
     )
 
     try:
@@ -607,7 +611,7 @@
 
     # don't do paranoid checks if we're not going to query Watchman anyway
     full = listclean or match.traversedir is not None
-    if self._fsmonitorstate.mode == 'paranoid' and not full:
+    if self._fsmonitorstate.mode == b'paranoid' and not full:
         # run status again and fall back to the old walk this time
         self.dirstate._fsmonitordisable = True
 
@@ -615,7 +619,7 @@
         quiet = self.ui.quiet
         self.ui.quiet = True
         fout, ferr = self.ui.fout, self.ui.ferr
-        self.ui.fout = self.ui.ferr = open(os.devnull, 'wb')
+        self.ui.fout = self.ui.ferr = open(os.devnull, b'wb')
 
         try:
             rv2 = orig(
@@ -692,20 +696,20 @@
 def wrapdirstate(orig, self):
     ds = orig(self)
     # only override the dirstate when Watchman is available for the repo
-    if util.safehasattr(self, '_fsmonitorstate'):
+    if util.safehasattr(self, b'_fsmonitorstate'):
         makedirstate(self, ds)
     return ds
 
 
 def extsetup(ui):
     extensions.wrapfilecache(
-        localrepo.localrepository, 'dirstate', wrapdirstate
+        localrepo.localrepository, b'dirstate', wrapdirstate
     )
     if pycompat.isdarwin:
         # An assist for avoiding the dangling-symlink fsevents bug
-        extensions.wrapfunction(os, 'symlink', wrapsymlink)
+        extensions.wrapfunction(os, b'symlink', wrapsymlink)
 
-    extensions.wrapfunction(merge, 'update', wrapupdate)
+    extensions.wrapfunction(merge, b'update', wrapupdate)
 
 
 def wrapsymlink(orig, source, link_name):
@@ -756,14 +760,14 @@
         # merge.update is going to take the wlock almost immediately. We are
         # effectively extending the lock around several short sanity checks.
         if self.oldnode is None:
-            self.oldnode = self.repo['.'].node()
+            self.oldnode = self.repo[b'.'].node()
 
         if self.repo.currentwlock() is None:
-            if util.safehasattr(self.repo, 'wlocknostateupdate'):
+            if util.safehasattr(self.repo, b'wlocknostateupdate'):
                 self._lock = self.repo.wlocknostateupdate()
             else:
                 self._lock = self.repo.wlock()
-        self.need_leave = self._state('state-enter', hex(self.oldnode))
+        self.need_leave = self._state(b'state-enter', hex(self.oldnode))
         return self
 
     def __exit__(self, type_, value, tb):
@@ -773,36 +777,36 @@
     def exit(self, abort=False):
         try:
             if self.need_leave:
-                status = 'failed' if abort else 'ok'
+                status = b'failed' if abort else b'ok'
                 if self.newnode is None:
-                    self.newnode = self.repo['.'].node()
+                    self.newnode = self.repo[b'.'].node()
                 if self.distance is None:
                     self.distance = calcdistance(
                         self.repo, self.oldnode, self.newnode
                     )
-                self._state('state-leave', hex(self.newnode), status=status)
+                self._state(b'state-leave', hex(self.newnode), status=status)
         finally:
             self.need_leave = False
             if self._lock:
                 self._lock.release()
 
-    def _state(self, cmd, commithash, status='ok'):
-        if not util.safehasattr(self.repo, '_watchmanclient'):
+    def _state(self, cmd, commithash, status=b'ok'):
+        if not util.safehasattr(self.repo, b'_watchmanclient'):
             return False
         try:
             self.repo._watchmanclient.command(
                 cmd,
                 {
-                    'name': self.name,
-                    'metadata': {
+                    b'name': self.name,
+                    b'metadata': {
                         # the target revision
-                        'rev': commithash,
+                        b'rev': commithash,
                         # approximate number of commits between current and target
-                        'distance': self.distance if self.distance else 0,
+                        b'distance': self.distance if self.distance else 0,
                         # success/failure (only really meaningful for state-leave)
-                        'status': status,
+                        b'status': status,
                         # whether the working copy parent is changing
-                        'partial': self.partial,
+                        b'partial': self.partial,
                     },
                 },
             )
@@ -810,7 +814,7 @@
         except Exception as e:
             # Swallow any errors; fire and forget
             self.repo.ui.log(
-                'watchman', 'Exception %s while running %s\n', e, cmd
+                b'watchman', b'Exception %s while running %s\n', e, cmd
             )
             return False
 
@@ -844,7 +848,7 @@
 
     distance = 0
     partial = True
-    oldnode = repo['.'].node()
+    oldnode = repo[b'.'].node()
     newnode = repo[node].node()
     if matcher is None or matcher.always():
         partial = False
@@ -852,7 +856,7 @@
 
     with state_update(
         repo,
-        name="hg.update",
+        name=b"hg.update",
         oldnode=oldnode,
         newnode=newnode,
         distance=distance,
@@ -873,8 +877,8 @@
 
 def repo_has_depth_one_nested_repo(repo):
     for f in repo.wvfs.listdir():
-        if os.path.isdir(os.path.join(repo.root, f, '.hg')):
-            msg = 'fsmonitor: sub-repository %r detected, fsmonitor disabled\n'
+        if os.path.isdir(os.path.join(repo.root, f, b'.hg')):
+            msg = b'fsmonitor: sub-repository %r detected, fsmonitor disabled\n'
             repo.ui.debug(msg % f)
             return True
     return False
@@ -887,8 +891,8 @@
         if ext in exts:
             ui.warn(
                 _(
-                    'The fsmonitor extension is incompatible with the %s '
-                    'extension and has been disabled.\n'
+                    b'The fsmonitor extension is incompatible with the %s '
+                    b'extension and has been disabled.\n'
                 )
                 % ext
             )
@@ -899,14 +903,14 @@
         #
         # Accessing repo[None].substate can cause a dirstate parse, which
         # is too slow. Instead, look for a file called hgsubstate,
-        if repo.wvfs.exists('.hgsubstate') or repo.wvfs.exists('.hgsub'):
+        if repo.wvfs.exists(b'.hgsubstate') or repo.wvfs.exists(b'.hgsub'):
             return
 
         if repo_has_depth_one_nested_repo(repo):
             return
 
         fsmonitorstate = state.state(repo)
-        if fsmonitorstate.mode == 'off':
+        if fsmonitorstate.mode == b'off':
             return
 
         try:
@@ -918,7 +922,7 @@
         repo._fsmonitorstate = fsmonitorstate
         repo._watchmanclient = client
 
-        dirstate, cached = localrepo.isfilecached(repo, 'dirstate')
+        dirstate, cached = localrepo.isfilecached(repo, b'dirstate')
         if cached:
             # at this point since fsmonitorstate wasn't present,
             # repo.dirstate is not a fsmonitordirstate
@@ -935,7 +939,7 @@
             def wlock(self, *args, **kwargs):
                 l = super(fsmonitorrepo, self).wlock(*args, **kwargs)
                 if not ui.configbool(
-                    "experimental", "fsmonitor.transaction_notify"
+                    b"experimental", b"fsmonitor.transaction_notify"
                 ):
                     return l
                 if l.held != 1:
@@ -951,12 +955,14 @@
 
                 try:
                     l.stateupdate = None
-                    l.stateupdate = state_update(self, name="hg.transaction")
+                    l.stateupdate = state_update(self, name=b"hg.transaction")
                     l.stateupdate.enter()
                     l.releasefn = staterelease
                 except Exception as e:
                     # Swallow any errors; fire and forget
-                    self.ui.log('watchman', 'Exception in state update %s\n', e)
+                    self.ui.log(
+                        b'watchman', b'Exception in state update %s\n', e
+                    )
                 return l
 
         repo.__class__ = fsmonitorrepo
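
The state_update context manager in the hunks above advertises working-copy
transitions to Watchman so that subscribed tools can quiesce while Mercurial
rewrites files. A minimal sketch of the payload it sends with state-enter and
state-leave, assembled from the code above (the hash, name and distance values
here are illustrative, not part of this change):

    payload = {
        b'name': b'hg.update',     # or b'hg.transaction' for transactions
        b'metadata': {
            b'rev': b'0' * 40,     # target revision hash, hex
            b'distance': 0,        # approximate commits between old and new
            b'status': b'ok',      # b'failed' when the transition aborted
            b'partial': False,     # True when only part of the tree changes
        },
    }
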
--- a/hgext/fsmonitor/state.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/fsmonitor/state.py	Sun Oct 06 09:48:39 2019 -0400
@@ -19,7 +19,7 @@
 )
 
 _version = 4
-_versionformat = ">I"
+_versionformat = b">I"
 
 
 class state(object):
@@ -30,15 +30,15 @@
         self._lastclock = None
         self._identity = util.filestat(None)
 
-        self.mode = self._ui.config('fsmonitor', 'mode')
+        self.mode = self._ui.config(b'fsmonitor', b'mode')
         self.walk_on_invalidate = self._ui.configbool(
-            'fsmonitor', 'walk_on_invalidate'
+            b'fsmonitor', b'walk_on_invalidate'
         )
-        self.timeout = float(self._ui.config('fsmonitor', 'timeout'))
+        self.timeout = float(self._ui.config(b'fsmonitor', b'timeout'))
 
     def get(self):
         try:
-            file = self._vfs('fsmonitor.state', 'rb')
+            file = self._vfs(b'fsmonitor.state', b'rb')
         except IOError as inst:
             self._identity = util.filestat(None)
             if inst.errno != errno.ENOENT:
@@ -50,9 +50,9 @@
         versionbytes = file.read(4)
         if len(versionbytes) < 4:
             self._ui.log(
-                'fsmonitor',
-                'fsmonitor: state file only has %d bytes, '
-                'nuking state\n' % len(versionbytes),
+                b'fsmonitor',
+                b'fsmonitor: state file only has %d bytes, '
+                b'nuking state\n' % len(versionbytes),
             )
             self.invalidate()
             return None, None, None
@@ -61,21 +61,21 @@
             if diskversion != _version:
                 # different version, nuke state and start over
                 self._ui.log(
-                    'fsmonitor',
-                    'fsmonitor: version switch from %d to '
-                    '%d, nuking state\n' % (diskversion, _version),
+                    b'fsmonitor',
+                    b'fsmonitor: version switch from %d to '
+                    b'%d, nuking state\n' % (diskversion, _version),
                 )
                 self.invalidate()
                 return None, None, None
 
-            state = file.read().split('\0')
+            state = file.read().split(b'\0')
             # state = hostname\0clock\0ignorehash\0 + list of files, each
             # followed by a \0
             if len(state) < 3:
                 self._ui.log(
-                    'fsmonitor',
-                    'fsmonitor: state file truncated (expected '
-                    '3 chunks, found %d), nuking state\n',
+                    b'fsmonitor',
+                    b'fsmonitor: state file truncated (expected '
+                    b'3 chunks, found %d), nuking state\n',
                     len(state),
                 )
                 self.invalidate()
@@ -85,9 +85,9 @@
             if diskhostname != hostname:
                 # file got moved to a different host
                 self._ui.log(
-                    'fsmonitor',
-                    'fsmonitor: stored hostname "%s" '
-                    'different from current "%s", nuking state\n'
+                    b'fsmonitor',
+                    b'fsmonitor: stored hostname "%s" '
+                    b'different from current "%s", nuking state\n'
                     % (diskhostname, hostname),
                 )
                 self.invalidate()
@@ -110,31 +110,33 @@
 
         # Read the identity from the file on disk rather than from the open file
         # pointer below, because the latter is actually a brand new file.
-        identity = util.filestat.frompath(self._vfs.join('fsmonitor.state'))
+        identity = util.filestat.frompath(self._vfs.join(b'fsmonitor.state'))
         if identity != self._identity:
-            self._ui.debug('skip updating fsmonitor.state: identity mismatch\n')
+            self._ui.debug(
+                b'skip updating fsmonitor.state: identity mismatch\n'
+            )
             return
 
         try:
             file = self._vfs(
-                'fsmonitor.state', 'wb', atomictemp=True, checkambig=True
+                b'fsmonitor.state', b'wb', atomictemp=True, checkambig=True
             )
         except (IOError, OSError):
-            self._ui.warn(_("warning: unable to write out fsmonitor state\n"))
+            self._ui.warn(_(b"warning: unable to write out fsmonitor state\n"))
             return
 
         with file:
             file.write(struct.pack(_versionformat, _version))
-            file.write(socket.gethostname() + '\0')
-            file.write(clock + '\0')
-            file.write(ignorehash + '\0')
+            file.write(socket.gethostname() + b'\0')
+            file.write(clock + b'\0')
+            file.write(ignorehash + b'\0')
             if notefiles:
-                file.write('\0'.join(notefiles))
-                file.write('\0')
+                file.write(b'\0'.join(notefiles))
+                file.write(b'\0')
 
     def invalidate(self):
         try:
-            os.unlink(os.path.join(self._rootdir, '.hg', 'fsmonitor.state'))
+            os.unlink(os.path.join(self._rootdir, b'.hg', b'fsmonitor.state'))
         except OSError as inst:
             if inst.errno != errno.ENOENT:
                 raise
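
For reference, the fsmonitor.state file written by set() above is a four-byte
big-endian version followed by NUL-terminated fields. A minimal parsing sketch
matching the writer above (the helper name is illustrative, not part of this
change):

    import struct

    def parse_fsmonitor_state(data):
        # four-byte big-endian version, per _versionformat = b">I"
        version = struct.unpack(b'>I', data[:4])[0]
        fields = data[4:].split(b'\0')
        hostname, clock, ignorehash = fields[:3]
        # drop the trailing b'' produced by the final NUL
        notefiles = [f for f in fields[3:] if f]
        return version, hostname, clock, ignorehash, notefiles
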
--- a/hgext/fsmonitor/watchmanclient.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/fsmonitor/watchmanclient.py	Sun Oct 06 09:48:39 2019 -0400
@@ -18,15 +18,15 @@
     def __init__(self, msg, warn=True, invalidate=False):
         self.msg = msg
         self.warn = warn
-        if self.msg == 'timed out waiting for response':
+        if self.msg == b'timed out waiting for response':
             self.warn = False
         self.invalidate = invalidate
 
     def __str__(self):
         if self.warn:
-            return 'warning: Watchman unavailable: %s' % self.msg
+            return b'warning: Watchman unavailable: %s' % self.msg
         else:
-            return 'Watchman unavailable: %s' % self.msg
+            return b'Watchman unavailable: %s' % self.msg
 
 
 class WatchmanNoRoot(Unavailable):
@@ -39,10 +39,10 @@
     def __init__(self, ui, root, timeout=1.0):
         err = None
         if not self._user:
-            err = "couldn't get user"
+            err = b"couldn't get user"
             warn = True
-        if self._user in ui.configlist('fsmonitor', 'blacklistusers'):
-            err = 'user %s in blacklist' % self._user
+        if self._user in ui.configlist(b'fsmonitor', b'blacklistusers'):
+            err = b'user %s in blacklist' % self._user
             warn = False
 
         if err:
@@ -60,10 +60,10 @@
             self._watchmanclient.setTimeout(timeout)
 
     def getcurrentclock(self):
-        result = self.command('clock')
-        if not util.safehasattr(result, 'clock'):
+        result = self.command(b'clock')
+        if not util.safehasattr(result, b'clock'):
             raise Unavailable(
-                'clock result is missing clock value', invalidate=True
+                b'clock result is missing clock value', invalidate=True
             )
         return result.clock
 
@@ -86,7 +86,9 @@
         try:
             if self._watchmanclient is None:
                 self._firsttime = False
-                watchman_exe = self._ui.configpath('fsmonitor', 'watchman_exe')
+                watchman_exe = self._ui.configpath(
+                    b'fsmonitor', b'watchman_exe'
+                )
                 self._watchmanclient = pywatchman.client(
                     timeout=self._timeout,
                     useImmutableBser=True,
@@ -94,7 +96,7 @@
                 )
             return self._watchmanclient.query(*watchmanargs)
         except pywatchman.CommandError as ex:
-            if 'unable to resolve root' in ex.msg:
+            if b'unable to resolve root' in ex.msg:
                 raise WatchmanNoRoot(self._root, ex.msg)
             raise Unavailable(ex.msg)
         except pywatchman.WatchmanError as ex:
@@ -107,7 +109,7 @@
             except WatchmanNoRoot:
                 # this 'watch' command can also raise a WatchmanNoRoot if
                 # watchman refuses to accept this root
-                self._command('watch')
+                self._command(b'watch')
                 return self._command(*args)
         except Unavailable:
             # this is in an outer scope to catch Unavailable from any of the
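
Typical use of this client, roughly as the fsmonitor hunks earlier in this
patch wire it up (ui and repo are assumed to come from a surrounding command
context; this is a sketch, not part of the change):

    from hgext.fsmonitor import watchmanclient

    try:
        c = watchmanclient.client(ui, repo.root, timeout=1.0)
        clock = c.getcurrentclock()  # issues 'clock', returns result.clock
    except watchmanclient.Unavailable:
        clock = None  # caller falls back to a regular walk
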
--- a/hgext/githelp.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/githelp.py	Sun Oct 06 09:48:39 2019 -0400
@@ -35,26 +35,26 @@
 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
 # be specifying the version(s) of Mercurial they are tested with, or
 # leave the attribute unspecified.
-testedwith = 'ships-with-hg-core'
+testedwith = b'ships-with-hg-core'
 
 cmdtable = {}
 command = registrar.command(cmdtable)
 
 
 def convert(s):
-    if s.startswith("origin/"):
+    if s.startswith(b"origin/"):
         return s[7:]
-    if 'HEAD' in s:
-        s = s.replace('HEAD', '.')
+    if b'HEAD' in s:
+        s = s.replace(b'HEAD', b'.')
     # HEAD~ in git is .~1 in mercurial
-    s = re.sub('~$', '~1', s)
+    s = re.sub(b'~$', b'~1', s)
     return s
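
For illustration, the rewrites this helper performs (results follow directly
from the code above):

    convert(b'origin/master')  # -> b'master'  (prefix stripped)
    convert(b'HEAD')           # -> b'.'
    convert(b'HEAD~')          # -> b'.~1'     (trailing ~ rewritten to ~1)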
 
 
 @command(
-    'githelp|git',
+    b'githelp|git',
     [],
-    _('hg githelp'),
+    _(b'hg githelp'),
     helpcategory=command.CATEGORY_HELP,
     helpbasic=True,
 )
@@ -64,19 +64,19 @@
     Usage: hg githelp -- <git command>
     '''
 
-    if len(args) == 0 or (len(args) == 1 and args[0] == 'git'):
+    if len(args) == 0 or (len(args) == 1 and args[0] == b'git'):
         raise error.Abort(
-            _('missing git command - ' 'usage: hg githelp -- <git command>')
+            _(b'missing git command - ' b'usage: hg githelp -- <git command>')
         )
 
-    if args[0] == 'git':
+    if args[0] == b'git':
         args = args[1:]
 
     cmd = args[0]
     if not cmd in gitcommands:
-        raise error.Abort(_("error: unknown git command %s") % cmd)
+        raise error.Abort(_(b"error: unknown git command %s") % cmd)
 
-    ui.pager('githelp')
+    ui.pager(b'githelp')
     args = args[1:]
     return gitcommands[cmd](ui, repo, *args, **kwargs)
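
For example, a branch-creating checkout maps to a bookmark, per the checkout
handler later in this patch (output reconstructed, shown for illustration):

    $ hg githelp -- git checkout -b new-feature
    hg bookmark new-feature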
 
@@ -93,24 +93,24 @@
             if r"requires argument" in ex.msg:
                 raise
             if (r'--' + ex.opt) in ex.msg:
-                flag = '--' + pycompat.bytestr(ex.opt)
+                flag = b'--' + pycompat.bytestr(ex.opt)
             elif (r'-' + ex.opt) in ex.msg:
-                flag = '-' + pycompat.bytestr(ex.opt)
+                flag = b'-' + pycompat.bytestr(ex.opt)
             else:
                 raise error.Abort(
-                    _("unknown option %s") % pycompat.bytestr(ex.opt)
+                    _(b"unknown option %s") % pycompat.bytestr(ex.opt)
                 )
             try:
                 args.remove(flag)
             except Exception:
-                msg = _("unknown option '%s' packed with other options")
-                hint = _("please try passing the option as its own flag: -%s")
+                msg = _(b"unknown option '%s' packed with other options")
+                hint = _(b"please try passing the option as its own flag: -%s")
                 raise error.Abort(
                     msg % pycompat.bytestr(ex.opt),
                     hint=hint % pycompat.bytestr(ex.opt),
                 )
 
-            ui.warn(_("ignoring unknown option %s\n") % flag)
+            ui.warn(_(b"ignoring unknown option %s\n") % flag)
 
     args = list([convert(x) for x in args])
     opts = dict(
@@ -130,22 +130,22 @@
         self.opts = {}
 
     def __bytes__(self):
-        cmd = "hg " + self.name
+        cmd = b"hg " + self.name
         if self.opts:
             for k, values in sorted(self.opts.iteritems()):
                 for v in values:
                     if v:
                         if isinstance(v, int):
-                            fmt = ' %s %d'
+                            fmt = b' %s %d'
                         else:
-                            fmt = ' %s %s'
+                            fmt = b' %s %s'
 
                         cmd += fmt % (k, v)
                     else:
-                        cmd += " %s" % (k,)
+                        cmd += b" %s" % (k,)
         if self.args:
-            cmd += " "
-            cmd += " ".join(self.args)
+            cmd += b" "
+            cmd += b" ".join(self.args)
         return cmd
 
     __str__ = encoding.strmethod(__bytes__)
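
The handlers below assemble suggestions through this class; a sketch of the
intended shape (append and __setitem__ live outside this hunk, so their exact
behaviour is inferred from the call sites below):

    cmd = Command(b'update')
    cmd[b'-C'] = None       # bare flag, rendered as ' -C' by __bytes__ above
    cmd.append(b'tip')
    bytes(cmd)              # -> b'hg update -C tip'
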
@@ -170,7 +170,7 @@
         self.right = right
 
     def __str__(self):
-        return "%s && %s" % (self.left, self.right)
+        return b"%s && %s" % (self.left, self.right)
 
     def __and__(self, other):
         return AndCommand(self, other)
@@ -178,113 +178,113 @@
 
 def add(ui, repo, *args, **kwargs):
     cmdoptions = [
-        ('A', 'all', None, ''),
-        ('p', 'patch', None, ''),
+        (b'A', b'all', None, b''),
+        (b'p', b'patch', None, b''),
     ]
     args, opts = parseoptions(ui, cmdoptions, args)
 
-    if opts.get('patch'):
+    if opts.get(b'patch'):
         ui.status(
             _(
-                "note: Mercurial will commit when complete, "
-                "as there is no staging area in Mercurial\n\n"
+                b"note: Mercurial will commit when complete, "
+                b"as there is no staging area in Mercurial\n\n"
             )
         )
-        cmd = Command('commit --interactive')
+        cmd = Command(b'commit --interactive')
     else:
-        cmd = Command("add")
+        cmd = Command(b"add")
 
-        if not opts.get('all'):
+        if not opts.get(b'all'):
             cmd.extend(args)
         else:
             ui.status(
                 _(
-                    "note: use hg addremove to remove files that have "
-                    "been deleted\n\n"
+                    b"note: use hg addremove to remove files that have "
+                    b"been deleted\n\n"
                 )
             )
 
-    ui.status((bytes(cmd)), "\n")
+    ui.status((bytes(cmd)), b"\n")
 
 
 def am(ui, repo, *args, **kwargs):
     cmdoptions = []
     args, opts = parseoptions(ui, cmdoptions, args)
-    cmd = Command('import')
-    ui.status(bytes(cmd), "\n")
+    cmd = Command(b'import')
+    ui.status(bytes(cmd), b"\n")
 
 
 def apply(ui, repo, *args, **kwargs):
     cmdoptions = [
-        ('p', 'p', int, ''),
-        ('', 'directory', '', ''),
+        (b'p', b'p', int, b''),
+        (b'', b'directory', b'', b''),
     ]
     args, opts = parseoptions(ui, cmdoptions, args)
 
-    cmd = Command('import --no-commit')
-    if opts.get('p'):
-        cmd['-p'] = opts.get('p')
-    if opts.get('directory'):
-        cmd['--prefix'] = opts.get('directory')
+    cmd = Command(b'import --no-commit')
+    if opts.get(b'p'):
+        cmd[b'-p'] = opts.get(b'p')
+    if opts.get(b'directory'):
+        cmd[b'--prefix'] = opts.get(b'directory')
     cmd.extend(args)
 
-    ui.status((bytes(cmd)), "\n")
+    ui.status((bytes(cmd)), b"\n")
 
 
 def bisect(ui, repo, *args, **kwargs):
-    ui.status(_("see 'hg help bisect' for how to use bisect\n\n"))
+    ui.status(_(b"see 'hg help bisect' for how to use bisect\n\n"))
 
 
 def blame(ui, repo, *args, **kwargs):
     cmdoptions = []
     args, opts = parseoptions(ui, cmdoptions, args)
-    cmd = Command('annotate -udl')
+    cmd = Command(b'annotate -udl')
     cmd.extend([convert(v) for v in args])
-    ui.status((bytes(cmd)), "\n")
+    ui.status((bytes(cmd)), b"\n")
 
 
 def branch(ui, repo, *args, **kwargs):
     cmdoptions = [
-        ('', 'set-upstream', None, ''),
-        ('', 'set-upstream-to', '', ''),
-        ('d', 'delete', None, ''),
-        ('D', 'delete', None, ''),
-        ('m', 'move', None, ''),
-        ('M', 'move', None, ''),
+        (b'', b'set-upstream', None, b''),
+        (b'', b'set-upstream-to', b'', b''),
+        (b'd', b'delete', None, b''),
+        (b'D', b'delete', None, b''),
+        (b'm', b'move', None, b''),
+        (b'M', b'move', None, b''),
     ]
     args, opts = parseoptions(ui, cmdoptions, args)
 
-    cmd = Command("bookmark")
+    cmd = Command(b"bookmark")
 
-    if opts.get('set_upstream') or opts.get('set_upstream_to'):
-        ui.status(_("Mercurial has no concept of upstream branches\n"))
+    if opts.get(b'set_upstream') or opts.get(b'set_upstream_to'):
+        ui.status(_(b"Mercurial has no concept of upstream branches\n"))
         return
-    elif opts.get('delete'):
-        cmd = Command("strip")
+    elif opts.get(b'delete'):
+        cmd = Command(b"strip")
         for branch in args:
-            cmd['-B'] = branch
+            cmd[b'-B'] = branch
         else:
-            cmd['-B'] = None
-    elif opts.get('move'):
+            cmd[b'-B'] = None
+    elif opts.get(b'move'):
         if len(args) > 0:
             if len(args) > 1:
                 old = args.pop(0)
             else:
                 # shell command to output the active bookmark for the active
                 # revision
-                old = '`hg log -T"{activebookmark}" -r .`'
+                old = b'`hg log -T"{activebookmark}" -r .`'
         else:
-            raise error.Abort(_('missing newbranch argument'))
+            raise error.Abort(_(b'missing newbranch argument'))
         new = args[0]
-        cmd['-m'] = old
+        cmd[b'-m'] = old
         cmd.append(new)
     else:
         if len(args) > 1:
-            cmd['-r'] = args[1]
+            cmd[b'-r'] = args[1]
             cmd.append(args[0])
         elif len(args) == 1:
             cmd.append(args[0])
-    ui.status((bytes(cmd)), "\n")
+    ui.status((bytes(cmd)), b"\n")
 
 
 def ispath(repo, string):
@@ -300,16 +300,16 @@
         return False
 
     cwd = repo.getcwd()
-    if cwd == '':
+    if cwd == b'':
         repopath = string
     else:
-        repopath = cwd + '/' + string
+        repopath = cwd + b'/' + string
 
     exists = repo.wvfs.exists(repopath)
     if exists:
         return True
 
-    manifest = repo['.'].manifest()
+    manifest = repo[b'.'].manifest()
 
     didexist = (repopath in manifest) or manifest.hasdir(repopath)
 
@@ -318,14 +318,14 @@
 
 def checkout(ui, repo, *args, **kwargs):
     cmdoptions = [
-        ('b', 'branch', '', ''),
-        ('B', 'branch', '', ''),
-        ('f', 'force', None, ''),
-        ('p', 'patch', None, ''),
+        (b'b', b'branch', b'', b''),
+        (b'B', b'branch', b'', b''),
+        (b'f', b'force', None, b''),
+        (b'p', b'patch', None, b''),
     ]
     paths = []
-    if '--' in args:
-        sepindex = args.index('--')
+    if b'--' in args:
+        sepindex = args.index(b'--')
         paths.extend(args[sepindex + 1 :])
         args = args[:sepindex]
 
@@ -338,410 +338,410 @@
         rev = args[0]
         paths = args[1:] + paths
 
-    cmd = Command('update')
+    cmd = Command(b'update')
 
-    if opts.get('force'):
+    if opts.get(b'force'):
         if paths or rev:
-            cmd['-C'] = None
+            cmd[b'-C'] = None
 
-    if opts.get('patch'):
-        cmd = Command('revert')
-        cmd['-i'] = None
+    if opts.get(b'patch'):
+        cmd = Command(b'revert')
+        cmd[b'-i'] = None
 
-    if opts.get('branch'):
+    if opts.get(b'branch'):
         if len(args) == 0:
-            cmd = Command('bookmark')
-            cmd.append(opts.get('branch'))
+            cmd = Command(b'bookmark')
+            cmd.append(opts.get(b'branch'))
         else:
             cmd.append(args[0])
-            bookcmd = Command('bookmark')
-            bookcmd.append(opts.get('branch'))
+            bookcmd = Command(b'bookmark')
+            bookcmd.append(opts.get(b'branch'))
             cmd = cmd & bookcmd
     # if there is any path argument supplied, use revert instead of update
     elif len(paths) > 0:
-        ui.status(_("note: use --no-backup to avoid creating .orig files\n\n"))
-        cmd = Command('revert')
-        if opts.get('patch'):
-            cmd['-i'] = None
+        ui.status(_(b"note: use --no-backup to avoid creating .orig files\n\n"))
+        cmd = Command(b'revert')
+        if opts.get(b'patch'):
+            cmd[b'-i'] = None
         if rev:
-            cmd['-r'] = rev
+            cmd[b'-r'] = rev
         cmd.extend(paths)
     elif rev:
-        if opts.get('patch'):
-            cmd['-r'] = rev
+        if opts.get(b'patch'):
+            cmd[b'-r'] = rev
         else:
             cmd.append(rev)
-    elif opts.get('force'):
-        cmd = Command('revert')
-        cmd['--all'] = None
+    elif opts.get(b'force'):
+        cmd = Command(b'revert')
+        cmd[b'--all'] = None
     else:
-        raise error.Abort(_("a commit must be specified"))
+        raise error.Abort(_(b"a commit must be specified"))
 
-    ui.status((bytes(cmd)), "\n")
+    ui.status((bytes(cmd)), b"\n")
 
 
 def cherrypick(ui, repo, *args, **kwargs):
     cmdoptions = [
-        ('', 'continue', None, ''),
-        ('', 'abort', None, ''),
-        ('e', 'edit', None, ''),
+        (b'', b'continue', None, b''),
+        (b'', b'abort', None, b''),
+        (b'e', b'edit', None, b''),
     ]
     args, opts = parseoptions(ui, cmdoptions, args)
 
-    cmd = Command('graft')
+    cmd = Command(b'graft')
 
-    if opts.get('edit'):
-        cmd['--edit'] = None
-    if opts.get('continue'):
-        cmd['--continue'] = None
-    elif opts.get('abort'):
-        ui.status(_("note: hg graft does not have --abort\n\n"))
+    if opts.get(b'edit'):
+        cmd[b'--edit'] = None
+    if opts.get(b'continue'):
+        cmd[b'--continue'] = None
+    elif opts.get(b'abort'):
+        ui.status(_(b"note: hg graft does not have --abort\n\n"))
         return
     else:
         cmd.extend(args)
 
-    ui.status((bytes(cmd)), "\n")
+    ui.status((bytes(cmd)), b"\n")
 
 
 def clean(ui, repo, *args, **kwargs):
     cmdoptions = [
-        ('d', 'd', None, ''),
-        ('f', 'force', None, ''),
-        ('x', 'x', None, ''),
+        (b'd', b'd', None, b''),
+        (b'f', b'force', None, b''),
+        (b'x', b'x', None, b''),
     ]
     args, opts = parseoptions(ui, cmdoptions, args)
 
-    cmd = Command('purge')
-    if opts.get('x'):
-        cmd['--all'] = None
+    cmd = Command(b'purge')
+    if opts.get(b'x'):
+        cmd[b'--all'] = None
     cmd.extend(args)
 
-    ui.status((bytes(cmd)), "\n")
+    ui.status((bytes(cmd)), b"\n")
 
 
 def clone(ui, repo, *args, **kwargs):
     cmdoptions = [
-        ('', 'bare', None, ''),
-        ('n', 'no-checkout', None, ''),
-        ('b', 'branch', '', ''),
+        (b'', b'bare', None, b''),
+        (b'n', b'no-checkout', None, b''),
+        (b'b', b'branch', b'', b''),
     ]
     args, opts = parseoptions(ui, cmdoptions, args)
 
     if len(args) == 0:
-        raise error.Abort(_("a repository to clone must be specified"))
+        raise error.Abort(_(b"a repository to clone must be specified"))
 
-    cmd = Command('clone')
+    cmd = Command(b'clone')
     cmd.append(args[0])
     if len(args) > 1:
         cmd.append(args[1])
 
-    if opts.get('bare'):
-        cmd['-U'] = None
+    if opts.get(b'bare'):
+        cmd[b'-U'] = None
         ui.status(
             _(
-                "note: Mercurial does not have bare clones. "
-                "-U will clone the repo without checking out a commit\n\n"
+                b"note: Mercurial does not have bare clones. "
+                b"-U will clone the repo without checking out a commit\n\n"
             )
         )
-    elif opts.get('no_checkout'):
-        cmd['-U'] = None
+    elif opts.get(b'no_checkout'):
+        cmd[b'-U'] = None
 
-    if opts.get('branch'):
-        cocmd = Command("update")
-        cocmd.append(opts.get('branch'))
+    if opts.get(b'branch'):
+        cocmd = Command(b"update")
+        cocmd.append(opts.get(b'branch'))
         cmd = cmd & cocmd
 
-    ui.status((bytes(cmd)), "\n")
+    ui.status((bytes(cmd)), b"\n")
 
 
 def commit(ui, repo, *args, **kwargs):
     cmdoptions = [
-        ('a', 'all', None, ''),
-        ('m', 'message', '', ''),
-        ('p', 'patch', None, ''),
-        ('C', 'reuse-message', '', ''),
-        ('F', 'file', '', ''),
-        ('', 'author', '', ''),
-        ('', 'date', '', ''),
-        ('', 'amend', None, ''),
-        ('', 'no-edit', None, ''),
+        (b'a', b'all', None, b''),
+        (b'm', b'message', b'', b''),
+        (b'p', b'patch', None, b''),
+        (b'C', b'reuse-message', b'', b''),
+        (b'F', b'file', b'', b''),
+        (b'', b'author', b'', b''),
+        (b'', b'date', b'', b''),
+        (b'', b'amend', None, b''),
+        (b'', b'no-edit', None, b''),
     ]
     args, opts = parseoptions(ui, cmdoptions, args)
 
-    cmd = Command('commit')
-    if opts.get('patch'):
-        cmd = Command('commit --interactive')
+    cmd = Command(b'commit')
+    if opts.get(b'patch'):
+        cmd = Command(b'commit --interactive')
 
-    if opts.get('amend'):
-        if opts.get('no_edit'):
-            cmd = Command('amend')
+    if opts.get(b'amend'):
+        if opts.get(b'no_edit'):
+            cmd = Command(b'amend')
         else:
-            cmd['--amend'] = None
+            cmd[b'--amend'] = None
 
-    if opts.get('reuse_message'):
-        cmd['-M'] = opts.get('reuse_message')
+    if opts.get(b'reuse_message'):
+        cmd[b'-M'] = opts.get(b'reuse_message')
 
-    if opts.get('message'):
-        cmd['-m'] = "'%s'" % (opts.get('message'),)
+    if opts.get(b'message'):
+        cmd[b'-m'] = b"'%s'" % (opts.get(b'message'),)
 
-    if opts.get('all'):
+    if opts.get(b'all'):
         ui.status(
             _(
-                "note: Mercurial doesn't have a staging area, "
-                "so there is no --all. -A will add and remove files "
-                "for you though.\n\n"
+                b"note: Mercurial doesn't have a staging area, "
+                b"so there is no --all. -A will add and remove files "
+                b"for you though.\n\n"
             )
         )
 
-    if opts.get('file'):
-        cmd['-l'] = opts.get('file')
+    if opts.get(b'file'):
+        cmd[b'-l'] = opts.get(b'file')
 
-    if opts.get('author'):
-        cmd['-u'] = opts.get('author')
+    if opts.get(b'author'):
+        cmd[b'-u'] = opts.get(b'author')
 
-    if opts.get('date'):
-        cmd['-d'] = opts.get('date')
+    if opts.get(b'date'):
+        cmd[b'-d'] = opts.get(b'date')
 
     cmd.extend(args)
 
-    ui.status((bytes(cmd)), "\n")
+    ui.status((bytes(cmd)), b"\n")
 
 
 def deprecated(ui, repo, *args, **kwargs):
     ui.warn(
         _(
-            'this command has been deprecated in the git project, '
-            'thus isn\'t supported by this tool\n\n'
+            b'this command has been deprecated in the git project, '
+            b'thus isn\'t supported by this tool\n\n'
         )
     )
 
 
 def diff(ui, repo, *args, **kwargs):
     cmdoptions = [
-        ('a', 'all', None, ''),
-        ('', 'cached', None, ''),
-        ('R', 'reverse', None, ''),
+        (b'a', b'all', None, b''),
+        (b'', b'cached', None, b''),
+        (b'R', b'reverse', None, b''),
     ]
     args, opts = parseoptions(ui, cmdoptions, args)
 
-    cmd = Command('diff')
+    cmd = Command(b'diff')
 
-    if opts.get('cached'):
+    if opts.get(b'cached'):
         ui.status(
             _(
-                'note: Mercurial has no concept of a staging area, '
-                'so --cached does nothing\n\n'
+                b'note: Mercurial has no concept of a staging area, '
+                b'so --cached does nothing\n\n'
             )
         )
 
-    if opts.get('reverse'):
-        cmd['--reverse'] = None
+    if opts.get(b'reverse'):
+        cmd[b'--reverse'] = None
 
     for a in list(args):
         args.remove(a)
         try:
             repo.revs(a)
-            cmd['-r'] = a
+            cmd[b'-r'] = a
         except Exception:
             cmd.append(a)
 
-    ui.status((bytes(cmd)), "\n")
+    ui.status((bytes(cmd)), b"\n")
 
 
 def difftool(ui, repo, *args, **kwargs):
     ui.status(
         _(
-            'Mercurial does not enable external difftool by default. You '
-            'need to enable the extdiff extension in your .hgrc file by adding\n'
-            'extdiff =\n'
-            'to the [extensions] section and then running\n\n'
-            'hg extdiff -p <program>\n\n'
-            'See \'hg help extdiff\' and \'hg help -e extdiff\' for more '
-            'information.\n'
+            b'Mercurial does not enable external difftool by default. You '
+            b'need to enable the extdiff extension in your .hgrc file by adding\n'
+            b'extdiff =\n'
+            b'to the [extensions] section and then running\n\n'
+            b'hg extdiff -p <program>\n\n'
+            b'See \'hg help extdiff\' and \'hg help -e extdiff\' for more '
+            b'information.\n'
         )
     )
 
 
 def fetch(ui, repo, *args, **kwargs):
     cmdoptions = [
-        ('', 'all', None, ''),
-        ('f', 'force', None, ''),
+        (b'', b'all', None, b''),
+        (b'f', b'force', None, b''),
     ]
     args, opts = parseoptions(ui, cmdoptions, args)
 
-    cmd = Command('pull')
+    cmd = Command(b'pull')
 
     if len(args) > 0:
         cmd.append(args[0])
         if len(args) > 1:
             ui.status(
                 _(
-                    "note: Mercurial doesn't have refspecs. "
-                    "-r can be used to specify which commits you want to "
-                    "pull. -B can be used to specify which bookmark you "
-                    "want to pull.\n\n"
+                    b"note: Mercurial doesn't have refspecs. "
+                    b"-r can be used to specify which commits you want to "
+                    b"pull. -B can be used to specify which bookmark you "
+                    b"want to pull.\n\n"
                 )
             )
             for v in args[1:]:
                 if v in repo._bookmarks:
-                    cmd['-B'] = v
+                    cmd[b'-B'] = v
                 else:
-                    cmd['-r'] = v
+                    cmd[b'-r'] = v
 
-    ui.status((bytes(cmd)), "\n")
+    ui.status((bytes(cmd)), b"\n")
 
 
 def grep(ui, repo, *args, **kwargs):
     cmdoptions = []
     args, opts = parseoptions(ui, cmdoptions, args)
 
-    cmd = Command('grep')
+    cmd = Command(b'grep')
 
     # For basic usage, git grep and hg grep are the same. They both have the
     # pattern first, followed by paths.
     cmd.extend(args)
 
-    ui.status((bytes(cmd)), "\n")
+    ui.status((bytes(cmd)), b"\n")
 
 
 def init(ui, repo, *args, **kwargs):
     cmdoptions = []
     args, opts = parseoptions(ui, cmdoptions, args)
 
-    cmd = Command('init')
+    cmd = Command(b'init')
 
     if len(args) > 0:
         cmd.append(args[0])
 
-    ui.status((bytes(cmd)), "\n")
+    ui.status((bytes(cmd)), b"\n")
 
 
 def log(ui, repo, *args, **kwargs):
     cmdoptions = [
-        ('', 'follow', None, ''),
-        ('', 'decorate', None, ''),
-        ('n', 'number', '', ''),
-        ('1', '1', None, ''),
-        ('', 'pretty', '', ''),
-        ('', 'format', '', ''),
-        ('', 'oneline', None, ''),
-        ('', 'stat', None, ''),
-        ('', 'graph', None, ''),
-        ('p', 'patch', None, ''),
+        (b'', b'follow', None, b''),
+        (b'', b'decorate', None, b''),
+        (b'n', b'number', b'', b''),
+        (b'1', b'1', None, b''),
+        (b'', b'pretty', b'', b''),
+        (b'', b'format', b'', b''),
+        (b'', b'oneline', None, b''),
+        (b'', b'stat', None, b''),
+        (b'', b'graph', None, b''),
+        (b'p', b'patch', None, b''),
     ]
     args, opts = parseoptions(ui, cmdoptions, args)
     ui.status(
         _(
-            'note: -v prints the entire commit message like Git does. To '
-            'print just the first line, drop the -v.\n\n'
+            b'note: -v prints the entire commit message like Git does. To '
+            b'print just the first line, drop the -v.\n\n'
         )
     )
     ui.status(
         _(
-            "note: see hg help revset for information on how to filter "
-            "log output\n\n"
+            b"note: see hg help revset for information on how to filter "
+            b"log output\n\n"
         )
     )
 
-    cmd = Command('log')
-    cmd['-v'] = None
+    cmd = Command(b'log')
+    cmd[b'-v'] = None
 
-    if opts.get('number'):
-        cmd['-l'] = opts.get('number')
-    if opts.get('1'):
-        cmd['-l'] = '1'
-    if opts.get('stat'):
-        cmd['--stat'] = None
-    if opts.get('graph'):
-        cmd['-G'] = None
-    if opts.get('patch'):
-        cmd['-p'] = None
+    if opts.get(b'number'):
+        cmd[b'-l'] = opts.get(b'number')
+    if opts.get(b'1'):
+        cmd[b'-l'] = b'1'
+    if opts.get(b'stat'):
+        cmd[b'--stat'] = None
+    if opts.get(b'graph'):
+        cmd[b'-G'] = None
+    if opts.get(b'patch'):
+        cmd[b'-p'] = None
 
-    if opts.get('pretty') or opts.get('format') or opts.get('oneline'):
-        format = opts.get('format', '')
-        if 'format:' in format:
+    if opts.get(b'pretty') or opts.get(b'format') or opts.get(b'oneline'):
+        format = opts.get(b'format', b'')
+        if b'format:' in format:
             ui.status(
                 _(
-                    "note: --format format:??? equates to Mercurial's "
-                    "--template. See hg help templates for more info.\n\n"
+                    b"note: --format format:??? equates to Mercurial's "
+                    b"--template. See hg help templates for more info.\n\n"
                 )
             )
-            cmd['--template'] = '???'
+            cmd[b'--template'] = b'???'
         else:
             ui.status(
                 _(
-                    "note: --pretty/format/oneline equate to Mercurial's "
-                    "--style or --template. See hg help templates for "
-                    "more info.\n\n"
+                    b"note: --pretty/format/oneline equate to Mercurial's "
+                    b"--style or --template. See hg help templates for "
+                    b"more info.\n\n"
                 )
             )
-            cmd['--style'] = '???'
+            cmd[b'--style'] = b'???'
 
     if len(args) > 0:
-        if '..' in args[0]:
-            since, until = args[0].split('..')
-            cmd['-r'] = "'%s::%s'" % (since, until)
+        if b'..' in args[0]:
+            since, until = args[0].split(b'..')
+            cmd[b'-r'] = b"'%s::%s'" % (since, until)
             del args[0]
         cmd.extend(args)
 
-    ui.status((bytes(cmd)), "\n")
+    ui.status((bytes(cmd)), b"\n")
 
 
 def lsfiles(ui, repo, *args, **kwargs):
     cmdoptions = [
-        ('c', 'cached', None, ''),
-        ('d', 'deleted', None, ''),
-        ('m', 'modified', None, ''),
-        ('o', 'others', None, ''),
-        ('i', 'ignored', None, ''),
-        ('s', 'stage', None, ''),
-        ('z', '_zero', None, ''),
+        (b'c', b'cached', None, b''),
+        (b'd', b'deleted', None, b''),
+        (b'm', b'modified', None, b''),
+        (b'o', b'others', None, b''),
+        (b'i', b'ignored', None, b''),
+        (b's', b'stage', None, b''),
+        (b'z', b'_zero', None, b''),
     ]
     args, opts = parseoptions(ui, cmdoptions, args)
 
     if (
-        opts.get('modified')
-        or opts.get('deleted')
-        or opts.get('others')
-        or opts.get('ignored')
+        opts.get(b'modified')
+        or opts.get(b'deleted')
+        or opts.get(b'others')
+        or opts.get(b'ignored')
     ):
-        cmd = Command('status')
-        if opts.get('deleted'):
-            cmd['-d'] = None
-        if opts.get('modified'):
-            cmd['-m'] = None
-        if opts.get('others'):
-            cmd['-o'] = None
-        if opts.get('ignored'):
-            cmd['-i'] = None
+        cmd = Command(b'status')
+        if opts.get(b'deleted'):
+            cmd[b'-d'] = None
+        if opts.get(b'modified'):
+            cmd[b'-m'] = None
+        if opts.get(b'others'):
+            cmd[b'-o'] = None
+        if opts.get(b'ignored'):
+            cmd[b'-i'] = None
     else:
-        cmd = Command('files')
-    if opts.get('stage'):
+        cmd = Command(b'files')
+    if opts.get(b'stage'):
         ui.status(
             _(
-                "note: Mercurial doesn't have a staging area, ignoring "
-                "--stage\n"
+                b"note: Mercurial doesn't have a staging area, ignoring "
+                b"--stage\n"
             )
         )
-    if opts.get('_zero'):
-        cmd['-0'] = None
-    cmd.append('.')
+    if opts.get(b'_zero'):
+        cmd[b'-0'] = None
+    cmd.append(b'.')
     for include in args:
-        cmd['-I'] = procutil.shellquote(include)
+        cmd[b'-I'] = procutil.shellquote(include)
 
-    ui.status((bytes(cmd)), "\n")
+    ui.status((bytes(cmd)), b"\n")
 
 
 def merge(ui, repo, *args, **kwargs):
     cmdoptions = []
     args, opts = parseoptions(ui, cmdoptions, args)
 
-    cmd = Command('merge')
+    cmd = Command(b'merge')
 
     if len(args) > 0:
         cmd.append(args[len(args) - 1])
 
-    ui.status((bytes(cmd)), "\n")
+    ui.status((bytes(cmd)), b"\n")
 
 
 def mergebase(ui, repo, *args, **kwargs):
@@ -749,228 +749,228 @@
     args, opts = parseoptions(ui, cmdoptions, args)
 
     if len(args) != 2:
-        args = ['A', 'B']
+        args = [b'A', b'B']
 
     cmd = Command(
-        "log -T '{node}\\n' -r 'ancestor(%s,%s)'" % (args[0], args[1])
+        b"log -T '{node}\\n' -r 'ancestor(%s,%s)'" % (args[0], args[1])
     )
 
     ui.status(
-        _('note: ancestors() is part of the revset language\n'),
-        _("(learn more about revsets with 'hg help revsets')\n\n"),
+        _(b'note: ancestors() is part of the revset language\n'),
+        _(b"(learn more about revsets with 'hg help revsets')\n\n"),
     )
-    ui.status((bytes(cmd)), "\n")
+    ui.status((bytes(cmd)), b"\n")
 
 
 def mergetool(ui, repo, *args, **kwargs):
     cmdoptions = []
     args, opts = parseoptions(ui, cmdoptions, args)
 
-    cmd = Command("resolve")
+    cmd = Command(b"resolve")
 
     if len(args) == 0:
-        cmd['--all'] = None
+        cmd[b'--all'] = None
     cmd.extend(args)
-    ui.status((bytes(cmd)), "\n")
+    ui.status((bytes(cmd)), b"\n")
 
 
 def mv(ui, repo, *args, **kwargs):
     cmdoptions = [
-        ('f', 'force', None, ''),
-        ('n', 'dry-run', None, ''),
+        (b'f', b'force', None, b''),
+        (b'n', b'dry-run', None, b''),
     ]
     args, opts = parseoptions(ui, cmdoptions, args)
 
-    cmd = Command('mv')
+    cmd = Command(b'mv')
     cmd.extend(args)
 
-    if opts.get('force'):
-        cmd['-f'] = None
-    if opts.get('dry_run'):
-        cmd['-n'] = None
+    if opts.get(b'force'):
+        cmd[b'-f'] = None
+    if opts.get(b'dry_run'):
+        cmd[b'-n'] = None
 
-    ui.status((bytes(cmd)), "\n")
+    ui.status((bytes(cmd)), b"\n")
 
 
 def pull(ui, repo, *args, **kwargs):
     cmdoptions = [
-        ('', 'all', None, ''),
-        ('f', 'force', None, ''),
-        ('r', 'rebase', None, ''),
+        (b'', b'all', None, b''),
+        (b'f', b'force', None, b''),
+        (b'r', b'rebase', None, b''),
     ]
     args, opts = parseoptions(ui, cmdoptions, args)
 
-    cmd = Command('pull')
-    cmd['--rebase'] = None
+    cmd = Command(b'pull')
+    cmd[b'--rebase'] = None
 
     if len(args) > 0:
         cmd.append(args[0])
         if len(args) > 1:
             ui.status(
                 _(
-                    "note: Mercurial doesn't have refspecs. "
-                    "-r can be used to specify which commits you want to "
-                    "pull. -B can be used to specify which bookmark you "
-                    "want to pull.\n\n"
+                    b"note: Mercurial doesn't have refspecs. "
+                    b"-r can be used to specify which commits you want to "
+                    b"pull. -B can be used to specify which bookmark you "
+                    b"want to pull.\n\n"
                 )
             )
             for v in args[1:]:
                 if v in repo._bookmarks:
-                    cmd['-B'] = v
+                    cmd[b'-B'] = v
                 else:
-                    cmd['-r'] = v
+                    cmd[b'-r'] = v
 
-    ui.status((bytes(cmd)), "\n")
+    ui.status((bytes(cmd)), b"\n")
 
 
 def push(ui, repo, *args, **kwargs):
     cmdoptions = [
-        ('', 'all', None, ''),
-        ('f', 'force', None, ''),
+        (b'', b'all', None, b''),
+        (b'f', b'force', None, b''),
     ]
     args, opts = parseoptions(ui, cmdoptions, args)
 
-    cmd = Command('push')
+    cmd = Command(b'push')
 
     if len(args) > 0:
         cmd.append(args[0])
         if len(args) > 1:
             ui.status(
                 _(
-                    "note: Mercurial doesn't have refspecs. "
-                    "-r can be used to specify which commits you want "
-                    "to push. -B can be used to specify which bookmark "
-                    "you want to push.\n\n"
+                    b"note: Mercurial doesn't have refspecs. "
+                    b"-r can be used to specify which commits you want "
+                    b"to push. -B can be used to specify which bookmark "
+                    b"you want to push.\n\n"
                 )
             )
             for v in args[1:]:
                 if v in repo._bookmarks:
-                    cmd['-B'] = v
+                    cmd[b'-B'] = v
                 else:
-                    cmd['-r'] = v
+                    cmd[b'-r'] = v
 
-    if opts.get('force'):
-        cmd['-f'] = None
+    if opts.get(b'force'):
+        cmd[b'-f'] = None
 
-    ui.status((bytes(cmd)), "\n")
+    ui.status((bytes(cmd)), b"\n")
 
 
 def rebase(ui, repo, *args, **kwargs):
     cmdoptions = [
-        ('', 'all', None, ''),
-        ('i', 'interactive', None, ''),
-        ('', 'onto', '', ''),
-        ('', 'abort', None, ''),
-        ('', 'continue', None, ''),
-        ('', 'skip', None, ''),
+        (b'', b'all', None, b''),
+        (b'i', b'interactive', None, b''),
+        (b'', b'onto', b'', b''),
+        (b'', b'abort', None, b''),
+        (b'', b'continue', None, b''),
+        (b'', b'skip', None, b''),
     ]
     args, opts = parseoptions(ui, cmdoptions, args)
 
-    if opts.get('interactive'):
+    if opts.get(b'interactive'):
         ui.status(
             _(
-                "note: hg histedit does not perform a rebase. "
-                "It just edits history.\n\n"
+                b"note: hg histedit does not perform a rebase. "
+                b"It just edits history.\n\n"
             )
         )
-        cmd = Command('histedit')
+        cmd = Command(b'histedit')
         if len(args) > 0:
             ui.status(
                 _(
-                    "also note: 'hg histedit' will automatically detect"
-                    " your stack, so no second argument is necessary\n\n"
+                    b"also note: 'hg histedit' will automatically detect"
+                    b" your stack, so no second argument is necessary\n\n"
                 )
             )
-        ui.status((bytes(cmd)), "\n")
+        ui.status((bytes(cmd)), b"\n")
         return
 
-    if opts.get('skip'):
-        cmd = Command('revert --all -r .')
-        ui.status((bytes(cmd)), "\n")
+    if opts.get(b'skip'):
+        cmd = Command(b'revert --all -r .')
+        ui.status((bytes(cmd)), b"\n")
 
-    cmd = Command('rebase')
+    cmd = Command(b'rebase')
 
-    if opts.get('continue') or opts.get('skip'):
-        cmd['--continue'] = None
-    if opts.get('abort'):
-        cmd['--abort'] = None
+    if opts.get(b'continue') or opts.get(b'skip'):
+        cmd[b'--continue'] = None
+    if opts.get(b'abort'):
+        cmd[b'--abort'] = None
 
-    if opts.get('onto'):
+    if opts.get(b'onto'):
         ui.status(
             _(
-                "note: if you're trying to lift a commit off one branch, "
-                "try hg rebase -d <destination commit> -s <commit to be "
-                "lifted>\n\n"
+                b"note: if you're trying to lift a commit off one branch, "
+                b"try hg rebase -d <destination commit> -s <commit to be "
+                b"lifted>\n\n"
             )
         )
-        cmd['-d'] = convert(opts.get('onto'))
+        cmd[b'-d'] = convert(opts.get(b'onto'))
         if len(args) < 2:
-            raise error.Abort(_("expected format: git rebase --onto X Y Z"))
-        cmd['-s'] = "'::%s - ::%s'" % (convert(args[1]), convert(args[0]))
+            raise error.Abort(_(b"expected format: git rebase --onto X Y Z"))
+        cmd[b'-s'] = b"'::%s - ::%s'" % (convert(args[1]), convert(args[0]))
     else:
         if len(args) == 1:
-            cmd['-d'] = convert(args[0])
+            cmd[b'-d'] = convert(args[0])
         elif len(args) == 2:
-            cmd['-d'] = convert(args[0])
-            cmd['-b'] = convert(args[1])
+            cmd[b'-d'] = convert(args[0])
+            cmd[b'-b'] = convert(args[1])
 
-    ui.status((bytes(cmd)), "\n")
+    ui.status((bytes(cmd)), b"\n")
 
 
 def reflog(ui, repo, *args, **kwargs):
     cmdoptions = [
-        ('', 'all', None, ''),
+        (b'', b'all', None, b''),
     ]
     args, opts = parseoptions(ui, cmdoptions, args)
 
-    cmd = Command('journal')
-    if opts.get('all'):
-        cmd['--all'] = None
+    cmd = Command(b'journal')
+    if opts.get(b'all'):
+        cmd[b'--all'] = None
     if len(args) > 0:
         cmd.append(args[0])
 
-    ui.status(bytes(cmd), "\n\n")
+    ui.status(bytes(cmd), b"\n\n")
     ui.status(
         _(
-            "note: in hg commits can be deleted from repo but we always"
-            " have backups\n"
+            b"note: in hg commits can be deleted from repo but we always"
+            b" have backups\n"
         )
     )
 
 
 def reset(ui, repo, *args, **kwargs):
     cmdoptions = [
-        ('', 'soft', None, ''),
-        ('', 'hard', None, ''),
-        ('', 'mixed', None, ''),
+        (b'', b'soft', None, b''),
+        (b'', b'hard', None, b''),
+        (b'', b'mixed', None, b''),
     ]
     args, opts = parseoptions(ui, cmdoptions, args)
 
-    commit = convert(args[0] if len(args) > 0 else '.')
-    hard = opts.get('hard')
+    commit = convert(args[0] if len(args) > 0 else b'.')
+    hard = opts.get(b'hard')
 
-    if opts.get('mixed'):
+    if opts.get(b'mixed'):
         ui.status(
             _(
-                'note: --mixed has no meaning since Mercurial has no '
-                'staging area\n\n'
+                b'note: --mixed has no meaning since Mercurial has no '
+                b'staging area\n\n'
             )
         )
-    if opts.get('soft'):
+    if opts.get(b'soft'):
         ui.status(
             _(
-                'note: --soft has no meaning since Mercurial has no '
-                'staging area\n\n'
+                b'note: --soft has no meaning since Mercurial has no '
+                b'staging area\n\n'
             )
         )
 
-    cmd = Command('update')
+    cmd = Command(b'update')
     if hard:
-        cmd.append('--clean')
+        cmd.append(b'--clean')
 
     cmd.append(commit)
 
-    ui.status((bytes(cmd)), "\n")
+    ui.status((bytes(cmd)), b"\n")
 
 
 def revert(ui, repo, *args, **kwargs):
@@ -980,158 +980,158 @@
     if len(args) > 1:
         ui.status(
             _(
-                "note: hg backout doesn't support multiple commits at "
-                "once\n\n"
+                b"note: hg backout doesn't support multiple commits at "
+                b"once\n\n"
             )
         )
 
-    cmd = Command('backout')
+    cmd = Command(b'backout')
     if args:
         cmd.append(args[0])
 
-    ui.status((bytes(cmd)), "\n")
+    ui.status((bytes(cmd)), b"\n")
 
 
 def revparse(ui, repo, *args, **kwargs):
     cmdoptions = [
-        ('', 'show-cdup', None, ''),
-        ('', 'show-toplevel', None, ''),
+        (b'', b'show-cdup', None, b''),
+        (b'', b'show-toplevel', None, b''),
     ]
     args, opts = parseoptions(ui, cmdoptions, args)
 
-    if opts.get('show_cdup') or opts.get('show_toplevel'):
-        cmd = Command('root')
-        if opts.get('show_cdup'):
-            ui.status(_("note: hg root prints the root of the repository\n\n"))
-        ui.status((bytes(cmd)), "\n")
+    if opts.get(b'show_cdup') or opts.get(b'show_toplevel'):
+        cmd = Command(b'root')
+        if opts.get(b'show_cdup'):
+            ui.status(_(b"note: hg root prints the root of the repository\n\n"))
+        ui.status((bytes(cmd)), b"\n")
     else:
-        ui.status(_("note: see hg help revset for how to refer to commits\n"))
+        ui.status(_(b"note: see hg help revset for how to refer to commits\n"))
 
 
 def rm(ui, repo, *args, **kwargs):
     cmdoptions = [
-        ('f', 'force', None, ''),
-        ('n', 'dry-run', None, ''),
+        (b'f', b'force', None, b''),
+        (b'n', b'dry-run', None, b''),
     ]
     args, opts = parseoptions(ui, cmdoptions, args)
 
-    cmd = Command('rm')
+    cmd = Command(b'rm')
     cmd.extend(args)
 
-    if opts.get('force'):
-        cmd['-f'] = None
-    if opts.get('dry_run'):
-        cmd['-n'] = None
+    if opts.get(b'force'):
+        cmd[b'-f'] = None
+    if opts.get(b'dry_run'):
+        cmd[b'-n'] = None
 
-    ui.status((bytes(cmd)), "\n")
+    ui.status((bytes(cmd)), b"\n")
 
 
 def show(ui, repo, *args, **kwargs):
     cmdoptions = [
-        ('', 'name-status', None, ''),
-        ('', 'pretty', '', ''),
-        ('U', 'unified', int, ''),
+        (b'', b'name-status', None, b''),
+        (b'', b'pretty', b'', b''),
+        (b'U', b'unified', int, b''),
     ]
     args, opts = parseoptions(ui, cmdoptions, args)
 
-    if opts.get('name_status'):
-        if opts.get('pretty') == 'format:':
-            cmd = Command('status')
-            cmd['--change'] = '.'
+    if opts.get(b'name_status'):
+        if opts.get(b'pretty') == b'format:':
+            cmd = Command(b'status')
+            cmd[b'--change'] = b'.'
         else:
-            cmd = Command('log')
-            cmd.append('--style status')
-            cmd.append('-r .')
+            cmd = Command(b'log')
+            cmd.append(b'--style status')
+            cmd.append(b'-r .')
     elif len(args) > 0:
         if ispath(repo, args[0]):
-            cmd = Command('cat')
+            cmd = Command(b'cat')
         else:
-            cmd = Command('export')
+            cmd = Command(b'export')
         cmd.extend(args)
-        if opts.get('unified'):
-            cmd.append('--config diff.unified=%d' % (opts['unified'],))
-    elif opts.get('unified'):
-        cmd = Command('export')
-        cmd.append('--config diff.unified=%d' % (opts['unified'],))
+        if opts.get(b'unified'):
+            cmd.append(b'--config diff.unified=%d' % (opts[b'unified'],))
+    elif opts.get(b'unified'):
+        cmd = Command(b'export')
+        cmd.append(b'--config diff.unified=%d' % (opts[b'unified'],))
     else:
-        cmd = Command('export')
+        cmd = Command(b'export')
 
-    ui.status((bytes(cmd)), "\n")
+    ui.status((bytes(cmd)), b"\n")
 
 
 def stash(ui, repo, *args, **kwargs):
     cmdoptions = [
-        ('p', 'patch', None, ''),
+        (b'p', b'patch', None, b''),
     ]
     args, opts = parseoptions(ui, cmdoptions, args)
 
-    cmd = Command('shelve')
+    cmd = Command(b'shelve')
     action = args[0] if len(args) > 0 else None
 
-    if action == 'list':
-        cmd['-l'] = None
-        if opts.get('patch'):
-            cmd['-p'] = None
-    elif action == 'show':
-        if opts.get('patch'):
-            cmd['-p'] = None
+    if action == b'list':
+        cmd[b'-l'] = None
+        if opts.get(b'patch'):
+            cmd[b'-p'] = None
+    elif action == b'show':
+        if opts.get(b'patch'):
+            cmd[b'-p'] = None
         else:
-            cmd['--stat'] = None
+            cmd[b'--stat'] = None
         if len(args) > 1:
             cmd.append(args[1])
-    elif action == 'clear':
-        cmd['--cleanup'] = None
-    elif action == 'drop':
-        cmd['-d'] = None
+    elif action == b'clear':
+        cmd[b'--cleanup'] = None
+    elif action == b'drop':
+        cmd[b'-d'] = None
         if len(args) > 1:
             cmd.append(args[1])
         else:
-            cmd.append('<shelve name>')
-    elif action == 'pop' or action == 'apply':
-        cmd = Command('unshelve')
+            cmd.append(b'<shelve name>')
+    elif action == b'pop' or action == b'apply':
+        cmd = Command(b'unshelve')
         if len(args) > 1:
             cmd.append(args[1])
-        if action == 'apply':
-            cmd['--keep'] = None
-    elif action == 'branch' or action == 'create':
+        if action == b'apply':
+            cmd[b'--keep'] = None
+    elif action == b'branch' or action == b'create':
         ui.status(
             _(
-                "note: Mercurial doesn't have equivalents to the "
-                "git stash branch or create actions\n\n"
+                b"note: Mercurial doesn't have equivalents to the "
+                b"git stash branch or create actions\n\n"
             )
         )
         return
     else:
         if len(args) > 0:
-            if args[0] != 'save':
-                cmd['--name'] = args[0]
+            if args[0] != b'save':
+                cmd[b'--name'] = args[0]
             elif len(args) > 1:
-                cmd['--name'] = args[1]
+                cmd[b'--name'] = args[1]
 
-    ui.status((bytes(cmd)), "\n")
+    ui.status((bytes(cmd)), b"\n")
 
 
 def status(ui, repo, *args, **kwargs):
     cmdoptions = [
-        ('', 'ignored', None, ''),
+        (b'', b'ignored', None, b''),
     ]
     args, opts = parseoptions(ui, cmdoptions, args)
 
-    cmd = Command('status')
+    cmd = Command(b'status')
     cmd.extend(args)
 
-    if opts.get('ignored'):
-        cmd['-i'] = None
+    if opts.get(b'ignored'):
+        cmd[b'-i'] = None
 
-    ui.status((bytes(cmd)), "\n")
+    ui.status((bytes(cmd)), b"\n")
 
 
 def svn(ui, repo, *args, **kwargs):
     if not args:
-        raise error.Abort(_('missing svn command'))
+        raise error.Abort(_(b'missing svn command'))
     svncmd = args[0]
     if svncmd not in gitsvncommands:
-        raise error.Abort(_('unknown git svn command "%s"') % svncmd)
+        raise error.Abort(_(b'unknown git svn command "%s"') % svncmd)
 
     args = args[1:]
     return gitsvncommands[svncmd](ui, repo, *args, **kwargs)
@@ -1141,19 +1141,19 @@
     cmdoptions = []
     args, opts = parseoptions(ui, cmdoptions, args)
 
-    cmd = Command('push')
+    cmd = Command(b'push')
 
-    ui.status((bytes(cmd)), "\n")
+    ui.status((bytes(cmd)), b"\n")
 
 
 def svnfetch(ui, repo, *args, **kwargs):
     cmdoptions = []
     args, opts = parseoptions(ui, cmdoptions, args)
 
-    cmd = Command('pull')
-    cmd.append('default-push')
+    cmd = Command(b'pull')
+    cmd.append(b'default-push')
 
-    ui.status((bytes(cmd)), "\n")
+    ui.status((bytes(cmd)), b"\n")
 
 
 def svnfindrev(ui, repo, *args, **kwargs):
@@ -1161,101 +1161,101 @@
     args, opts = parseoptions(ui, cmdoptions, args)
 
     if not args:
-        raise error.Abort(_('missing find-rev argument'))
+        raise error.Abort(_(b'missing find-rev argument'))
 
-    cmd = Command('log')
-    cmd['-r'] = args[0]
+    cmd = Command(b'log')
+    cmd[b'-r'] = args[0]
 
-    ui.status((bytes(cmd)), "\n")
+    ui.status((bytes(cmd)), b"\n")
 
 
 def svnrebase(ui, repo, *args, **kwargs):
     cmdoptions = [
-        ('l', 'local', None, ''),
+        (b'l', b'local', None, b''),
     ]
     args, opts = parseoptions(ui, cmdoptions, args)
 
-    pullcmd = Command('pull')
-    pullcmd.append('default-push')
-    rebasecmd = Command('rebase')
-    rebasecmd.append('tip')
+    pullcmd = Command(b'pull')
+    pullcmd.append(b'default-push')
+    rebasecmd = Command(b'rebase')
+    rebasecmd.append(b'tip')
 
     cmd = pullcmd & rebasecmd
 
-    ui.status((bytes(cmd)), "\n")
+    ui.status((bytes(cmd)), b"\n")
 
 
 def tag(ui, repo, *args, **kwargs):
     cmdoptions = [
-        ('f', 'force', None, ''),
-        ('l', 'list', None, ''),
-        ('d', 'delete', None, ''),
+        (b'f', b'force', None, b''),
+        (b'l', b'list', None, b''),
+        (b'd', b'delete', None, b''),
     ]
     args, opts = parseoptions(ui, cmdoptions, args)
 
-    if opts.get('list'):
-        cmd = Command('tags')
+    if opts.get(b'list'):
+        cmd = Command(b'tags')
     else:
-        cmd = Command('tag')
+        cmd = Command(b'tag')
 
         if not args:
-            raise error.Abort(_('missing tag argument'))
+            raise error.Abort(_(b'missing tag argument'))
 
         cmd.append(args[0])
         if len(args) > 1:
-            cmd['-r'] = args[1]
+            cmd[b'-r'] = args[1]
 
-        if opts.get('delete'):
-            cmd['--remove'] = None
+        if opts.get(b'delete'):
+            cmd[b'--remove'] = None
 
-        if opts.get('force'):
-            cmd['-f'] = None
+        if opts.get(b'force'):
+            cmd[b'-f'] = None
 
-    ui.status((bytes(cmd)), "\n")
+    ui.status((bytes(cmd)), b"\n")
 
 
 gitcommands = {
-    'add': add,
-    'am': am,
-    'apply': apply,
-    'bisect': bisect,
-    'blame': blame,
-    'branch': branch,
-    'checkout': checkout,
-    'cherry-pick': cherrypick,
-    'clean': clean,
-    'clone': clone,
-    'commit': commit,
-    'diff': diff,
-    'difftool': difftool,
-    'fetch': fetch,
-    'grep': grep,
-    'init': init,
-    'log': log,
-    'ls-files': lsfiles,
-    'merge': merge,
-    'merge-base': mergebase,
-    'mergetool': mergetool,
-    'mv': mv,
-    'pull': pull,
-    'push': push,
-    'rebase': rebase,
-    'reflog': reflog,
-    'reset': reset,
-    'revert': revert,
-    'rev-parse': revparse,
-    'rm': rm,
-    'show': show,
-    'stash': stash,
-    'status': status,
-    'svn': svn,
-    'tag': tag,
-    'whatchanged': deprecated,
+    b'add': add,
+    b'am': am,
+    b'apply': apply,
+    b'bisect': bisect,
+    b'blame': blame,
+    b'branch': branch,
+    b'checkout': checkout,
+    b'cherry-pick': cherrypick,
+    b'clean': clean,
+    b'clone': clone,
+    b'commit': commit,
+    b'diff': diff,
+    b'difftool': difftool,
+    b'fetch': fetch,
+    b'grep': grep,
+    b'init': init,
+    b'log': log,
+    b'ls-files': lsfiles,
+    b'merge': merge,
+    b'merge-base': mergebase,
+    b'mergetool': mergetool,
+    b'mv': mv,
+    b'pull': pull,
+    b'push': push,
+    b'rebase': rebase,
+    b'reflog': reflog,
+    b'reset': reset,
+    b'revert': revert,
+    b'rev-parse': revparse,
+    b'rm': rm,
+    b'show': show,
+    b'stash': stash,
+    b'status': status,
+    b'svn': svn,
+    b'tag': tag,
+    b'whatchanged': deprecated,
 }
 
 gitsvncommands = {
-    'dcommit': svndcommit,
-    'fetch': svnfetch,
-    'find-rev': svnfindrev,
-    'rebase': svnrebase,
+    b'dcommit': svndcommit,
+    b'fetch': svnfetch,
+    b'find-rev': svnfindrev,
+    b'rebase': svnrebase,
 }
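
Aside (illustrative sketch, not part of the patch): the githelp hunks above are the whole series in miniature. Since PEP 461 (Python 3.5), %-formatting works on bytes as well as str, so a rewritten line like cmd.append(b'--config diff.unified=%d' % (opts[b'unified'],)) behaves under Python 3 exactly as its unprefixed form did under Python 2. A minimal standalone check, plain Python with no Mercurial imports:

    opts = {b'unified': 5}
    flag = b'--config diff.unified=%d' % (opts[b'unified'],)
    assert flag == b'--config diff.unified=5'
    # bytes %s interpolates other bytes, mirroring ui.status(bytes(cmd), b"\n")
    assert b'hg %s' % b'export' == b'hg export'
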
--- a/hgext/gpg.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/gpg.py	Sun Oct 06 09:48:39 2019 -0400
@@ -31,36 +31,36 @@
 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
 # be specifying the version(s) of Mercurial they are tested with, or
 # leave the attribute unspecified.
-testedwith = 'ships-with-hg-core'
+testedwith = b'ships-with-hg-core'
 
 configtable = {}
 configitem = registrar.configitem(configtable)
 
 configitem(
-    'gpg', 'cmd', default='gpg',
+    b'gpg', b'cmd', default=b'gpg',
 )
 configitem(
-    'gpg', 'key', default=None,
+    b'gpg', b'key', default=None,
 )
 configitem(
-    'gpg', '.*', default=None, generic=True,
+    b'gpg', b'.*', default=None, generic=True,
 )
 
 # Custom help category
-_HELP_CATEGORY = 'gpg'
+_HELP_CATEGORY = b'gpg'
 help.CATEGORY_ORDER.insert(
     help.CATEGORY_ORDER.index(registrar.command.CATEGORY_HELP), _HELP_CATEGORY
 )
-help.CATEGORY_NAMES[_HELP_CATEGORY] = 'Signing changes (GPG)'
+help.CATEGORY_NAMES[_HELP_CATEGORY] = b'Signing changes (GPG)'
 
 
 class gpg(object):
     def __init__(self, path, key=None):
         self.path = path
-        self.key = (key and " --local-user \"%s\"" % key) or ""
+        self.key = (key and b" --local-user \"%s\"" % key) or b""
 
     def sign(self, data):
-        gpgcmd = "%s --sign --detach-sign%s" % (self.path, self.key)
+        gpgcmd = b"%s --sign --detach-sign%s" % (self.path, self.key)
         return procutil.filter(data, gpgcmd)
 
     def verify(self, data, sig):
@@ -68,20 +68,20 @@
         sigfile = datafile = None
         try:
             # create temporary files
-            fd, sigfile = pycompat.mkstemp(prefix="hg-gpg-", suffix=".sig")
+            fd, sigfile = pycompat.mkstemp(prefix=b"hg-gpg-", suffix=b".sig")
             fp = os.fdopen(fd, r'wb')
             fp.write(sig)
             fp.close()
-            fd, datafile = pycompat.mkstemp(prefix="hg-gpg-", suffix=".txt")
+            fd, datafile = pycompat.mkstemp(prefix=b"hg-gpg-", suffix=b".txt")
             fp = os.fdopen(fd, r'wb')
             fp.write(data)
             fp.close()
-            gpgcmd = "%s --logger-fd 1 --status-fd 1 --verify " "\"%s\" \"%s\"" % (
+            gpgcmd = b"%s --logger-fd 1 --status-fd 1 --verify " b"\"%s\" \"%s\"" % (
                 self.path,
                 sigfile,
                 datafile,
             )
-            ret = procutil.filter("", gpgcmd)
+            ret = procutil.filter(b"", gpgcmd)
         finally:
             for f in (sigfile, datafile):
                 try:
@@ -94,25 +94,25 @@
         for l in ret.splitlines():
             # see DETAILS in the gnupg documentation
             # filter the logger output
-            if not l.startswith("[GNUPG:]"):
+            if not l.startswith(b"[GNUPG:]"):
                 continue
             l = l[9:]
-            if l.startswith("VALIDSIG"):
+            if l.startswith(b"VALIDSIG"):
                 # fingerprint of the primary key
                 fingerprint = l.split()[10]
-            elif l.startswith("ERRSIG"):
-                key = l.split(" ", 3)[:2]
-                key.append("")
+            elif l.startswith(b"ERRSIG"):
+                key = l.split(b" ", 3)[:2]
+                key.append(b"")
                 fingerprint = None
             elif (
-                l.startswith("GOODSIG")
-                or l.startswith("EXPSIG")
-                or l.startswith("EXPKEYSIG")
-                or l.startswith("BADSIG")
+                l.startswith(b"GOODSIG")
+                or l.startswith(b"EXPSIG")
+                or l.startswith(b"EXPKEYSIG")
+                or l.startswith(b"BADSIG")
             ):
                 if key is not None:
                     keys.append(key + [fingerprint])
-                key = l.split(" ", 2)
+                key = l.split(b" ", 2)
                 fingerprint = None
         if key is not None:
             keys.append(key + [fingerprint])
@@ -121,10 +121,10 @@
 
 def newgpg(ui, **opts):
     """create a new gpg instance"""
-    gpgpath = ui.config("gpg", "cmd")
+    gpgpath = ui.config(b"gpg", b"cmd")
     gpgkey = opts.get(r'key')
     if not gpgkey:
-        gpgkey = ui.config("gpg", "key")
+        gpgkey = ui.config(b"gpg", b"key")
     return gpg(gpgpath, gpgkey)
 
 
@@ -139,18 +139,18 @@
         for l in fileiter:
             if not l:
                 continue
-            yield (l.split(" ", 2), (context, ln))
+            yield (l.split(b" ", 2), (context, ln))
             ln += 1
 
     # read the heads
-    fl = repo.file(".hgsigs")
+    fl = repo.file(b".hgsigs")
     for r in reversed(fl.heads()):
-        fn = ".hgsigs|%s" % hgnode.short(r)
+        fn = b".hgsigs|%s" % hgnode.short(r)
         for item in parsefile(fl.read(r).splitlines(), fn):
             yield item
     try:
         # read local signatures
-        fn = "localsigs"
+        fn = b"localsigs"
         for item in parsefile(repo.vfs(fn), fn):
             yield item
     except IOError:
@@ -161,7 +161,7 @@
     """get the keys who signed a data"""
     fn, ln = context
     node, version, sig = sigdata
-    prefix = "%s:%d" % (fn, ln)
+    prefix = b"%s:%d" % (fn, ln)
     node = hgnode.bin(node)
 
     data = node2txt(repo, node, version)
@@ -171,27 +171,27 @@
     validkeys = []
     # warn for expired key and/or sigs
     for key in keys:
-        if key[0] == "ERRSIG":
-            ui.write(_("%s Unknown key ID \"%s\"\n") % (prefix, key[1]))
+        if key[0] == b"ERRSIG":
+            ui.write(_(b"%s Unknown key ID \"%s\"\n") % (prefix, key[1]))
             continue
-        if key[0] == "BADSIG":
-            ui.write(_("%s Bad signature from \"%s\"\n") % (prefix, key[2]))
+        if key[0] == b"BADSIG":
+            ui.write(_(b"%s Bad signature from \"%s\"\n") % (prefix, key[2]))
             continue
-        if key[0] == "EXPSIG":
+        if key[0] == b"EXPSIG":
             ui.write(
-                _("%s Note: Signature has expired" " (signed by: \"%s\")\n")
+                _(b"%s Note: Signature has expired" b" (signed by: \"%s\")\n")
                 % (prefix, key[2])
             )
-        elif key[0] == "EXPKEYSIG":
+        elif key[0] == b"EXPKEYSIG":
             ui.write(
-                _("%s Note: This key has expired" " (signed by: \"%s\")\n")
+                _(b"%s Note: This key has expired" b" (signed by: \"%s\")\n")
                 % (prefix, key[2])
             )
         validkeys.append((key[1], key[2], key[3]))
     return validkeys
 
 
-@command("sigs", [], _('hg sigs'), helpcategory=_HELP_CATEGORY)
+@command(b"sigs", [], _(b'hg sigs'), helpcategory=_HELP_CATEGORY)
 def sigs(ui, repo):
     """list signed changesets"""
     mygpg = newgpg(ui)
@@ -203,7 +203,7 @@
         try:
             n = repo.lookup(node)
         except KeyError:
-            ui.warn(_("%s:%d node does not exist\n") % (fn, ln))
+            ui.warn(_(b"%s:%d node does not exist\n") % (fn, ln))
             continue
         r = repo.changelog.rev(n)
         keys = getkeys(ui, repo, mygpg, data, context)
@@ -213,11 +213,11 @@
         revs[r].extend(keys)
     for rev in sorted(revs, reverse=True):
         for k in revs[rev]:
-            r = "%5d:%s" % (rev, hgnode.hex(repo.changelog.node(rev)))
-            ui.write("%-30s %s\n" % (keystr(ui, k), r))
+            r = b"%5d:%s" % (rev, hgnode.hex(repo.changelog.node(rev)))
+            ui.write(b"%-30s %s\n" % (keystr(ui, k), r))
 
 
-@command("sigcheck", [], _('hg sigcheck REV'), helpcategory=_HELP_CATEGORY)
+@command(b"sigcheck", [], _(b'hg sigcheck REV'), helpcategory=_HELP_CATEGORY)
 def sigcheck(ui, repo, rev):
     """verify all the signatures there may be for a particular revision"""
     mygpg = newgpg(ui)
@@ -233,37 +233,42 @@
                 keys.extend(k)
 
     if not keys:
-        ui.write(_("no valid signature for %s\n") % hgnode.short(rev))
+        ui.write(_(b"no valid signature for %s\n") % hgnode.short(rev))
         return
 
     # print summary
-    ui.write(_("%s is signed by:\n") % hgnode.short(rev))
+    ui.write(_(b"%s is signed by:\n") % hgnode.short(rev))
     for key in keys:
-        ui.write(" %s\n" % keystr(ui, key))
+        ui.write(b" %s\n" % keystr(ui, key))
 
 
 def keystr(ui, key):
     """associate a string to a key (username, comment)"""
     keyid, user, fingerprint = key
-    comment = ui.config("gpg", fingerprint)
+    comment = ui.config(b"gpg", fingerprint)
     if comment:
-        return "%s (%s)" % (user, comment)
+        return b"%s (%s)" % (user, comment)
     else:
         return user
 
 
 @command(
-    "sign",
+    b"sign",
     [
-        ('l', 'local', None, _('make the signature local')),
-        ('f', 'force', None, _('sign even if the sigfile is modified')),
-        ('', 'no-commit', None, _('do not commit the sigfile after signing')),
-        ('k', 'key', '', _('the key id to sign with'), _('ID')),
-        ('m', 'message', '', _('use text as commit message'), _('TEXT')),
-        ('e', 'edit', False, _('invoke editor on commit messages')),
+        (b'l', b'local', None, _(b'make the signature local')),
+        (b'f', b'force', None, _(b'sign even if the sigfile is modified')),
+        (
+            b'',
+            b'no-commit',
+            None,
+            _(b'do not commit the sigfile after signing'),
+        ),
+        (b'k', b'key', b'', _(b'the key id to sign with'), _(b'ID')),
+        (b'm', b'message', b'', _(b'use text as commit message'), _(b'TEXT')),
+        (b'e', b'edit', False, _(b'invoke editor on commit messages')),
     ]
     + cmdutil.commitopts2,
-    _('hg sign [OPTION]... [REV]...'),
+    _(b'hg sign [OPTION]... [REV]...'),
     helpcategory=_HELP_CATEGORY,
 )
 def sign(ui, repo, *revs, **opts):
@@ -284,12 +289,12 @@
 def _dosign(ui, repo, *revs, **opts):
     mygpg = newgpg(ui, **opts)
     opts = pycompat.byteskwargs(opts)
-    sigver = "0"
-    sigmessage = ""
+    sigver = b"0"
+    sigmessage = b""
 
-    date = opts.get('date')
+    date = opts.get(b'date')
     if date:
-        opts['date'] = dateutil.parsedate(date)
+        opts[b'date'] = dateutil.parsedate(date)
 
     if revs:
         nodes = [repo.lookup(n) for n in revs]
@@ -299,7 +304,7 @@
         ]
         if len(nodes) > 1:
             raise error.Abort(
-                _('uncommitted merge - please provide a ' 'specific revision')
+                _(b'uncommitted merge - please provide a ' b'specific revision')
             )
         if not nodes:
             nodes = [repo.changelog.tip()]
@@ -307,55 +312,55 @@
     for n in nodes:
         hexnode = hgnode.hex(n)
         ui.write(
-            _("signing %d:%s\n") % (repo.changelog.rev(n), hgnode.short(n))
+            _(b"signing %d:%s\n") % (repo.changelog.rev(n), hgnode.short(n))
         )
         # build data
         data = node2txt(repo, n, sigver)
         sig = mygpg.sign(data)
         if not sig:
-            raise error.Abort(_("error while signing"))
+            raise error.Abort(_(b"error while signing"))
         sig = binascii.b2a_base64(sig)
-        sig = sig.replace("\n", "")
-        sigmessage += "%s %s %s\n" % (hexnode, sigver, sig)
+        sig = sig.replace(b"\n", b"")
+        sigmessage += b"%s %s %s\n" % (hexnode, sigver, sig)
 
     # write it
-    if opts['local']:
-        repo.vfs.append("localsigs", sigmessage)
+    if opts[b'local']:
+        repo.vfs.append(b"localsigs", sigmessage)
         return
 
-    if not opts["force"]:
-        msigs = match.exact(['.hgsigs'])
+    if not opts[b"force"]:
+        msigs = match.exact([b'.hgsigs'])
         if any(repo.status(match=msigs, unknown=True, ignored=True)):
             raise error.Abort(
-                _("working copy of .hgsigs is changed "),
-                hint=_("please commit .hgsigs manually"),
+                _(b"working copy of .hgsigs is changed "),
+                hint=_(b"please commit .hgsigs manually"),
             )
 
-    sigsfile = repo.wvfs(".hgsigs", "ab")
+    sigsfile = repo.wvfs(b".hgsigs", b"ab")
     sigsfile.write(sigmessage)
     sigsfile.close()
 
-    if '.hgsigs' not in repo.dirstate:
-        repo[None].add([".hgsigs"])
+    if b'.hgsigs' not in repo.dirstate:
+        repo[None].add([b".hgsigs"])
 
-    if opts["no_commit"]:
+    if opts[b"no_commit"]:
         return
 
-    message = opts['message']
+    message = opts[b'message']
     if not message:
         # we don't translate commit messages
-        message = "\n".join(
+        message = b"\n".join(
             [
-                "Added signature for changeset %s" % hgnode.short(n)
+                b"Added signature for changeset %s" % hgnode.short(n)
                 for n in nodes
             ]
         )
     try:
         editor = cmdutil.getcommiteditor(
-            editform='gpg.sign', **pycompat.strkwargs(opts)
+            editform=b'gpg.sign', **pycompat.strkwargs(opts)
         )
         repo.commit(
-            message, opts['user'], opts['date'], match=msigs, editor=editor
+            message, opts[b'user'], opts[b'date'], match=msigs, editor=editor
         )
     except ValueError as inst:
         raise error.Abort(pycompat.bytestr(inst))
@@ -363,10 +368,10 @@
 
 def node2txt(repo, node, ver):
     """map a manifest into some text"""
-    if ver == "0":
-        return "%s\n" % hgnode.hex(node)
+    if ver == b"0":
+        return b"%s\n" % hgnode.hex(node)
     else:
-        raise error.Abort(_("unknown signature version"))
+        raise error.Abort(_(b"unknown signature version"))
 
 
 def extsetup(ui):
@@ -374,4 +379,4 @@
     help.CATEGORY_ORDER.insert(
         help.CATEGORY_ORDER.index(command.CATEGORY_MAINTENANCE), _HELP_CATEGORY
     )
-    help.CATEGORY_NAMES[_HELP_CATEGORY] = 'GPG signing'
+    help.CATEGORY_NAMES[_HELP_CATEGORY] = b'GPG signing'
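
Aside (illustrative sketch, not part of the patch): the _dosign() hunk above shows the kwargs convention this series standardizes on. **opts must arrive with native-str keys on Python 3, so it is converted to bytes keys with pycompat.byteskwargs() for internal use, and converted back with pycompat.strkwargs() before being re-spread into another call. Simplified stand-ins for the two helpers (the real ones live in mercurial/pycompat.py):

    def byteskwargs(dic):
        # native-str keys -> bytes keys (latin-1, as in pycompat)
        return {k.encode('latin-1'): v for k, v in dic.items()}

    def strkwargs(dic):
        # bytes keys -> native-str keys, fit for **-expansion again
        return {k.decode('latin-1'): v for k, v in dic.items()}

    def dosign(**opts):
        opts = byteskwargs(opts)  # opts[b'message'] works from here on
        return strkwargs(opts)

    assert dosign(message=b'sign tip') == {'message': b'sign tip'}
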
--- a/hgext/graphlog.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/graphlog.py	Sun Oct 06 09:48:39 2019 -0400
@@ -30,66 +30,83 @@
 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
 # be specifying the version(s) of Mercurial they are tested with, or
 # leave the attribute unspecified.
-testedwith = 'ships-with-hg-core'
+testedwith = b'ships-with-hg-core'
 
 
 @command(
-    'glog',
+    b'glog',
     [
         (
-            'f',
-            'follow',
+            b'f',
+            b'follow',
             None,
             _(
-                'follow changeset history, or file history across copies and renames'
+                b'follow changeset history, or file history across copies and renames'
             ),
         ),
         (
-            '',
-            'follow-first',
+            b'',
+            b'follow-first',
             None,
-            _('only follow the first parent of merge changesets (DEPRECATED)'),
+            _(b'only follow the first parent of merge changesets (DEPRECATED)'),
         ),
-        ('d', 'date', '', _('show revisions matching date spec'), _('DATE')),
-        ('C', 'copies', None, _('show copied files')),
         (
-            'k',
-            'keyword',
+            b'd',
+            b'date',
+            b'',
+            _(b'show revisions matching date spec'),
+            _(b'DATE'),
+        ),
+        (b'C', b'copies', None, _(b'show copied files')),
+        (
+            b'k',
+            b'keyword',
             [],
-            _('do case-insensitive search for a given text'),
-            _('TEXT'),
+            _(b'do case-insensitive search for a given text'),
+            _(b'TEXT'),
+        ),
+        (
+            b'r',
+            b'rev',
+            [],
+            _(b'show the specified revision or revset'),
+            _(b'REV'),
         ),
-        ('r', 'rev', [], _('show the specified revision or revset'), _('REV')),
-        ('', 'removed', None, _('include revisions where files were removed')),
-        ('m', 'only-merges', None, _('show only merges (DEPRECATED)')),
-        ('u', 'user', [], _('revisions committed by user'), _('USER')),
         (
-            '',
-            'only-branch',
+            b'',
+            b'removed',
+            None,
+            _(b'include revisions where files were removed'),
+        ),
+        (b'm', b'only-merges', None, _(b'show only merges (DEPRECATED)')),
+        (b'u', b'user', [], _(b'revisions committed by user'), _(b'USER')),
+        (
+            b'',
+            b'only-branch',
             [],
             _(
-                'show only changesets within the given named branch (DEPRECATED)'
+                b'show only changesets within the given named branch (DEPRECATED)'
             ),
-            _('BRANCH'),
+            _(b'BRANCH'),
         ),
         (
-            'b',
-            'branch',
+            b'b',
+            b'branch',
             [],
-            _('show changesets within the given named branch'),
-            _('BRANCH'),
+            _(b'show changesets within the given named branch'),
+            _(b'BRANCH'),
         ),
         (
-            'P',
-            'prune',
+            b'P',
+            b'prune',
             [],
-            _('do not display revision or any of its ancestors'),
-            _('REV'),
+            _(b'do not display revision or any of its ancestors'),
+            _(b'REV'),
         ),
     ]
     + cmdutil.logopts
     + cmdutil.walkopts,
-    _('[OPTION]... [FILE]'),
+    _(b'[OPTION]... [FILE]'),
     helpcategory=command.CATEGORY_CHANGE_NAVIGATION,
     inferrepo=True,
 )
--- a/hgext/hgk.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/hgk.py	Sun Oct 06 09:48:39 2019 -0400
@@ -59,27 +59,27 @@
 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
 # be specifying the version(s) of Mercurial they are tested with, or
 # leave the attribute unspecified.
-testedwith = 'ships-with-hg-core'
+testedwith = b'ships-with-hg-core'
 
 configtable = {}
 configitem = registrar.configitem(configtable)
 
 configitem(
-    'hgk', 'path', default='hgk',
+    b'hgk', b'path', default=b'hgk',
 )
 
 
 @command(
-    'debug-diff-tree',
+    b'debug-diff-tree',
     [
-        ('p', 'patch', None, _('generate patch')),
-        ('r', 'recursive', None, _('recursive')),
-        ('P', 'pretty', None, _('pretty')),
-        ('s', 'stdin', None, _('stdin')),
-        ('C', 'copy', None, _('detect copies')),
-        ('S', 'search', "", _('search')),
+        (b'p', b'patch', None, _(b'generate patch')),
+        (b'r', b'recursive', None, _(b'recursive')),
+        (b'P', b'pretty', None, _(b'pretty')),
+        (b's', b'stdin', None, _(b'stdin')),
+        (b'C', b'copy', None, _(b'detect copies')),
+        (b'S', b'search', b"", _(b'search')),
     ],
-    '[OPTION]... NODE1 NODE2 [FILE]...',
+    b'[OPTION]... NODE1 NODE2 [FILE]...',
     inferrepo=True,
 )
 def difftree(ui, repo, node1=None, node2=None, *files, **opts):
@@ -98,17 +98,17 @@
         for f in modified:
             # TODO get file permissions
             ui.write(
-                ":100664 100664 %s %s M\t%s\t%s\n"
+                b":100664 100664 %s %s M\t%s\t%s\n"
                 % (short(mmap[f]), short(mmap2[f]), f, f)
             )
         for f in added:
             ui.write(
-                ":000000 100664 %s %s N\t%s\t%s\n"
+                b":000000 100664 %s %s N\t%s\t%s\n"
                 % (empty, short(mmap2[f]), f, f)
             )
         for f in removed:
             ui.write(
-                ":100664 000000 %s %s D\t%s\t%s\n"
+                b":100664 000000 %s %s D\t%s\t%s\n"
                 % (short(mmap[f]), empty, f, f)
             )
 
@@ -133,7 +133,7 @@
             node1 = repo.changelog.parents(node1)[0]
         if opts[r'patch']:
             if opts[r'pretty']:
-                catcommit(ui, repo, node2, "")
+                catcommit(ui, repo, node2, b"")
             m = scmutil.match(repo[node1], files)
             diffopts = patch.difffeatureopts(ui)
             diffopts.git = True
@@ -147,51 +147,51 @@
 
 
 def catcommit(ui, repo, n, prefix, ctx=None):
-    nlprefix = '\n' + prefix
+    nlprefix = b'\n' + prefix
     if ctx is None:
         ctx = repo[n]
     # use ctx.node() instead ??
-    ui.write(("tree %s\n" % short(ctx.changeset()[0])))
+    ui.write((b"tree %s\n" % short(ctx.changeset()[0])))
     for p in ctx.parents():
-        ui.write(("parent %s\n" % p))
+        ui.write((b"parent %s\n" % p))
 
     date = ctx.date()
-    description = ctx.description().replace("\0", "")
-    ui.write(("author %s %d %d\n" % (ctx.user(), int(date[0]), date[1])))
+    description = ctx.description().replace(b"\0", b"")
+    ui.write((b"author %s %d %d\n" % (ctx.user(), int(date[0]), date[1])))
 
-    if 'committer' in ctx.extra():
-        ui.write(("committer %s\n" % ctx.extra()['committer']))
+    if b'committer' in ctx.extra():
+        ui.write((b"committer %s\n" % ctx.extra()[b'committer']))
 
-    ui.write(("revision %d\n" % ctx.rev()))
-    ui.write(("branch %s\n" % ctx.branch()))
+    ui.write((b"revision %d\n" % ctx.rev()))
+    ui.write((b"branch %s\n" % ctx.branch()))
     if obsolete.isenabled(repo, obsolete.createmarkersopt):
         if ctx.obsolete():
-            ui.write("obsolete\n")
-    ui.write(("phase %s\n\n" % ctx.phasestr()))
+            ui.write(b"obsolete\n")
+    ui.write((b"phase %s\n\n" % ctx.phasestr()))
 
-    if prefix != "":
+    if prefix != b"":
         ui.write(
-            "%s%s\n" % (prefix, description.replace('\n', nlprefix).strip())
+            b"%s%s\n" % (prefix, description.replace(b'\n', nlprefix).strip())
         )
     else:
-        ui.write(description + "\n")
+        ui.write(description + b"\n")
     if prefix:
-        ui.write('\0')
+        ui.write(b'\0')
 
 
-@command('debug-merge-base', [], _('REV REV'))
+@command(b'debug-merge-base', [], _(b'REV REV'))
 def base(ui, repo, node1, node2):
     """output common ancestor information"""
     node1 = repo.lookup(node1)
     node2 = repo.lookup(node2)
     n = repo.changelog.ancestor(node1, node2)
-    ui.write(short(n) + "\n")
+    ui.write(short(n) + b"\n")
 
 
 @command(
-    'debug-cat-file',
-    [('s', 'stdin', None, _('stdin'))],
-    _('[OPTION]... TYPE FILE'),
+    b'debug-cat-file',
+    [(b's', b'stdin', None, _(b'stdin'))],
+    _(b'[OPTION]... TYPE FILE'),
     inferrepo=True,
 )
 def catfile(ui, repo, type=None, r=None, **opts):
@@ -200,21 +200,21 @@
     # spaces.  This way our caller can find the commit without magic
     # strings
     #
-    prefix = ""
+    prefix = b""
     if opts[r'stdin']:
         line = ui.fin.readline()
         if not line:
             return
         (type, r) = line.rstrip(pycompat.oslinesep).split(b' ')
-        prefix = "    "
+        prefix = b"    "
     else:
         if not type or not r:
-            ui.warn(_("cat-file: type or revision not supplied\n"))
-            commands.help_(ui, 'cat-file')
+            ui.warn(_(b"cat-file: type or revision not supplied\n"))
+            commands.help_(ui, b'cat-file')
 
     while r:
-        if type != "commit":
-            ui.warn(_("aborting hg cat-file only understands commits\n"))
+        if type != b"commit":
+            ui.warn(_(b"aborting hg cat-file only understands commits\n"))
             return 1
         n = repo.lookup(r)
         catcommit(ui, repo, n, prefix)
@@ -232,7 +232,7 @@
 # telling you which commits are reachable from the supplied ones via
 # a bitmask based on arg position.
 # you can specify a commit to stop at by starting the sha1 with ^
-def revtree(ui, args, repo, full="tree", maxnr=0, parents=False):
+def revtree(ui, args, repo, full=b"tree", maxnr=0, parents=False):
     def chlogwalk():
         count = len(repo)
         i = count
@@ -281,11 +281,11 @@
     # figure out which commits they are asking for and which ones they
     # want us to stop on
     for i, arg in enumerate(args):
-        if arg.startswith('^'):
+        if arg.startswith(b'^'):
             s = repo.lookup(arg[1:])
             stop_sha1.append(s)
             want_sha1.append(s)
-        elif arg != 'HEAD':
+        elif arg != b'HEAD':
             want_sha1.append(repo.lookup(arg))
 
     # calculate the graph for the supplied commits
@@ -312,32 +312,32 @@
         n = repo.changelog.node(i)
         mask = is_reachable(want_sha1, reachable, n)
         if mask:
-            parentstr = ""
+            parentstr = b""
             if parents:
                 pp = repo.changelog.parents(n)
                 if pp[0] != nullid:
-                    parentstr += " " + short(pp[0])
+                    parentstr += b" " + short(pp[0])
                 if pp[1] != nullid:
-                    parentstr += " " + short(pp[1])
+                    parentstr += b" " + short(pp[1])
             if not full:
-                ui.write("%s%s\n" % (short(n), parentstr))
-            elif full == "commit":
-                ui.write("%s%s\n" % (short(n), parentstr))
-                catcommit(ui, repo, n, '    ', ctx)
+                ui.write(b"%s%s\n" % (short(n), parentstr))
+            elif full == b"commit":
+                ui.write(b"%s%s\n" % (short(n), parentstr))
+                catcommit(ui, repo, n, b'    ', ctx)
             else:
                 (p1, p2) = repo.changelog.parents(n)
                 (h, h1, h2) = map(short, (n, p1, p2))
                 (i1, i2) = map(repo.changelog.rev, (p1, p2))
 
                 date = ctx.date()[0]
-                ui.write("%s %s:%s" % (date, h, mask))
+                ui.write(b"%s %s:%s" % (date, h, mask))
                 mask = is_reachable(want_sha1, reachable, p1)
                 if i1 != nullrev and mask > 0:
-                    ui.write("%s:%s " % (h1, mask)),
+                    ui.write(b"%s:%s " % (h1, mask)),
                 mask = is_reachable(want_sha1, reachable, p2)
                 if i2 != nullrev and mask > 0:
-                    ui.write("%s:%s " % (h2, mask))
-                ui.write("\n")
+                    ui.write(b"%s:%s " % (h2, mask))
+                ui.write(b"\n")
             if maxnr and count >= maxnr:
                 break
             count += 1
@@ -347,19 +347,19 @@
 # at a given commit without walking the whole repo.  TODO add the stop
 # parameter
 @command(
-    'debug-rev-list',
+    b'debug-rev-list',
     [
-        ('H', 'header', None, _('header')),
-        ('t', 'topo-order', None, _('topo-order')),
-        ('p', 'parents', None, _('parents')),
-        ('n', 'max-count', 0, _('max-count')),
+        (b'H', b'header', None, _(b'header')),
+        (b't', b'topo-order', None, _(b'topo-order')),
+        (b'p', b'parents', None, _(b'parents')),
+        (b'n', b'max-count', 0, _(b'max-count')),
     ],
-    '[OPTION]... REV...',
+    b'[OPTION]... REV...',
 )
 def revlist(ui, repo, *revs, **opts):
     """print revisions"""
-    if opts['header']:
-        full = "commit"
+    if opts[b'header']:
+        full = b"commit"
     else:
         full = None
     copy = [x for x in revs]
@@ -367,19 +367,19 @@
 
 
 @command(
-    'view',
-    [('l', 'limit', '', _('limit number of changes displayed'), _('NUM'))],
-    _('[-l LIMIT] [REVRANGE]'),
+    b'view',
+    [(b'l', b'limit', b'', _(b'limit number of changes displayed'), _(b'NUM'))],
+    _(b'[-l LIMIT] [REVRANGE]'),
     helpcategory=command.CATEGORY_CHANGE_NAVIGATION,
 )
 def view(ui, repo, *etc, **opts):
-    "start interactive history viewer"
+    b"start interactive history viewer"
     opts = pycompat.byteskwargs(opts)
     os.chdir(repo.root)
-    optstr = ' '.join(['--%s %s' % (k, v) for k, v in opts.iteritems() if v])
+    optstr = b' '.join([b'--%s %s' % (k, v) for k, v in opts.iteritems() if v])
     if repo.filtername is None:
-        optstr += '--hidden'
+        optstr += b'--hidden'
 
-    cmd = ui.config("hgk", "path") + " %s %s" % (optstr, " ".join(etc))
-    ui.debug("running %s\n" % cmd)
-    ui.system(cmd, blockedtag='hgk_view')
+    cmd = ui.config(b"hgk", b"path") + b" %s %s" % (optstr, b" ".join(etc))
+    ui.debug(b"running %s\n" % cmd)
+    ui.system(cmd, blockedtag=b'hgk_view')
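
Aside (illustrative sketch, not part of the patch): one hgk detail worth flagging is that view()'s single-quoted docstring was byteified along with everything else, while revlist()'s triple-quoted """print revisions""" stayed native str; byteify-strings appears to exempt only triple-quoted docstrings. On Python 3 a leading bytes literal is not a docstring at all:

    def view():
        b"start interactive history viewer"  # bytes literal, not a docstring

    def revlist():
        """print revisions"""  # a real (str) docstring

    assert view.__doc__ is None
    assert revlist.__doc__ == "print revisions"
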
--- a/hgext/highlight/__init__.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/highlight/__init__.py	Sun Oct 06 09:48:39 2019 -0400
@@ -43,13 +43,13 @@
 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
 # be specifying the version(s) of Mercurial they are tested with, or
 # leave the attribute unspecified.
-testedwith = 'ships-with-hg-core'
+testedwith = b'ships-with-hg-core'
 
 
 def pygmentize(web, field, fctx, tmpl):
-    style = web.config('web', 'pygments_style', 'colorful')
-    expr = web.config('web', 'highlightfiles', "size('<5M')")
-    filenameonly = web.configbool('web', 'highlightonlymatchfilename', False)
+    style = web.config(b'web', b'pygments_style', b'colorful')
+    expr = web.config(b'web', b'highlightfiles', b"size('<5M')")
+    filenameonly = web.configbool(b'web', b'highlightonlymatchfilename', False)
 
     ctx = fctx.changectx()
     m = ctx.matchfileset(expr)
@@ -60,7 +60,7 @@
 
 
 def filerevision_highlight(orig, web, fctx):
-    mt = web.res.headers['Content-Type']
+    mt = web.res.headers[b'Content-Type']
     # only pygmentize for mimetype containing 'html' so we both match
     # 'text/html' and possibly 'application/xhtml+xml' in the future
     # so that we don't have to touch the extension when the mimetype
@@ -68,30 +68,30 @@
     # raw file is sent using rawfile() and doesn't call us, so we
     # can't clash with the file's content-type here in case we
     # pygmentize a html file
-    if 'html' in mt:
-        pygmentize(web, 'fileline', fctx, web.tmpl)
+    if b'html' in mt:
+        pygmentize(web, b'fileline', fctx, web.tmpl)
 
     return orig(web, fctx)
 
 
 def annotate_highlight(orig, web):
-    mt = web.res.headers['Content-Type']
-    if 'html' in mt:
+    mt = web.res.headers[b'Content-Type']
+    if b'html' in mt:
         fctx = webutil.filectx(web.repo, web.req)
-        pygmentize(web, 'annotateline', fctx, web.tmpl)
+        pygmentize(web, b'annotateline', fctx, web.tmpl)
 
     return orig(web)
 
 
 def generate_css(web):
-    pg_style = web.config('web', 'pygments_style', 'colorful')
+    pg_style = web.config(b'web', b'pygments_style', b'colorful')
     fmter = highlight.HtmlFormatter(style=pycompat.sysstr(pg_style))
-    web.res.headers['Content-Type'] = 'text/css'
-    style_defs = fmter.get_style_defs(pycompat.sysstr(''))
+    web.res.headers[b'Content-Type'] = b'text/css'
+    style_defs = fmter.get_style_defs(pycompat.sysstr(b''))
     web.res.setbodybytes(
-        ''.join(
+        b''.join(
             [
-                '/* pygments_style = %s */\n\n' % pg_style,
+                b'/* pygments_style = %s */\n\n' % pg_style,
                 pycompat.bytestr(style_defs),
             ]
         )
@@ -102,8 +102,8 @@
 def extsetup(ui):
     # monkeypatch in the new version
     extensions.wrapfunction(
-        webcommands, '_filerevision', filerevision_highlight
+        webcommands, b'_filerevision', filerevision_highlight
     )
-    extensions.wrapfunction(webcommands, 'annotate', annotate_highlight)
+    extensions.wrapfunction(webcommands, b'annotate', annotate_highlight)
     webcommands.highlightcss = generate_css
-    webcommands.__all__.append('highlightcss')
+    webcommands.__all__.append(b'highlightcss')
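
Aside (illustrative sketch, not part of the patch): extsetup() above relies on Mercurial's monkey-patching helper, where the wrapper is installed in place of the original and receives it as its first argument, deciding when to delegate. A simplified stand-in for extensions.wrapfunction, with a hypothetical webcommands stand-in and native-str attribute names for getattr's sake:

    def wrapfunction(container, name, wrapper):
        orig = getattr(container, name)

        def wrapped(*args, **kwargs):
            return wrapper(orig, *args, **kwargs)

        setattr(container, name, wrapped)

    class webcommands:  # hypothetical stand-in for the real module
        def _filerevision(web, fctx):
            return 'rendered %s' % fctx

    def filerevision_highlight(orig, web, fctx):
        # ... pygmentize here only when the response is HTML ...
        return orig(web, fctx)

    wrapfunction(webcommands, '_filerevision', filerevision_highlight)
    assert webcommands._filerevision(None, 'file.py') == 'rendered file.py'
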
--- a/hgext/highlight/highlight.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/highlight/highlight.py	Sun Oct 06 09:48:39 2019 -0400
@@ -12,7 +12,7 @@
 
 from mercurial import demandimport
 
-demandimport.IGNORES.update(['pkgutil', 'pkg_resources', '__main__'])
+demandimport.IGNORES.update([b'pkgutil', b'pkg_resources', b'__main__'])
 
 from mercurial import (
     encoding,
@@ -39,27 +39,27 @@
 HtmlFormatter = pygments.formatters.HtmlFormatter
 
 SYNTAX_CSS = (
-    '\n<link rel="stylesheet" href="{url}highlightcss" ' 'type="text/css" />'
+    b'\n<link rel="stylesheet" href="{url}highlightcss" ' b'type="text/css" />'
 )
 
 
 def pygmentize(field, fctx, style, tmpl, guessfilenameonly=False):
 
     # append a <link ...> to the syntax highlighting css
-    tmpl.load('header')
-    old_header = tmpl.cache['header']
+    tmpl.load(b'header')
+    old_header = tmpl.cache[b'header']
     if SYNTAX_CSS not in old_header:
         new_header = old_header + SYNTAX_CSS
-        tmpl.cache['header'] = new_header
+        tmpl.cache[b'header'] = new_header
 
     text = fctx.data()
     if stringutil.binary(text):
         return
 
     # str.splitlines() != unicode.splitlines() because "reasons"
-    for c in "\x0c\x1c\x1d\x1e":
+    for c in b"\x0c\x1c\x1d\x1e":
         if c in text:
-            text = text.replace(c, '')
+            text = text.replace(c, b'')
 
     # Pygments is best used with Unicode strings:
     # <http://pygments.org/docs/unicode/>
@@ -94,8 +94,8 @@
         for s in colorized.splitlines()
     )
 
-    tmpl._filters['colorize'] = lambda x: next(coloriter)
+    tmpl._filters[b'colorize'] = lambda x: next(coloriter)
 
     oldl = tmpl.cache[field]
-    newl = oldl.replace('line|escape', 'line|colorize')
+    newl = oldl.replace(b'line|escape', b'line|colorize')
     tmpl.cache[field] = newl
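
Aside (illustrative sketch, not part of the patch): pygmentize() above sits right on the bytes/unicode boundary; repository data is bytes, Pygments wants unicode, and the result is re-encoded before it lands back in the bytes template cache. Note too that iterating a bytes literal yields ints on Python 3, so a careful port of the control-character loop rebuilds one-byte values before calling replace(). A standalone sketch, assuming UTF-8 where the real code goes through Mercurial's encoding helpers:

    text = b"def f():\x0c\n    return 1\n"
    for i in b"\x0c\x1c\x1d\x1e":
        c = bytes((i,))  # turn the int back into a one-byte bytes object
        if c in text:
            text = text.replace(c, b'')

    utext = text.decode('utf-8', 'replace')  # hand unicode to Pygments
    colorized = utext.upper()  # stand-in for the real highlighter call
    assert colorized.encode('utf-8').startswith(b'DEF F():')
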
--- a/hgext/histedit.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/histedit.py	Sun Oct 06 09:48:39 2019 -0400
@@ -241,30 +241,30 @@
 configtable = {}
 configitem = registrar.configitem(configtable)
 configitem(
-    'experimental', 'histedit.autoverb', default=False,
+    b'experimental', b'histedit.autoverb', default=False,
 )
 configitem(
-    'histedit', 'defaultrev', default=None,
+    b'histedit', b'defaultrev', default=None,
 )
 configitem(
-    'histedit', 'dropmissing', default=False,
+    b'histedit', b'dropmissing', default=False,
 )
 configitem(
-    'histedit', 'linelen', default=80,
+    b'histedit', b'linelen', default=80,
 )
 configitem(
-    'histedit', 'singletransaction', default=False,
+    b'histedit', b'singletransaction', default=False,
 )
 configitem(
-    'ui', 'interface.histedit', default=None,
+    b'ui', b'interface.histedit', default=None,
 )
-configitem('histedit', 'summary-template', default='{rev} {desc|firstline}')
+configitem(b'histedit', b'summary-template', default=b'{rev} {desc|firstline}')
 
 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
 # be specifying the version(s) of Mercurial they are tested with, or
 # leave the attribute unspecified.
-testedwith = 'ships-with-hg-core'
+testedwith = b'ships-with-hg-core'
 
 actiontable = {}
 primaryactions = set()
@@ -298,11 +298,11 @@
 
     def addverb(v):
         a = actiontable[v]
-        lines = a.message.split("\n")
+        lines = a.message.split(b"\n")
         if len(a.verbs):
-            v = ', '.join(sorted(a.verbs, key=lambda v: len(v)))
-        actions.append(" %s = %s" % (v, lines[0]))
-        actions.extend(['  %s' for l in lines[1:]])
+            v = b', '.join(sorted(a.verbs, key=lambda v: len(v)))
+        actions.append(b" %s = %s" % (v, lines[0]))
+        actions.extend([b'  %s' for l in lines[1:]])
 
     for v in (
         sorted(primaryactions)
@@ -310,18 +310,18 @@
         + sorted(tertiaryactions)
     ):
         addverb(v)
-    actions.append('')
+    actions.append(b'')
 
     hints = []
-    if ui.configbool('histedit', 'dropmissing'):
+    if ui.configbool(b'histedit', b'dropmissing'):
         hints.append(
-            "Deleting a changeset from the list "
-            "will DISCARD it from the edited history!"
+            b"Deleting a changeset from the list "
+            b"will DISCARD it from the edited history!"
         )
 
-    lines = (intro % (first, last)).split('\n') + actions + hints
-
-    return ''.join(['# %s\n' % l if l else '#\n' for l in lines])
+    lines = (intro % (first, last)).split(b'\n') + actions + hints
+
+    return b''.join([b'# %s\n' % l if l else b'#\n' for l in lines])
 
 
 class histeditstate(object):
@@ -334,80 +334,80 @@
         self.lock = None
         self.wlock = None
         self.backupfile = None
-        self.stateobj = statemod.cmdstate(repo, 'histedit-state')
+        self.stateobj = statemod.cmdstate(repo, b'histedit-state')
         self.replacements = []
 
     def read(self):
         """Load histedit state from disk and set fields appropriately."""
         if not self.stateobj.exists():
-            cmdutil.wrongtooltocontinue(self.repo, _('histedit'))
+            cmdutil.wrongtooltocontinue(self.repo, _(b'histedit'))
 
         data = self._read()
 
-        self.parentctxnode = data['parentctxnode']
-        actions = parserules(data['rules'], self)
+        self.parentctxnode = data[b'parentctxnode']
+        actions = parserules(data[b'rules'], self)
         self.actions = actions
-        self.keep = data['keep']
-        self.topmost = data['topmost']
-        self.replacements = data['replacements']
-        self.backupfile = data['backupfile']
+        self.keep = data[b'keep']
+        self.topmost = data[b'topmost']
+        self.replacements = data[b'replacements']
+        self.backupfile = data[b'backupfile']
 
     def _read(self):
-        fp = self.repo.vfs.read('histedit-state')
-        if fp.startswith('v1\n'):
+        fp = self.repo.vfs.read(b'histedit-state')
+        if fp.startswith(b'v1\n'):
             data = self._load()
             parentctxnode, rules, keep, topmost, replacements, backupfile = data
         else:
             data = pickle.loads(fp)
             parentctxnode, rules, keep, topmost, replacements = data
             backupfile = None
-        rules = "\n".join(["%s %s" % (verb, rest) for [verb, rest] in rules])
+        rules = b"\n".join([b"%s %s" % (verb, rest) for [verb, rest] in rules])
 
         return {
-            'parentctxnode': parentctxnode,
-            "rules": rules,
-            "keep": keep,
-            "topmost": topmost,
-            "replacements": replacements,
-            "backupfile": backupfile,
+            b'parentctxnode': parentctxnode,
+            b"rules": rules,
+            b"keep": keep,
+            b"topmost": topmost,
+            b"replacements": replacements,
+            b"backupfile": backupfile,
         }
 
     def write(self, tr=None):
         if tr:
             tr.addfilegenerator(
-                'histedit-state',
-                ('histedit-state',),
+                b'histedit-state',
+                (b'histedit-state',),
                 self._write,
-                location='plain',
+                location=b'plain',
             )
         else:
-            with self.repo.vfs("histedit-state", "w") as f:
+            with self.repo.vfs(b"histedit-state", b"w") as f:
                 self._write(f)
 
     def _write(self, fp):
-        fp.write('v1\n')
-        fp.write('%s\n' % node.hex(self.parentctxnode))
-        fp.write('%s\n' % node.hex(self.topmost))
-        fp.write('%s\n' % ('True' if self.keep else 'False'))
-        fp.write('%d\n' % len(self.actions))
+        fp.write(b'v1\n')
+        fp.write(b'%s\n' % node.hex(self.parentctxnode))
+        fp.write(b'%s\n' % node.hex(self.topmost))
+        fp.write(b'%s\n' % (b'True' if self.keep else b'False'))
+        fp.write(b'%d\n' % len(self.actions))
         for action in self.actions:
-            fp.write('%s\n' % action.tostate())
-        fp.write('%d\n' % len(self.replacements))
+            fp.write(b'%s\n' % action.tostate())
+        fp.write(b'%d\n' % len(self.replacements))
         for replacement in self.replacements:
             fp.write(
-                '%s%s\n'
+                b'%s%s\n'
                 % (
                     node.hex(replacement[0]),
-                    ''.join(node.hex(r) for r in replacement[1]),
+                    b''.join(node.hex(r) for r in replacement[1]),
                 )
             )
         backupfile = self.backupfile
         if not backupfile:
-            backupfile = ''
-        fp.write('%s\n' % backupfile)
+            backupfile = b''
+        fp.write(b'%s\n' % backupfile)
 
     def _load(self):
-        fp = self.repo.vfs('histedit-state', 'r')
+        fp = self.repo.vfs(b'histedit-state', b'r')
         lines = [l[:-1] for l in fp.readlines()]
 
         index = 0
@@ -420,7 +420,7 @@
         topmost = node.bin(lines[index])
         index += 1
 
-        keep = lines[index] == 'True'
+        keep = lines[index] == b'True'
         index += 1
 
         # Rules
@@ -457,10 +457,10 @@
 
     def clear(self):
         if self.inprogress():
-            self.repo.vfs.unlink('histedit-state')
+            self.repo.vfs.unlink(b'histedit-state')
 
     def inprogress(self):
-        return self.repo.vfs.exists('histedit-state')
+        return self.repo.vfs.exists(b'histedit-state')
 
 
 class histeditaction(object):
@@ -473,7 +473,7 @@
     def fromrule(cls, state, rule):
         """Parses the given rule, returning an instance of the histeditaction.
         """
-        ruleid = rule.strip().split(' ', 1)[0]
+        ruleid = rule.strip().split(b' ', 1)[0]
         # ruleid can be anything from rev numbers, hashes, "bookmarks" etc
         # Check for validation of rule ids and get the rulehash
         try:
@@ -484,7 +484,7 @@
                 rulehash = _ctx.hex()
                 rev = node.bin(rulehash)
             except error.RepoLookupError:
-                raise error.ParseError(_("invalid changeset %s") % ruleid)
+                raise error.ParseError(_(b"invalid changeset %s") % ruleid)
         return cls(state, rev)
 
     def verify(self, prev, expected, seen):
@@ -493,21 +493,22 @@
         ha = node.hex(self.node)
         self.node = scmutil.resolvehexnodeidprefix(repo, ha)
         if self.node is None:
-            raise error.ParseError(_('unknown changeset %s listed') % ha[:12])
+            raise error.ParseError(_(b'unknown changeset %s listed') % ha[:12])
         self._verifynodeconstraints(prev, expected, seen)
 
     def _verifynodeconstraints(self, prev, expected, seen):
         # by default command need a node in the edited list
         if self.node not in expected:
             raise error.ParseError(
-                _('%s "%s" changeset was not a candidate')
+                _(b'%s "%s" changeset was not a candidate')
                 % (self.verb, node.short(self.node)),
-                hint=_('only use listed changesets'),
+                hint=_(b'only use listed changesets'),
             )
         # and only one command per node
         if self.node in seen:
             raise error.ParseError(
-                _('duplicated command for changeset %s') % node.short(self.node)
+                _(b'duplicated command for changeset %s')
+                % node.short(self.node)
             )
 
     def torule(self):
@@ -520,15 +521,15 @@
         ui = self.repo.ui
         summary = (
             cmdutil.rendertemplate(
-                ctx, ui.config('histedit', 'summary-template')
+                ctx, ui.config(b'histedit', b'summary-template')
             )
-            or ''
+            or b''
         )
         summary = summary.splitlines()[0]
-        line = '%s %s %s' % (self.verb, ctx, summary)
+        line = b'%s %s %s' % (self.verb, ctx, summary)
         # trim to 75 columns by default so it's not stupidly wide in my editor
         # (the 5 more are left for verb)
-        maxlen = self.repo.ui.configint('histedit', 'linelen')
+        maxlen = self.repo.ui.configint(b'histedit', b'linelen')
         maxlen = max(maxlen, 22)  # avoid truncating hash
         return stringutil.ellipsis(line, maxlen)
 
@@ -536,7 +537,7 @@
         """Print an action in format used by histedit state files
            (the first line is a verb, the remainder is the second)
         """
-        return "%s\n%s" % (self.verb, node.hex(self.node))
+        return b"%s\n%s" % (self.verb, node.hex(self.node))
 
     def run(self):
         """Runs the action. The default behavior is simply apply the action's
@@ -557,9 +558,9 @@
         repo.dirstate.setbranch(rulectx.branch())
         if stats.unresolvedcount:
             raise error.InterventionRequired(
-                _('Fix up the change (%s %s)')
+                _(b'Fix up the change (%s %s)')
                 % (self.verb, node.short(self.node)),
-                hint=_('hg histedit --continue to resume'),
+                hint=_(b'hg histedit --continue to resume'),
             )
 
     def continuedirty(self):
@@ -570,7 +571,7 @@
 
         editor = self.commiteditor()
         commit = commitfuncfor(repo, rulectx)
-        if repo.ui.configbool('rewrite', 'update-timestamp'):
+        if repo.ui.configbool(b'rewrite', b'update-timestamp'):
             date = dateutil.makedate()
         else:
             date = rulectx.date()
@@ -590,10 +591,10 @@
         """Continues the action when the working copy is clean. The default
         behavior is to accept the current commit as the new version of the
         rulectx."""
-        ctx = self.repo['.']
+        ctx = self.repo[b'.']
         if ctx.node() == self.state.parentctxnode:
             self.repo.ui.warn(
-                _('%s: skipping changeset (no changes)\n')
+                _(b'%s: skipping changeset (no changes)\n')
                 % node.short(self.node)
             )
             return ctx, [(self.node, tuple())]
@@ -616,10 +617,10 @@
     phasemin = src.phase()
 
     def commitfunc(**kwargs):
-        overrides = {('phases', 'new-commit'): phasemin}
-        with repo.ui.configoverride(overrides, 'histedit'):
+        overrides = {(b'phases', b'new-commit'): phasemin}
+        with repo.ui.configoverride(overrides, b'histedit'):
             extra = kwargs.get(r'extra', {}).copy()
-            extra['histedit_source'] = src.hex()
+            extra[b'histedit_source'] = src.hex()
             kwargs[r'extra'] = extra
             return repo.commit(**kwargs)
 
@@ -640,11 +641,11 @@
         try:
             # ui.forcemerge is an internal variable, do not document
             repo.ui.setconfig(
-                'ui', 'forcemerge', opts.get('tool', ''), 'histedit'
+                b'ui', b'forcemerge', opts.get(b'tool', b''), b'histedit'
             )
-            stats = mergemod.graft(repo, ctx, ctx.p1(), ['local', 'histedit'])
+            stats = mergemod.graft(repo, ctx, ctx.p1(), [b'local', b'histedit'])
         finally:
-            repo.ui.setconfig('ui', 'forcemerge', '', 'histedit')
+            repo.ui.setconfig(b'ui', b'forcemerge', b'', b'histedit')
     return stats
 
 
@@ -658,13 +659,13 @@
     Commit message is edited in all cases.
 
     This function works in memory."""
-    ctxs = list(repo.set('%d::%d', firstctx.rev(), lastctx.rev()))
+    ctxs = list(repo.set(b'%d::%d', firstctx.rev(), lastctx.rev()))
     if not ctxs:
         return None
     for c in ctxs:
         if not c.mutable():
             raise error.ParseError(
-                _("cannot fold into public change %s") % node.short(c.node())
+                _(b"cannot fold into public change %s") % node.short(c.node())
             )
     base = firstctx.p1()
 
@@ -691,25 +692,25 @@
                 ctx,
                 fctx.path(),
                 fctx.data(),
-                islink='l' in flags,
-                isexec='x' in flags,
+                islink=b'l' in flags,
+                isexec=b'x' in flags,
                 copysource=copied.get(path),
             )
             return mctx
         return None
 
-    if commitopts.get('message'):
-        message = commitopts['message']
+    if commitopts.get(b'message'):
+        message = commitopts[b'message']
     else:
         message = firstctx.description()
-    user = commitopts.get('user')
-    date = commitopts.get('date')
-    extra = commitopts.get('extra')
+    user = commitopts.get(b'user')
+    date = commitopts.get(b'date')
+    extra = commitopts.get(b'extra')
 
     parents = (firstctx.p1().node(), firstctx.p2().node())
     editor = None
     if not skipprompt:
-        editor = cmdutil.getcommiteditor(edit=True, editform='histedit.fold')
+        editor = cmdutil.getcommiteditor(edit=True, editform=b'histedit.fold')
     new = context.memctx(
         repo,
         parents=parents,
@@ -730,10 +731,10 @@
 
 def abortdirty():
     raise error.Abort(
-        _('working copy has pending changes'),
+        _(b'working copy has pending changes'),
         hint=_(
-            'amend, commit, or revert them and run histedit '
-            '--continue, or abort with histedit --abort'
+            b'amend, commit, or revert them and run histedit '
+            b'--continue, or abort with histedit --abort'
         ),
     )
 
@@ -761,18 +762,18 @@
     return wrap
 
 
-@action(['pick', 'p'], _('use commit'), priority=True)
+@action([b'pick', b'p'], _(b'use commit'), priority=True)
 class pick(histeditaction):
     def run(self):
         rulectx = self.repo[self.node]
         if rulectx.p1().node() == self.state.parentctxnode:
-            self.repo.ui.debug('node %s unchanged\n' % node.short(self.node))
+            self.repo.ui.debug(b'node %s unchanged\n' % node.short(self.node))
             return rulectx, []
 
         return super(pick, self).run()
 
 
-@action(['edit', 'e'], _('use commit, but stop for amending'), priority=True)
+@action([b'edit', b'e'], _(b'use commit, but stop for amending'), priority=True)
 class edit(histeditaction):
     def run(self):
         repo = self.repo
@@ -780,16 +781,16 @@
         hg.update(repo, self.state.parentctxnode, quietempty=True)
         applychanges(repo.ui, repo, rulectx, {})
         raise error.InterventionRequired(
-            _('Editing (%s), you may commit or record as needed now.')
+            _(b'Editing (%s), you may commit or record as needed now.')
             % node.short(self.node),
-            hint=_('hg histedit --continue to resume'),
+            hint=_(b'hg histedit --continue to resume'),
         )
 
     def commiteditor(self):
-        return cmdutil.getcommiteditor(edit=True, editform='histedit.edit')
-
-
-@action(['fold', 'f'], _('use commit, but combine it with the one above'))
+        return cmdutil.getcommiteditor(edit=True, editform=b'histedit.edit')
+
+
+@action([b'fold', b'f'], _(b'use commit, but combine it with the one above'))
 class fold(histeditaction):
     def verify(self, prev, expected, seen):
         """ Verifies semantic correctness of the fold rule"""
@@ -797,13 +798,13 @@
         repo = self.repo
         if not prev:
             c = repo[self.node].p1()
-        elif not prev.verb in ('pick', 'base'):
+        elif not prev.verb in (b'pick', b'base'):
             return
         else:
             c = repo[prev.node]
         if not c.mutable():
             raise error.ParseError(
-                _("cannot fold into public change %s") % node.short(c.node())
+                _(b"cannot fold into public change %s") % node.short(c.node())
             )
 
     def continuedirty(self):
@@ -812,7 +813,7 @@
 
         commit = commitfuncfor(repo, rulectx)
         commit(
-            text='fold-temp-revision %s' % node.short(self.node),
+            text=b'fold-temp-revision %s' % node.short(self.node),
             user=rulectx.user(),
             date=rulectx.date(),
             extra=rulectx.extra(),
@@ -820,23 +821,23 @@
 
     def continueclean(self):
         repo = self.repo
-        ctx = repo['.']
+        ctx = repo[b'.']
         rulectx = repo[self.node]
         parentctxnode = self.state.parentctxnode
         if ctx.node() == parentctxnode:
-            repo.ui.warn(_('%s: empty changeset\n') % node.short(self.node))
+            repo.ui.warn(_(b'%s: empty changeset\n') % node.short(self.node))
             return ctx, [(self.node, (parentctxnode,))]
 
         parentctx = repo[parentctxnode]
         newcommits = set(
             c.node()
-            for c in repo.set('(%d::. - %d)', parentctx.rev(), parentctx.rev())
+            for c in repo.set(b'(%d::. - %d)', parentctx.rev(), parentctx.rev())
         )
         if not newcommits:
             repo.ui.warn(
                 _(
-                    '%s: cannot fold - working copy is not a '
-                    'descendant of previous commit %s\n'
+                    b'%s: cannot fold - working copy is not a '
+                    b'descendant of previous commit %s\n'
                 )
                 % (node.short(self.node), node.short(parentctxnode))
             )
@@ -879,38 +880,38 @@
         hg.updaterepo(repo, parent, overwrite=False)
         ### prepare new commit data
         commitopts = {}
-        commitopts['user'] = ctx.user()
+        commitopts[b'user'] = ctx.user()
         # commit message
         if not self.mergedescs():
             newmessage = ctx.description()
         else:
             newmessage = (
-                '\n***\n'.join(
+                b'\n***\n'.join(
                     [ctx.description()]
                     + [repo[r].description() for r in internalchanges]
                     + [oldctx.description()]
                 )
-                + '\n'
+                + b'\n'
             )
-        commitopts['message'] = newmessage
+        commitopts[b'message'] = newmessage
         # date
         if self.firstdate():
-            commitopts['date'] = ctx.date()
+            commitopts[b'date'] = ctx.date()
         else:
-            commitopts['date'] = max(ctx.date(), oldctx.date())
+            commitopts[b'date'] = max(ctx.date(), oldctx.date())
         # if date is to be updated to current
-        if ui.configbool('rewrite', 'update-timestamp'):
-            commitopts['date'] = dateutil.makedate()
+        if ui.configbool(b'rewrite', b'update-timestamp'):
+            commitopts[b'date'] = dateutil.makedate()
 
         extra = ctx.extra().copy()
         # histedit_source
         # note: ctx is likely a temporary commit but that's the best we can do
         #       here. This is sufficient to solve issue3681 anyway.
-        extra['histedit_source'] = '%s,%s' % (ctx.hex(), oldctx.hex())
-        commitopts['extra'] = extra
+        extra[b'histedit_source'] = b'%s,%s' % (ctx.hex(), oldctx.hex())
+        commitopts[b'extra'] = extra
         phasemin = max(ctx.phase(), oldctx.phase())
-        overrides = {('phases', 'new-commit'): phasemin}
-        with repo.ui.configoverride(overrides, 'histedit'):
+        overrides = {(b'phases', b'new-commit'): phasemin}
+        with repo.ui.configoverride(overrides, b'histedit'):
             n = collapse(
                 repo,
                 ctx,
@@ -932,12 +933,12 @@
 
 
 @action(
-    ['base', 'b'],
-    _('checkout changeset and apply further changesets from there'),
+    [b'base', b'b'],
+    _(b'checkout changeset and apply further changesets from there'),
 )
 class base(histeditaction):
     def run(self):
-        if self.repo['.'].node() != self.node:
+        if self.repo[b'.'].node() != self.node:
             mergemod.update(self.repo, self.node, branchmerge=False, force=True)
         return self.continueclean()
 
@@ -945,21 +946,21 @@
         abortdirty()
 
     def continueclean(self):
-        basectx = self.repo['.']
+        basectx = self.repo[b'.']
         return basectx, []
 
     def _verifynodeconstraints(self, prev, expected, seen):
         # base can only be used with a node not in the edited set
         if self.node in expected:
-            msg = _('%s "%s" changeset was an edited list candidate')
+            msg = _(b'%s "%s" changeset was an edited list candidate')
             raise error.ParseError(
                 msg % (self.verb, node.short(self.node)),
-                hint=_('base must only use unlisted changesets'),
+                hint=_(b'base must only use unlisted changesets'),
             )
 
 
 @action(
-    ['_multifold'],
+    [b'_multifold'],
     _(
         """fold subclass used for when multiple folds happen in a row
 
@@ -978,8 +979,8 @@
 
 
 @action(
-    ["roll", "r"],
-    _("like fold, but discard this commit's description and date"),
+    [b"roll", b"r"],
+    _(b"like fold, but discard this commit's description and date"),
 )
 class rollup(fold):
     def mergedescs(self):
@@ -992,7 +993,7 @@
         return True
 
 
-@action(["drop", "d"], _('remove commit from history'))
+@action([b"drop", b"d"], _(b'remove commit from history'))
 class drop(histeditaction):
     def run(self):
         parentctx = self.repo[self.state.parentctxnode]
@@ -1000,13 +1001,13 @@
 
 
 @action(
-    ["mess", "m"],
-    _('edit commit message without changing commit content'),
+    [b"mess", b"m"],
+    _(b'edit commit message without changing commit content'),
     priority=True,
 )
 class message(histeditaction):
     def commiteditor(self):
-        return cmdutil.getcommiteditor(edit=True, editform='histedit.mess')
+        return cmdutil.getcommiteditor(edit=True, editform=b'histedit.mess')
 
 
 def findoutgoing(ui, repo, remote=None, force=False, opts=None):
@@ -1015,9 +1016,9 @@
     Used by initialization code"""
     if opts is None:
         opts = {}
-    dest = ui.expandpath(remote or 'default-push', remote or 'default')
+    dest = ui.expandpath(remote or b'default-push', remote or b'default')
     dest, branches = hg.parseurl(dest, None)[:2]
-    ui.status(_('comparing with %s\n') % util.hidepassword(dest))
+    ui.status(_(b'comparing with %s\n') % util.hidepassword(dest))
 
     revs, checkout = hg.addbranchrevs(repo, repo, branches, None)
     other = hg.peer(repo, opts, dest)
@@ -1027,11 +1028,11 @@
 
     outgoing = discovery.findcommonoutgoing(repo, other, revs, force=force)
     if not outgoing.missing:
-        raise error.Abort(_('no outgoing ancestors'))
-    roots = list(repo.revs("roots(%ln)", outgoing.missing))
+        raise error.Abort(_(b'no outgoing ancestors'))
+    roots = list(repo.revs(b"roots(%ln)", outgoing.missing))
     if len(roots) > 1:
-        msg = _('there are ambiguous outgoing revisions')
-        hint = _("see 'hg help histedit' for more detail")
+        msg = _(b'there are ambiguous outgoing revisions')
+        hint = _(b"see 'hg help histedit' for more detail")
         raise error.Abort(msg, hint=hint)
     return repo[roots[0]].node()
 
@@ -1042,10 +1043,10 @@
 except ImportError:
     curses = None
 
-KEY_LIST = ['pick', 'edit', 'fold', 'drop', 'mess', 'roll']
+KEY_LIST = [b'pick', b'edit', b'fold', b'drop', b'mess', b'roll']
 ACTION_LABELS = {
-    'fold': '^fold',
-    'roll': '^roll',
+    b'fold': b'^fold',
+    b'roll': b'^roll',
 }
 
 COLOR_HELP, COLOR_SELECTED, COLOR_OK, COLOR_WARN, COLOR_CURRENT = 1, 2, 3, 4, 5
@@ -1056,56 +1057,56 @@
 MODE_INIT, MODE_PATCH, MODE_RULES, MODE_HELP = 0, 1, 2, 3
 
 KEYTABLE = {
-    'global': {
-        'h': 'next-action',
-        'KEY_RIGHT': 'next-action',
-        'l': 'prev-action',
-        'KEY_LEFT': 'prev-action',
-        'q': 'quit',
-        'c': 'histedit',
-        'C': 'histedit',
-        'v': 'showpatch',
-        '?': 'help',
+    b'global': {
+        b'h': b'next-action',
+        b'KEY_RIGHT': b'next-action',
+        b'l': b'prev-action',
+        b'KEY_LEFT': b'prev-action',
+        b'q': b'quit',
+        b'c': b'histedit',
+        b'C': b'histedit',
+        b'v': b'showpatch',
+        b'?': b'help',
     },
     MODE_RULES: {
-        'd': 'action-drop',
-        'e': 'action-edit',
-        'f': 'action-fold',
-        'm': 'action-mess',
-        'p': 'action-pick',
-        'r': 'action-roll',
-        ' ': 'select',
-        'j': 'down',
-        'k': 'up',
-        'KEY_DOWN': 'down',
-        'KEY_UP': 'up',
-        'J': 'move-down',
-        'K': 'move-up',
-        'KEY_NPAGE': 'move-down',
-        'KEY_PPAGE': 'move-up',
-        '0': 'goto',  # Used for 0..9
+        b'd': b'action-drop',
+        b'e': b'action-edit',
+        b'f': b'action-fold',
+        b'm': b'action-mess',
+        b'p': b'action-pick',
+        b'r': b'action-roll',
+        b' ': b'select',
+        b'j': b'down',
+        b'k': b'up',
+        b'KEY_DOWN': b'down',
+        b'KEY_UP': b'up',
+        b'J': b'move-down',
+        b'K': b'move-up',
+        b'KEY_NPAGE': b'move-down',
+        b'KEY_PPAGE': b'move-up',
+        b'0': b'goto',  # Used for 0..9
     },
     MODE_PATCH: {
-        ' ': 'page-down',
-        'KEY_NPAGE': 'page-down',
-        'KEY_PPAGE': 'page-up',
-        'j': 'line-down',
-        'k': 'line-up',
-        'KEY_DOWN': 'line-down',
-        'KEY_UP': 'line-up',
-        'J': 'down',
-        'K': 'up',
+        b' ': b'page-down',
+        b'KEY_NPAGE': b'page-down',
+        b'KEY_PPAGE': b'page-up',
+        b'j': b'line-down',
+        b'k': b'line-up',
+        b'KEY_DOWN': b'line-down',
+        b'KEY_UP': b'line-up',
+        b'J': b'down',
+        b'K': b'up',
     },
     MODE_HELP: {},
 }
 
 
 def screen_size():
-    return struct.unpack('hh', fcntl.ioctl(1, termios.TIOCGWINSZ, '    '))
+    return struct.unpack(b'hh', fcntl.ioctl(1, termios.TIOCGWINSZ, b'    '))
 
 
 class histeditrule(object):
-    def __init__(self, ctx, pos, action='pick'):
+    def __init__(self, ctx, pos, action=b'pick'):
         self.ctx = ctx
         self.action = action
         self.origpos = pos
@@ -1128,9 +1129,9 @@
         h = self.ctx.hex()[0:12]
         r = self.ctx.rev()
         desc = self.ctx.description().splitlines()[0].strip()
-        if self.action == 'roll':
-            desc = ''
-        return "#{0:<2} {1:<6} {2}:{3}   {4}".format(
+        if self.action == b'roll':
+            desc = b''
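+        # bytes have no .format() on Python 3; use %-style formatting (PEP 461)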
+        return b"#{0:<2} {1:<6} {2}:{3}   {4}".format(
             self.origpos, action, r, h, desc
         )
 
@@ -1149,39 +1150,39 @@
 def movecursor(state, oldpos, newpos):
     '''Change the rule/changeset that the cursor is pointing to, regardless of
     current mode (you can switch between patches from the view patch window).'''
-    state['pos'] = newpos
-
-    mode, _ = state['mode']
+    state[b'pos'] = newpos
+
+    mode, _ = state[b'mode']
     if mode == MODE_RULES:
         # Scroll through the list by updating the view for MODE_RULES, so that
         # even if we are not currently viewing the rules, switching back will
         # result in the cursor's rule being visible.
-        modestate = state['modes'][MODE_RULES]
-        if newpos < modestate['line_offset']:
-            modestate['line_offset'] = newpos
-        elif newpos > modestate['line_offset'] + state['page_height'] - 1:
-            modestate['line_offset'] = newpos - state['page_height'] + 1
+        modestate = state[b'modes'][MODE_RULES]
+        if newpos < modestate[b'line_offset']:
+            modestate[b'line_offset'] = newpos
+        elif newpos > modestate[b'line_offset'] + state[b'page_height'] - 1:
+            modestate[b'line_offset'] = newpos - state[b'page_height'] + 1
 
     # Reset the patch view region to the top of the new patch.
-    state['modes'][MODE_PATCH]['line_offset'] = 0
+    state[b'modes'][MODE_PATCH][b'line_offset'] = 0
 
 
 def changemode(state, mode):
-    curmode, _ = state['mode']
-    state['mode'] = (mode, curmode)
+    curmode, _ = state[b'mode']
+    state[b'mode'] = (mode, curmode)
     if mode == MODE_PATCH:
-        state['modes'][MODE_PATCH]['patchcontents'] = patchcontents(state)
+        state[b'modes'][MODE_PATCH][b'patchcontents'] = patchcontents(state)
 
 
 def makeselection(state, pos):
-    state['selected'] = pos
+    state[b'selected'] = pos
 
 
 def swap(state, oldpos, newpos):
     """Swap two positions and calculate necessary conflicts in
     O(|newpos-oldpos|) time"""
 
-    rules = state['rules']
+    rules = state[b'rules']
     assert 0 <= oldpos < len(rules) and 0 <= newpos < len(rules)
 
     rules[oldpos], rules[newpos] = rules[newpos], rules[oldpos]
@@ -1196,13 +1197,13 @@
         rules[newpos].checkconflicts(rules[r])
         rules[oldpos].checkconflicts(rules[r])
 
-    if state['selected']:
+    if state[b'selected']:
         makeselection(state, newpos)
 
 
 def changeaction(state, pos, action):
     """Change the action state on the given position to the new action"""
-    rules = state['rules']
+    rules = state[b'rules']
     assert 0 <= pos < len(rules)
     rules[pos].action = action
 
@@ -1210,7 +1211,7 @@
 def cycleaction(state, pos, next=False):
     """Changes the action state the next or the previous action from
     the action list"""
-    rules = state['rules']
+    rules = state[b'rules']
     assert 0 <= pos < len(rules)
     current = rules[pos].action
 
@@ -1227,17 +1228,17 @@
 def changeview(state, delta, unit):
     '''Change the region of whatever is being viewed (a patch or the list of
     changesets). 'delta' is an amount (+/- 1) and 'unit' is 'page' or 'line'.'''
-    mode, _ = state['mode']
+    mode, _ = state[b'mode']
     if mode != MODE_PATCH:
         return
-    mode_state = state['modes'][mode]
-    num_lines = len(mode_state['patchcontents'])
-    page_height = state['page_height']
-    unit = page_height if unit == 'page' else 1
+    mode_state = state[b'modes'][mode]
+    num_lines = len(mode_state[b'patchcontents'])
+    page_height = state[b'page_height']
+    unit = page_height if unit == b'page' else 1
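+    # integer division: '/' would make the page offsets floats on Python 3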
-    num_pages = 1 + (num_lines - 1) / page_height
+    num_pages = 1 + (num_lines - 1) // page_height
     max_offset = (num_pages - 1) * page_height
-    newline = mode_state['line_offset'] + delta * unit
-    mode_state['line_offset'] = max(0, min(max_offset, newline))
+    newline = mode_state[b'line_offset'] + delta * unit
+    mode_state[b'line_offset'] = max(0, min(max_offset, newline))
 
 
 def event(state, ch):
@@ -1246,60 +1247,62 @@
     This takes the current state and, based on the current character input
     from the user, changes the state.
     """
-    selected = state['selected']
-    oldpos = state['pos']
-    rules = state['rules']
-
-    if ch in (curses.KEY_RESIZE, "KEY_RESIZE"):
+    selected = state[b'selected']
+    oldpos = state[b'pos']
+    rules = state[b'rules']
+
+    if ch in (curses.KEY_RESIZE, b"KEY_RESIZE"):
         return E_RESIZE
 
     lookup_ch = ch
-    if '0' <= ch <= '9':
-        lookup_ch = '0'
-
-    curmode, prevmode = state['mode']
-    action = KEYTABLE[curmode].get(lookup_ch, KEYTABLE['global'].get(lookup_ch))
+    if b'0' <= ch <= b'9':
+        lookup_ch = b'0'
+
+    curmode, prevmode = state[b'mode']
+    action = KEYTABLE[curmode].get(
+        lookup_ch, KEYTABLE[b'global'].get(lookup_ch)
+    )
     if action is None:
         return
-    if action in ('down', 'move-down'):
+    if action in (b'down', b'move-down'):
         newpos = min(oldpos + 1, len(rules) - 1)
         movecursor(state, oldpos, newpos)
-        if selected is not None or action == 'move-down':
+        if selected is not None or action == b'move-down':
             swap(state, oldpos, newpos)
-    elif action in ('up', 'move-up'):
+    elif action in (b'up', b'move-up'):
         newpos = max(0, oldpos - 1)
         movecursor(state, oldpos, newpos)
-        if selected is not None or action == 'move-up':
+        if selected is not None or action == b'move-up':
             swap(state, oldpos, newpos)
-    elif action == 'next-action':
+    elif action == b'next-action':
         cycleaction(state, oldpos, next=True)
-    elif action == 'prev-action':
+    elif action == b'prev-action':
         cycleaction(state, oldpos, next=False)
-    elif action == 'select':
+    elif action == b'select':
         selected = oldpos if selected is None else None
         makeselection(state, selected)
-    elif action == 'goto' and int(ch) < len(rules) and len(rules) <= 10:
+    elif action == b'goto' and int(ch) < len(rules) and len(rules) <= 10:
         newrule = next((r for r in rules if r.origpos == int(ch)))
         movecursor(state, oldpos, newrule.pos)
         if selected is not None:
             swap(state, oldpos, newrule.pos)
-    elif action.startswith('action-'):
+    elif action.startswith(b'action-'):
         changeaction(state, oldpos, action[7:])
-    elif action == 'showpatch':
+    elif action == b'showpatch':
         changemode(state, MODE_PATCH if curmode != MODE_PATCH else prevmode)
-    elif action == 'help':
+    elif action == b'help':
         changemode(state, MODE_HELP if curmode != MODE_HELP else prevmode)
-    elif action == 'quit':
+    elif action == b'quit':
         return E_QUIT
-    elif action == 'histedit':
+    elif action == b'histedit':
         return E_HISTEDIT
-    elif action == 'page-down':
+    elif action == b'page-down':
         return E_PAGEDOWN
-    elif action == 'page-up':
+    elif action == b'page-up':
         return E_PAGEUP
-    elif action == 'line-down':
+    elif action == b'line-down':
         return E_LINEDOWN
-    elif action == 'line-up':
+    elif action == b'line-up':
         return E_LINEUP
 
 
@@ -1308,7 +1311,7 @@
     our list of rules"""
     commands = []
     for rules in rules:
-        commands.append("{0} {1}\n".format(rules.action, rules.ctx))
+        commands.append(b"{0} {1}\n".format(rules.action, rules.ctx))
     return commands
 
 
@@ -1317,7 +1320,7 @@
     whitespace characters, so that the color appears on the whole line"""
     maxy, maxx = win.getmaxyx()
     length = maxx - 1 - x
-    line = ("{0:<%d}" % length).format(str(line).strip())[:length]
+    line = (b"{0:<%d}" % length).format(str(line).strip())[:length]
     if y < 0:
         y = maxy + y
     if x < 0:
@@ -1331,23 +1334,23 @@
 def _trunc_head(line, n):
     if len(line) <= n:
         return line
-    return '> ' + line[-(n - 2) :]
+    return b'> ' + line[-(n - 2) :]
 
 
 def _trunc_tail(line, n):
     if len(line) <= n:
         return line
-    return line[: n - 2] + ' >'
+    return line[: n - 2] + b' >'
 
 
 def patchcontents(state):
-    repo = state['repo']
-    rule = state['rules'][state['pos']]
+    repo = state[b'repo']
+    rule = state[b'rules'][state[b'pos']]
     displayer = logcmdutil.changesetdisplayer(
-        repo.ui, repo, {"patch": True, "template": "status"}, buffered=True
+        repo.ui, repo, {b"patch": True, b"template": b"status"}, buffered=True
     )
-    overrides = {('ui', 'verbose'): True}
-    with repo.ui.configoverride(overrides, source='histedit'):
+    overrides = {(b'ui', b'verbose'): True}
+    with repo.ui.configoverride(overrides, source=b'histedit'):
         displayer.show(rule.ctx)
         displayer.close()
     return displayer.hunk[rule.ctx.rev()].splitlines()
@@ -1378,8 +1381,8 @@
     def rendercommit(win, state):
         """Renders the commit window that shows the log of the current selected
         commit"""
-        pos = state['pos']
-        rules = state['rules']
+        pos = state[b'pos']
+        rules = state[b'rules']
         rule = rules[pos]
 
         ctx = rule.ctx
@@ -1388,20 +1391,20 @@
         maxy, maxx = win.getmaxyx()
         length = maxx - 3
 
-        line = "changeset: {0}:{1:<12}".format(ctx.rev(), ctx)
+        line = b"changeset: {0}:{1:<12}".format(ctx.rev(), ctx)
         win.addstr(1, 1, line[:length])
 
-        line = "user:      {0}".format(ctx.user())
+        line = b"user:      {0}".format(ctx.user())
         win.addstr(2, 1, line[:length])
 
         bms = repo.nodebookmarks(ctx.node())
-        line = "bookmark:  {0}".format(' '.join(bms))
+        line = b"bookmark:  {0}".format(b' '.join(bms))
         win.addstr(3, 1, line[:length])
 
-        line = "summary:   {0}".format(ctx.description().splitlines()[0])
+        line = b"summary:   {0}".format(ctx.description().splitlines()[0])
         win.addstr(4, 1, line[:length])
 
-        line = "files:     "
+        line = b"files:     "
         win.addstr(5, 1, line)
         fnx = 1 + len(line)
         fnmaxx = length - fnx + 1
@@ -1410,7 +1413,7 @@
         files = ctx.files()
         for i, line1 in enumerate(files):
             if len(files) > fnmaxn and i == fnmaxn - 1:
-                win.addstr(y, fnx, _trunc_tail(','.join(files[i:]), fnmaxx))
+                win.addstr(y, fnx, _trunc_tail(b','.join(files[i:]), fnmaxx))
                 y = y + 1
                 break
             win.addstr(y, fnx, _trunc_head(line1, fnmaxx))
@@ -1418,22 +1421,22 @@
 
         conflicts = rule.conflicts
         if len(conflicts) > 0:
-            conflictstr = ','.join(map(lambda r: str(r.ctx), conflicts))
-            conflictstr = "changed files overlap with {0}".format(conflictstr)
+            conflictstr = b','.join(
+                map(lambda r: pycompat.bytestr(r.ctx), conflicts)
+            )
+            conflictstr = b"changed files overlap with %s" % conflictstr
         else:
-            conflictstr = 'no overlap'
+            conflictstr = b'no overlap'
 
         win.addstr(y, 1, conflictstr[:length])
         win.noutrefresh()
 
     def helplines(mode):
         if mode == MODE_PATCH:
-            help = """\
+            help = b"""\
 ?: help, k/up: line up, j/down: line down, v: stop viewing patch
 pgup: prev page, space/pgdn: next page, c: commit, q: abort
 """
         else:
-            help = """\
+            help = b"""\
 ?: help, k/up: move up, j/down: move down, space: select, v: view patch
 d: drop, e: edit, f: fold, m: mess, p: pick, r: roll
 pgup/K: move patch up, pgdn/J: move patch down, c: commit, q: abort
@@ -1442,7 +1445,7 @@
 
     def renderhelp(win, state):
         maxy, maxx = win.getmaxyx()
-        mode, _ = state['mode']
+        mode, _ = state[b'mode']
         for y, line in enumerate(helplines(mode)):
             if y >= maxy:
                 break
@@ -1450,23 +1453,23 @@
         win.noutrefresh()
 
     def renderrules(rulesscr, state):
-        rules = state['rules']
-        pos = state['pos']
-        selected = state['selected']
-        start = state['modes'][MODE_RULES]['line_offset']
+        rules = state[b'rules']
+        pos = state[b'pos']
+        selected = state[b'selected']
+        start = state[b'modes'][MODE_RULES][b'line_offset']
 
         conflicts = [r.ctx for r in rules if r.conflicts]
         if len(conflicts) > 0:
-            line = "potential conflict in %s" % ','.join(map(str, conflicts))
+            line = b"potential conflict in %s" % b','.join(map(str, conflicts))
             addln(rulesscr, -1, 0, line, curses.color_pair(COLOR_WARN))
 
         for y, rule in enumerate(rules[start:]):
-            if y >= state['page_height']:
+            if y >= state[b'page_height']:
                 break
             if len(rule.conflicts) > 0:
-                rulesscr.addstr(y, 0, " ", curses.color_pair(COLOR_WARN))
+                rulesscr.addstr(y, 0, b" ", curses.color_pair(COLOR_WARN))
             else:
-                rulesscr.addstr(y, 0, " ", curses.COLOR_BLACK)
+                rulesscr.addstr(y, 0, b" ", curses.COLOR_BLACK)
             if y + start == selected:
                 addln(rulesscr, y, 2, rule, curses.color_pair(COLOR_SELECTED))
             elif y + start == pos:
@@ -1487,15 +1490,15 @@
         for y in range(0, length):
             line = output[y]
             if diffcolors:
-                if line and line[0] == '+':
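+                # indexing bytes yields an int on Python 3; use startswith()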
+                if line.startswith(b'+'):
                     win.addstr(
                         y, 0, line, curses.color_pair(COLOR_DIFF_ADD_LINE)
                     )
-                elif line and line[0] == '-':
+                elif line.startswith(b'-'):
                     win.addstr(
                         y, 0, line, curses.color_pair(COLOR_DIFF_DEL_LINE)
                     )
-                elif line.startswith('@@ '):
+                elif line.startswith(b'@@ '):
                     win.addstr(y, 0, line, curses.color_pair(COLOR_DIFF_OFFSET))
                 else:
                     win.addstr(y, 0, line)
@@ -1504,17 +1507,17 @@
         win.noutrefresh()
 
     def renderpatch(win, state):
-        start = state['modes'][MODE_PATCH]['line_offset']
-        content = state['modes'][MODE_PATCH]['patchcontents']
+        start = state[b'modes'][MODE_PATCH][b'line_offset']
+        content = state[b'modes'][MODE_PATCH][b'patchcontents']
         renderstring(win, state, content[start:], diffcolors=True)
 
     def layout(mode):
         maxy, maxx = stdscr.getmaxyx()
         helplen = len(helplines(mode))
         return {
-            'commit': (12, maxx),
-            'help': (helplen, maxx),
-            'main': (maxy - helplen - 12, maxx),
+            b'commit': (12, maxx),
+            b'help': (helplen, maxx),
+            b'main': (maxy - helplen - 12, maxx),
         }
 
     def drawvertwin(size, y, x):
@@ -1523,16 +1526,16 @@
         return win, y, x
 
     state = {
-        'pos': 0,
-        'rules': rules,
-        'selected': None,
-        'mode': (MODE_INIT, MODE_INIT),
-        'page_height': None,
-        'modes': {
-            MODE_RULES: {'line_offset': 0,},
-            MODE_PATCH: {'line_offset': 0,},
+        b'pos': 0,
+        b'rules': rules,
+        b'selected': None,
+        b'mode': (MODE_INIT, MODE_INIT),
+        b'page_height': None,
+        b'modes': {
+            MODE_RULES: {b'line_offset': 0,},
+            MODE_PATCH: {b'line_offset': 0,},
         },
-        'repo': repo,
+        b'repo': repo,
     }
 
     # eventloop
@@ -1541,7 +1544,7 @@
     stdscr.refresh()
     while True:
         try:
-            oldmode, _ = state['mode']
+            oldmode, _ = state[b'mode']
             if oldmode == MODE_INIT:
                 changemode(state, MODE_RULES)
             e = event(state, ch)
@@ -1549,36 +1552,36 @@
             if e == E_QUIT:
                 return False
             if e == E_HISTEDIT:
-                return state['rules']
+                return state[b'rules']
             else:
                 if e == E_RESIZE:
                     size = screen_size()
                     if size != stdscr.getmaxyx():
                         curses.resizeterm(*size)
 
-                curmode, _ = state['mode']
+                curmode, _ = state[b'mode']
                 sizes = layout(curmode)
                 if curmode != oldmode:
-                    state['page_height'] = sizes['main'][0]
+                    state[b'page_height'] = sizes[b'main'][0]
                     # Adjust the view to fit the current screen size.
-                    movecursor(state, state['pos'], state['pos'])
+                    movecursor(state, state[b'pos'], state[b'pos'])
 
                 # Pack the windows against the top, each pane spread across the
                 # full width of the screen.
                 y, x = (0, 0)
-                helpwin, y, x = drawvertwin(sizes['help'], y, x)
-                mainwin, y, x = drawvertwin(sizes['main'], y, x)
-                commitwin, y, x = drawvertwin(sizes['commit'], y, x)
+                helpwin, y, x = drawvertwin(sizes[b'help'], y, x)
+                mainwin, y, x = drawvertwin(sizes[b'main'], y, x)
+                commitwin, y, x = drawvertwin(sizes[b'commit'], y, x)
 
                 if e in (E_PAGEDOWN, E_PAGEUP, E_LINEDOWN, E_LINEUP):
                     if e == E_PAGEDOWN:
-                        changeview(state, +1, 'page')
+                        changeview(state, +1, b'page')
                     elif e == E_PAGEUP:
-                        changeview(state, -1, 'page')
+                        changeview(state, -1, b'page')
                     elif e == E_LINEDOWN:
-                        changeview(state, +1, 'line')
+                        changeview(state, +1, b'line')
                     elif e == E_LINEUP:
-                        changeview(state, -1, 'line')
+                        changeview(state, -1, b'line')
 
                 # start rendering
                 commitwin.erase()
@@ -1606,22 +1609,22 @@
     to see extensive help. Requires python-curses to be installed."""
 
     if curses is None:
-        raise error.Abort(_("Python curses library required"))
+        raise error.Abort(_(b"Python curses library required"))
 
     # disable color
     ui._colormode = None
 
     try:
-        keep = opts.get('keep')
-        revs = opts.get('rev', [])[:]
+        keep = opts.get(b'keep')
+        revs = opts.get(b'rev', [])[:]
         cmdutil.checkunfinished(repo)
         cmdutil.bailifchanged(repo)
 
-        if os.path.exists(os.path.join(repo.path, 'histedit-state')):
+        if os.path.exists(os.path.join(repo.path, b'histedit-state')):
             raise error.Abort(
                 _(
-                    'history edit already in progress, try '
-                    '--continue or --abort'
+                    b'history edit already in progress, try '
+                    b'--continue or --abort'
                 )
             )
         revs.extend(freeargs)
@@ -1631,15 +1634,15 @@
                 revs.append(defaultrev)
         if len(revs) != 1:
             raise error.Abort(
-                _('histedit requires exactly one ancestor revision')
+                _(b'histedit requires exactly one ancestor revision')
             )
 
-        rr = list(repo.set('roots(%ld)', scmutil.revrange(repo, revs)))
+        rr = list(repo.set(b'roots(%ld)', scmutil.revrange(repo, revs)))
         if len(rr) != 1:
             raise error.Abort(
                 _(
-                    'The specified revisions must have '
-                    'exactly one common root'
+                    b'The specified revisions must have '
+                    b'exactly one common root'
                 )
             )
         root = rr[0].node()
@@ -1648,7 +1651,7 @@
         revs = between(repo, root, topmost, keep)
         if not revs:
             raise error.Abort(
-                _('%s is not an ancestor of working directory')
+                _(b'%s is not an ancestor of working directory')
                 % node.short(root)
             )
 
@@ -1663,16 +1666,16 @@
         curses.echo()
         curses.endwin()
         if rc is False:
-            ui.write(_("histedit aborted\n"))
+            ui.write(_(b"histedit aborted\n"))
             return 0
         if type(rc) is list:
-            ui.status(_("performing changes\n"))
+            ui.status(_(b"performing changes\n"))
             rules = makecommands(rc)
-            filename = repo.vfs.join('chistedit')
-            with open(filename, 'w+') as fp:
+            filename = repo.vfs.join(b'chistedit')
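+            # open() needs a native str mode on Python 3; the rules are bytes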
+            with open(filename, 'wb') as fp:
                 for r in rules:
                     fp.write(r)
-            opts['commands'] = filename
+            opts[b'commands'] = filename
             return _texthistedit(ui, repo, *freeargs, **opts)
     except KeyboardInterrupt:
         pass
@@ -1680,30 +1683,35 @@
 
 
 @command(
-    'histedit',
+    b'histedit',
     [
         (
-            '',
-            'commands',
-            '',
-            _('read history edits from the specified file'),
-            _('FILE'),
+            b'',
+            b'commands',
+            b'',
+            _(b'read history edits from the specified file'),
+            _(b'FILE'),
         ),
-        ('c', 'continue', False, _('continue an edit already in progress')),
-        ('', 'edit-plan', False, _('edit remaining actions list')),
-        ('k', 'keep', False, _("don't strip old nodes after edit is complete")),
-        ('', 'abort', False, _('abort an edit in progress')),
-        ('o', 'outgoing', False, _('changesets not found in destination')),
+        (b'c', b'continue', False, _(b'continue an edit already in progress')),
+        (b'', b'edit-plan', False, _(b'edit remaining actions list')),
         (
-            'f',
-            'force',
+            b'k',
+            b'keep',
             False,
-            _('force outgoing even for unrelated repositories'),
+            _(b"don't strip old nodes after edit is complete"),
         ),
-        ('r', 'rev', [], _('first revision to be edited'), _('REV')),
+        (b'', b'abort', False, _(b'abort an edit in progress')),
+        (b'o', b'outgoing', False, _(b'changesets not found in destination')),
+        (
+            b'f',
+            b'force',
+            False,
+            _(b'force outgoing even for unrelated repositories'),
+        ),
+        (b'r', b'rev', [], _(b'first revision to be edited'), _(b'REV')),
     ]
     + cmdutil.formatteropts,
-    _("[OPTIONS] ([ANCESTOR] | --outgoing [URL])"),
+    _(b"[OPTIONS] ([ANCESTOR] | --outgoing [URL])"),
     helpcategory=command.CATEGORY_CHANGE_MANAGEMENT,
 )
 def histedit(ui, repo, *freeargs, **opts):
@@ -1809,7 +1817,7 @@
     # or continuing, so fall back to regular _texthistedit for those
     # operations.
     if (
-        ui.interface('histedit') == 'curses'
+        ui.interface(b'histedit') == b'curses'
         and _getgoal(pycompat.byteskwargs(opts)) == goalnew
     ):
         return _chistedit(ui, repo, *freeargs, **opts)
@@ -1824,10 +1832,10 @@
         _histedit(ui, repo, state, *freeargs, **opts)
 
 
-goalcontinue = 'continue'
-goalabort = 'abort'
-goaleditplan = 'edit-plan'
-goalnew = 'new'
+goalcontinue = b'continue'
+goalabort = b'abort'
+goaleditplan = b'edit-plan'
+goalnew = b'new'
 
 
 def _getgoal(opts):
@@ -1841,11 +1849,11 @@
 
 
 def _readfile(ui, path):
-    if path == '-':
-        with ui.timeblockedsection('histedit'):
+    if path == b'-':
+        with ui.timeblockedsection(b'histedit'):
             return ui.fin.read()
     else:
-        with open(path, 'rb') as f:
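+        # open() mode must stay a native str on Python 3, so leave it unbyteified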
+        with open(path, 'rb') as f:
             return f.read()
 
 
@@ -1854,40 +1862,40 @@
     # blanket if mq patches are applied somewhere
     mq = getattr(repo, 'mq', None)
     if mq and mq.applied:
-        raise error.Abort(_('source has mq patches applied'))
+        raise error.Abort(_(b'source has mq patches applied'))
 
     # basic argument incompatibility processing
-    outg = opts.get('outgoing')
-    editplan = opts.get('edit_plan')
-    abort = opts.get('abort')
-    force = opts.get('force')
+    outg = opts.get(b'outgoing')
+    editplan = opts.get(b'edit_plan')
+    abort = opts.get(b'abort')
+    force = opts.get(b'force')
     if force and not outg:
-        raise error.Abort(_('--force only allowed with --outgoing'))
-    if goal == 'continue':
+        raise error.Abort(_(b'--force only allowed with --outgoing'))
+    if goal == b'continue':
         if any((outg, abort, revs, freeargs, rules, editplan)):
-            raise error.Abort(_('no arguments allowed with --continue'))
-    elif goal == 'abort':
+            raise error.Abort(_(b'no arguments allowed with --continue'))
+    elif goal == b'abort':
         if any((outg, revs, freeargs, rules, editplan)):
-            raise error.Abort(_('no arguments allowed with --abort'))
-    elif goal == 'edit-plan':
+            raise error.Abort(_(b'no arguments allowed with --abort'))
+    elif goal == b'edit-plan':
         if any((outg, revs, freeargs)):
             raise error.Abort(
-                _('only --commands argument allowed with ' '--edit-plan')
+                _(b'only --commands argument allowed with ' b'--edit-plan')
             )
     else:
         if state.inprogress():
             raise error.Abort(
                 _(
-                    'history edit already in progress, try '
-                    '--continue or --abort'
+                    b'history edit already in progress, try '
+                    b'--continue or --abort'
                 )
             )
         if outg:
             if revs:
-                raise error.Abort(_('no revisions allowed with --outgoing'))
+                raise error.Abort(_(b'no revisions allowed with --outgoing'))
             if len(freeargs) > 1:
                 raise error.Abort(
-                    _('only one repo argument allowed with --outgoing')
+                    _(b'only one repo argument allowed with --outgoing')
                 )
         else:
             revs.extend(freeargs)
@@ -1898,19 +1906,19 @@
 
             if len(revs) != 1:
                 raise error.Abort(
-                    _('histedit requires exactly one ancestor revision')
+                    _(b'histedit requires exactly one ancestor revision')
                 )
 
 
 def _histedit(ui, repo, state, *freeargs, **opts):
     opts = pycompat.byteskwargs(opts)
-    fm = ui.formatter('histedit', opts)
+    fm = ui.formatter(b'histedit', opts)
     fm.startitem()
     goal = _getgoal(opts)
-    revs = opts.get('rev', [])
-    nobackup = not ui.configbool('rewrite', 'backup-bundle')
-    rules = opts.get('commands', '')
-    state.keep = opts.get('keep', False)
+    revs = opts.get(b'rev', [])
+    nobackup = not ui.configbool(b'rewrite', b'backup-bundle')
+    rules = opts.get(b'commands', b'')
+    state.keep = opts.get(b'keep', False)
 
     _validateargs(ui, repo, state, freeargs, opts, goal, rules, revs)
 
@@ -1919,19 +1927,19 @@
         revs = scmutil.revrange(repo, revs)
         ctxs = [repo[rev] for rev in revs]
         for ctx in ctxs:
-            tags = [tag for tag in ctx.tags() if tag != 'tip']
+            tags = [tag for tag in ctx.tags() if tag != b'tip']
             if not hastags:
                 hastags = len(tags)
     if hastags:
         if ui.promptchoice(
             _(
-                'warning: tags associated with the given'
-                ' changeset will be lost after histedit.\n'
-                'do you want to continue (yN)? $$ &Yes $$ &No'
+                b'warning: tags associated with the given'
+                b' changeset will be lost after histedit.\n'
+                b'do you want to continue (yN)? $$ &Yes $$ &No'
             ),
             default=1,
         ):
-            raise error.Abort(_('histedit cancelled\n'))
+            raise error.Abort(_(b'histedit cancelled\n'))
     # rebuild state
     if goal == goalcontinue:
         state.read()
@@ -1960,7 +1968,7 @@
     # and only show one editor
     actions = state.actions[:]
     for idx, (action, nextact) in enumerate(zip(actions, actions[1:] + [None])):
-        if action.verb == 'fold' and nextact and nextact.verb == 'fold':
+        if action.verb == b'fold' and nextact and nextact.verb == b'fold':
             state.actions[idx].__class__ = _multifold
 
     # Force an initial state file write, so the user can run --abort/continue
@@ -1971,13 +1979,13 @@
     # Don't use singletransaction by default since it rolls the entire
     # transaction back if an unexpected exception happens (like a
     # pretxncommit hook throws, or the user aborts the commit msg editor).
-    if ui.configbool("histedit", "singletransaction"):
+    if ui.configbool(b"histedit", b"singletransaction"):
         # Don't use a 'with' for the transaction, since actions may close
         # and reopen a transaction. For example, if the action executes an
         # external process it may choose to commit the transaction first.
-        tr = repo.transaction('histedit')
+        tr = repo.transaction(b'histedit')
     progress = ui.makeprogress(
-        _("editing"), unit=_('changes'), total=len(state.actions)
+        _(b"editing"), unit=_(b'changes'), total=len(state.actions)
     )
     with progress, util.acceptintervention(tr):
         while state.actions:
@@ -1985,7 +1993,7 @@
             actobj = state.actions[0]
             progress.increment(item=actobj.torule())
             ui.debug(
-                'histedit: processing %s %s\n' % (actobj.verb, actobj.torule())
+                b'histedit: processing %s %s\n' % (actobj.verb, actobj.torule())
             )
             parentctx, replacement_ = actobj.run()
             state.parentctxnode = parentctx.node()
@@ -2003,14 +2011,14 @@
     if mapping:
         for prec, succs in mapping.iteritems():
             if not succs:
-                ui.debug('histedit: %s is dropped\n' % node.short(prec))
+                ui.debug(b'histedit: %s is dropped\n' % node.short(prec))
             else:
                 ui.debug(
-                    'histedit: %s is replaced by %s\n'
+                    b'histedit: %s is replaced by %s\n'
                     % (node.short(prec), node.short(succs[0]))
                 )
                 if len(succs) > 1:
-                    m = 'histedit:                            %s'
+                    m = b'histedit:                            %s'
                     for n in succs[1:]:
                         ui.debug(m % node.short(n))
 
@@ -2032,48 +2040,52 @@
         for k, v in mapping.items()
         if k in nodemap and all(n in nodemap for n in v)
     }
-    scmutil.cleanupnodes(repo, mapping, 'histedit')
+    scmutil.cleanupnodes(repo, mapping, b'histedit')
     hf = fm.hexfunc
     fl = fm.formatlist
     fd = fm.formatdict
     nodechanges = fd(
         {
-            hf(oldn): fl([hf(n) for n in newn], name='node')
+            hf(oldn): fl([hf(n) for n in newn], name=b'node')
             for oldn, newn in mapping.iteritems()
         },
-        key="oldnode",
-        value="newnodes",
+        key=b"oldnode",
+        value=b"newnodes",
     )
     fm.data(nodechanges=nodechanges)
 
     state.clear()
-    if os.path.exists(repo.sjoin('undo')):
-        os.unlink(repo.sjoin('undo'))
-    if repo.vfs.exists('histedit-last-edit.txt'):
-        repo.vfs.unlink('histedit-last-edit.txt')
+    if os.path.exists(repo.sjoin(b'undo')):
+        os.unlink(repo.sjoin(b'undo'))
+    if repo.vfs.exists(b'histedit-last-edit.txt'):
+        repo.vfs.unlink(b'histedit-last-edit.txt')
 
 
 def _aborthistedit(ui, repo, state, nobackup=False):
     try:
         state.read()
         __, leafs, tmpnodes, __ = processreplacement(state)
-        ui.debug('restore wc to old parent %s\n' % node.short(state.topmost))
+        ui.debug(b'restore wc to old parent %s\n' % node.short(state.topmost))
 
         # Recover our old commits if necessary
         if not state.topmost in repo and state.backupfile:
             backupfile = repo.vfs.join(state.backupfile)
             f = hg.openpath(ui, backupfile)
             gen = exchange.readbundle(ui, f, backupfile)
-            with repo.transaction('histedit.abort') as tr:
+            with repo.transaction(b'histedit.abort') as tr:
                 bundle2.applybundle(
-                    repo, gen, tr, source='histedit', url='bundle:' + backupfile
+                    repo,
+                    gen,
+                    tr,
+                    source=b'histedit',
+                    url=b'bundle:' + backupfile,
                 )
 
             os.remove(backupfile)
 
         # check whether we should update away
         if repo.unfiltered().revs(
-            'parents() and (%n  or %ln::)',
+            b'parents() and (%n  or %ln::)',
             state.parentctxnode,
             leafs | tmpnodes,
         ):
@@ -2084,9 +2096,9 @@
         if state.inprogress():
             ui.warn(
                 _(
-                    'warning: encountered an exception during histedit '
-                    '--abort; the repository may not have been completely '
-                    'cleaned up\n'
+                    b'warning: encountered an exception during histedit '
+                    b'--abort; the repository may not have been completely '
+                    b'cleaned up\n'
                 )
             )
         raise
@@ -2096,7 +2108,7 @@
 
 def hgaborthistedit(ui, repo):
     state = histeditstate(repo)
-    nobackup = not ui.configbool('rewrite', 'backup-bundle')
+    nobackup = not ui.configbool(b'rewrite', b'backup-bundle')
     with repo.wlock() as wlock, repo.lock() as lock:
         state.wlock = wlock
         state.lock = lock
@@ -2120,9 +2132,9 @@
 
 
 def _newhistedit(ui, repo, state, revs, freeargs, opts):
-    outg = opts.get('outgoing')
-    rules = opts.get('commands', '')
-    force = opts.get('force')
+    outg = opts.get(b'outgoing')
+    rules = opts.get(b'commands', b'')
+    force = opts.get(b'force')
 
     cmdutil.checkunfinished(repo)
     cmdutil.bailifchanged(repo)
@@ -2135,12 +2147,12 @@
             remote = None
         root = findoutgoing(ui, repo, remote, force, opts)
     else:
-        rr = list(repo.set('roots(%ld)', scmutil.revrange(repo, revs)))
+        rr = list(repo.set(b'roots(%ld)', scmutil.revrange(repo, revs)))
         if len(rr) != 1:
             raise error.Abort(
                 _(
-                    'The specified revisions must have '
-                    'exactly one common root'
+                    b'The specified revisions must have '
+                    b'exactly one common root'
                 )
             )
         root = rr[0].node()
@@ -2148,7 +2160,7 @@
     revs = between(repo, root, topmost, state.keep)
     if not revs:
         raise error.Abort(
-            _('%s is not an ancestor of working directory') % node.short(root)
+            _(b'%s is not an ancestor of working directory') % node.short(root)
         )
 
     ctxs = [repo[r] for r in revs]
@@ -2157,7 +2169,7 @@
     # Please don't ask me why `ancestors` is this value. I figured it
     # out with print-debugging, not by actually understanding what the
     # merge code is doing. :(
-    ancs = [repo['.']]
+    ancs = [repo[b'.']]
     # Sniff-test to make sure we won't collide with untracked files in
     # the working directory. If we don't do this, we can get a
     # collision after we've started histedit and backing out gets ugly
@@ -2179,7 +2191,7 @@
         except error.Abort:
             raise error.Abort(
                 _(
-                    "untracked files in working directory conflict with files in %s"
+                    b"untracked files in working directory conflict with files in %s"
                 )
                 % c
             )
@@ -2201,8 +2213,8 @@
     state.replacements = []
 
     ui.log(
-        "histedit",
-        "%d actions to histedit\n",
+        b"histedit",
+        b"%d actions to histedit\n",
         len(actions),
         histedit_num_actions=len(actions),
     )
@@ -2211,7 +2223,7 @@
     backupfile = None
     if not obsolete.isenabled(repo, obsolete.createmarkersopt):
         backupfile = repair.backupbundle(
-            repo, [parentctxnode], [topmost], root, 'histedit'
+            repo, [parentctxnode], [topmost], root, b'histedit'
         )
     state.backupfile = backupfile
 
@@ -2219,7 +2231,7 @@
 def _getsummary(ctx):
     # a common pattern is to extract the summary but default to the empty
     # string
-    summary = ctx.description() or ''
+    summary = ctx.description() or b''
     if summary:
         summary = summary.splitlines()[0]
     return summary
@@ -2251,43 +2263,43 @@
     """select and validate the set of revision to edit
 
     When keep is false, the specified set can't have children."""
-    revs = repo.revs('%n::%n', old, new)
+    revs = repo.revs(b'%n::%n', old, new)
     if revs and not keep:
         if not obsolete.isenabled(
             repo, obsolete.allowunstableopt
-        ) and repo.revs('(%ld::) - (%ld)', revs, revs):
+        ) and repo.revs(b'(%ld::) - (%ld)', revs, revs):
             raise error.Abort(
                 _(
-                    'can only histedit a changeset together '
-                    'with all its descendants'
+                    b'can only histedit a changeset together '
+                    b'with all its descendants'
                 )
             )
-        if repo.revs('(%ld) and merge()', revs):
-            raise error.Abort(_('cannot edit history that contains merges'))
+        if repo.revs(b'(%ld) and merge()', revs):
+            raise error.Abort(_(b'cannot edit history that contains merges'))
         root = repo[revs.first()]  # list is already sorted by repo.revs()
         if not root.mutable():
             raise error.Abort(
-                _('cannot edit public changeset: %s') % root,
-                hint=_("see 'hg help phases' for details"),
+                _(b'cannot edit public changeset: %s') % root,
+                hint=_(b"see 'hg help phases' for details"),
             )
     return pycompat.maplist(repo.changelog.node, revs)
 
 
-def ruleeditor(repo, ui, actions, editcomment=""):
+def ruleeditor(repo, ui, actions, editcomment=b""):
     """open an editor to edit rules
 
     rules are in the format [ [act, ctx], ...] like in state.rules
     """
-    if repo.ui.configbool("experimental", "histedit.autoverb"):
+    if repo.ui.configbool(b"experimental", b"histedit.autoverb"):
         newact = util.sortdict()
         for act in actions:
             ctx = repo[act.node]
             summary = _getsummary(ctx)
-            fword = summary.split(' ', 1)[0].lower()
+            fword = summary.split(b' ', 1)[0].lower()
             added = False
 
             # if it doesn't end with the special character '!' just skip this
-            if fword.endswith('!'):
+            if fword.endswith(b'!'):
                 fword = fword[:-1]
                 if fword in primaryactions | secondaryactions | tertiaryactions:
                     act.verb = fword
@@ -2312,21 +2324,21 @@
             actions.append(na)
             actions += l
 
-    rules = '\n'.join([act.torule() for act in actions])
-    rules += '\n\n'
+    rules = b'\n'.join([act.torule() for act in actions])
+    rules += b'\n\n'
     rules += editcomment
     rules = ui.edit(
         rules,
         ui.username(),
-        {'prefix': 'histedit'},
+        {b'prefix': b'histedit'},
         repopath=repo.path,
-        action='histedit',
+        action=b'histedit',
     )
 
     # Save edit rules in .hg/histedit-last-edit.txt in case
     # the user needs to ask for help after something
     # surprising happens.
-    with repo.vfs('histedit-last-edit.txt', 'wb') as f:
+    with repo.vfs(b'histedit-last-edit.txt', b'wb') as f:
         f.write(rules)
 
     return rules
@@ -2337,16 +2349,16 @@
     rules = [
         l
         for l in (r.strip() for r in rules.splitlines())
-        if l and not l.startswith('#')
+        if l and not l.startswith(b'#')
     ]
     actions = []
     for r in rules:
-        if ' ' not in r:
-            raise error.ParseError(_('malformed line "%s"') % r)
-        verb, rest = r.split(' ', 1)
+        if b' ' not in r:
+            raise error.ParseError(_(b'malformed line "%s"') % r)
+        verb, rest = r.split(b' ', 1)
 
         if verb not in actiontable:
-            raise error.ParseError(_('unknown action "%s"') % verb)
+            raise error.ParseError(_(b'unknown action "%s"') % verb)
 
         action = actiontable[verb].fromrule(state, rest)
         actions.append(action)
@@ -2357,11 +2369,11 @@
     try:
         verifyactions(actions, state, ctxs)
     except error.ParseError:
-        if repo.vfs.exists('histedit-last-edit.txt'):
+        if repo.vfs.exists(b'histedit-last-edit.txt'):
             ui.warn(
                 _(
-                    'warning: histedit rules saved '
-                    'to: .hg/histedit-last-edit.txt\n'
+                    b'warning: histedit rules saved '
+                    b'to: .hg/histedit-last-edit.txt\n'
                 )
             )
         raise
@@ -2378,9 +2390,9 @@
     seen = set()
     prev = None
 
-    if actions and actions[0].verb in ['roll', 'fold']:
+    if actions and actions[0].verb in [b'roll', b'fold']:
         raise error.ParseError(
-            _('first changeset cannot use verb "%s"') % actions[0].verb
+            _(b'first changeset cannot use verb "%s"') % actions[0].verb
         )
 
     for action in actions:
@@ -2390,11 +2402,11 @@
             seen.add(action.node)
     missing = sorted(expected - seen)  # sort to stabilize output
 
-    if state.repo.ui.configbool('histedit', 'dropmissing'):
+    if state.repo.ui.configbool(b'histedit', b'dropmissing'):
         if len(actions) == 0:
             raise error.ParseError(
-                _('no rules provided'),
-                hint=_('use strip extension to remove commits'),
+                _(b'no rules provided'),
+                hint=_(b'use strip extension to remove commits'),
             )
 
         drops = [drop(state, n) for n in missing]
@@ -2403,10 +2415,10 @@
         actions[:0] = drops
     elif missing:
         raise error.ParseError(
-            _('missing rules for changeset %s') % node.short(missing[0]),
+            _(b'missing rules for changeset %s') % node.short(missing[0]),
             hint=_(
-                'use "drop %s" to discard, see also: '
-                "'hg help -e histedit.config'"
+                b'use "drop %s" to discard, see also: '
+                b"'hg help -e histedit.config'"
             )
             % node.short(missing[0]),
         )
@@ -2522,7 +2534,7 @@
         return
     oldbmarks = repo.nodebookmarks(oldtopmost)
     if oldbmarks:
-        with repo.lock(), repo.transaction('histedit') as tr:
+        with repo.lock(), repo.transaction(b'histedit') as tr:
             marks = repo._bookmarks
             changes = []
             for name in oldbmarks:
@@ -2543,7 +2555,7 @@
         # (we use %lr instead of %ln to silently ignore unknown items)
         nm = repo.changelog.nodemap
         nodes = sorted(n for n in nodes if n in nm)
-        roots = [c.node() for c in repo.set("roots(%ln)", nodes)]
+        roots = [c.node() for c in repo.set(b"roots(%ln)", nodes)]
         if roots:
             backup = not nobackup
             repair.strip(ui, repo, roots, backup=backup)
@@ -2561,13 +2573,13 @@
         common_nodes = histedit_nodes & set(nodelist)
         if common_nodes:
             raise error.Abort(
-                _("histedit in progress, can't strip %s")
-                % ', '.join(node.short(x) for x in common_nodes)
+                _(b"histedit in progress, can't strip %s")
+                % b', '.join(node.short(x) for x in common_nodes)
             )
     return orig(ui, repo, nodelist, *args, **kwargs)
 
 
-extensions.wrapfunction(repair, 'strip', stripwrapper)
+extensions.wrapfunction(repair, b'strip', stripwrapper)
 
 
 def summaryhook(ui, repo):
@@ -2578,19 +2590,19 @@
     if state.actions:
         # i18n: column positioning for "hg summary"
         ui.write(
-            _('hist:   %s (histedit --continue)\n')
+            _(b'hist:   %s (histedit --continue)\n')
             % (
-                ui.label(_('%d remaining'), 'histedit.remaining')
+                ui.label(_(b'%d remaining'), b'histedit.remaining')
                 % len(state.actions)
             )
         )
 
 
 def extsetup(ui):
-    cmdutil.summaryhooks.add('histedit', summaryhook)
+    cmdutil.summaryhooks.add(b'histedit', summaryhook)
     statemod.addunfinished(
-        'histedit',
-        fname='histedit-state',
+        b'histedit',
+        fname=b'histedit-state',
         allowcommit=True,
         continueflag=True,
         abortfunc=hgaborthistedit,
--- a/hgext/infinitepush/__init__.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/infinitepush/__init__.py	Sun Oct 06 09:48:39 2019 -0400
@@ -138,48 +138,48 @@
 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
 # be specifying the version(s) of Mercurial they are tested with, or
 # leave the attribute unspecified.
-testedwith = 'ships-with-hg-core'
+testedwith = b'ships-with-hg-core'
 
 configtable = {}
 configitem = registrar.configitem(configtable)
 
 configitem(
-    'infinitepush', 'server', default=False,
+    b'infinitepush', b'server', default=False,
 )
 configitem(
-    'infinitepush', 'storetype', default='',
+    b'infinitepush', b'storetype', default=b'',
 )
 configitem(
-    'infinitepush', 'indextype', default='',
+    b'infinitepush', b'indextype', default=b'',
 )
 configitem(
-    'infinitepush', 'indexpath', default='',
+    b'infinitepush', b'indexpath', default=b'',
 )
 configitem(
-    'infinitepush', 'storeallparts', default=False,
+    b'infinitepush', b'storeallparts', default=False,
 )
 configitem(
-    'infinitepush', 'reponame', default='',
+    b'infinitepush', b'reponame', default=b'',
 )
 configitem(
-    'scratchbranch', 'storepath', default='',
+    b'scratchbranch', b'storepath', default=b'',
 )
 configitem(
-    'infinitepush', 'branchpattern', default='',
+    b'infinitepush', b'branchpattern', default=b'',
 )
 configitem(
-    'infinitepush', 'pushtobundlestore', default=False,
+    b'infinitepush', b'pushtobundlestore', default=False,
 )
 configitem(
-    'experimental', 'server-bundlestore-bookmark', default='',
+    b'experimental', b'server-bundlestore-bookmark', default=b'',
 )
 configitem(
-    'experimental', 'infinitepush-scratchpush', default=False,
+    b'experimental', b'infinitepush-scratchpush', default=False,
 )
 
-experimental = 'experimental'
-configbookmark = 'server-bundlestore-bookmark'
-configscratchpush = 'infinitepush-scratchpush'
+experimental = b'experimental'
+configbookmark = b'server-bundlestore-bookmark'
+configscratchpush = b'infinitepush-scratchpush'
 
 scratchbranchparttype = bundleparts.scratchbranchparttype
 revsetpredicate = registrar.revsetpredicate()
@@ -189,31 +189,31 @@
 
 
 def _buildexternalbundlestore(ui):
-    put_args = ui.configlist('infinitepush', 'put_args', [])
-    put_binary = ui.config('infinitepush', 'put_binary')
+    put_args = ui.configlist(b'infinitepush', b'put_args', [])
+    put_binary = ui.config(b'infinitepush', b'put_binary')
     if not put_binary:
-        raise error.Abort('put binary is not specified')
-    get_args = ui.configlist('infinitepush', 'get_args', [])
-    get_binary = ui.config('infinitepush', 'get_binary')
+        raise error.Abort(b'put binary is not specified')
+    get_args = ui.configlist(b'infinitepush', b'get_args', [])
+    get_binary = ui.config(b'infinitepush', b'get_binary')
     if not get_binary:
-        raise error.Abort('get binary is not specified')
+        raise error.Abort(b'get binary is not specified')
     from . import store
 
     return store.externalbundlestore(put_binary, put_args, get_binary, get_args)
 
 
 def _buildsqlindex(ui):
-    sqlhost = ui.config('infinitepush', 'sqlhost')
+    sqlhost = ui.config(b'infinitepush', b'sqlhost')
     if not sqlhost:
-        raise error.Abort(_('please set infinitepush.sqlhost'))
-    host, port, db, user, password = sqlhost.split(':')
-    reponame = ui.config('infinitepush', 'reponame')
+        raise error.Abort(_(b'please set infinitepush.sqlhost'))
+    host, port, db, user, password = sqlhost.split(b':')
+    reponame = ui.config(b'infinitepush', b'reponame')
     if not reponame:
-        raise error.Abort(_('please set infinitepush.reponame'))
+        raise error.Abort(_(b'please set infinitepush.reponame'))
 
-    logfile = ui.config('infinitepush', 'logfile', '')
-    waittimeout = ui.configint('infinitepush', 'waittimeout', 300)
-    locktimeout = ui.configint('infinitepush', 'locktimeout', 120)
+    logfile = ui.config(b'infinitepush', b'logfile', b'')
+    waittimeout = ui.configint(b'infinitepush', b'waittimeout', 300)
+    locktimeout = ui.configint(b'infinitepush', b'locktimeout', 120)
     from . import sqlindexapi
 
     return sqlindexapi.sqlindexapi(
@@ -231,10 +231,10 @@
 
 
 def _getloglevel(ui):
-    loglevel = ui.config('infinitepush', 'loglevel', 'DEBUG')
+    loglevel = ui.config(b'infinitepush', b'loglevel', b'DEBUG')
     numeric_loglevel = getattr(logging, loglevel.upper(), None)
     if not isinstance(numeric_loglevel, int):
-        raise error.Abort(_('invalid log level %s') % loglevel)
+        raise error.Abort(_(b'invalid log level %s') % loglevel)
     return numeric_loglevel
 
 
@@ -248,7 +248,7 @@
     '''
 
     if common.isremotebooksenabled(ui):
-        hoist = ui.config('remotenames', 'hoistedpeer') + '/'
+        hoist = ui.config(b'remotenames', b'hoistedpeer') + b'/'
         if remotebookmark.startswith(hoist):
             return remotebookmark[len(hoist) :]
     return remotebookmark
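
(Illustration only, not part of the changeset: a sketch of the hoist stripping above, assuming remotenames.hoistedpeer is set to b'default'; the bookmark name is hypothetical:)

    hoist = b'default' + b'/'
    remotebookmark = b'default/infinitepush/mybook'
    if remotebookmark.startswith(hoist):
        remotebookmark = remotebookmark[len(hoist):]
    assert remotebookmark == b'infinitepush/mybook'
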
@@ -257,33 +257,33 @@
 class bundlestore(object):
     def __init__(self, repo):
         self._repo = repo
-        storetype = self._repo.ui.config('infinitepush', 'storetype')
-        if storetype == 'disk':
+        storetype = self._repo.ui.config(b'infinitepush', b'storetype')
+        if storetype == b'disk':
             from . import store
 
             self.store = store.filebundlestore(self._repo.ui, self._repo)
-        elif storetype == 'external':
+        elif storetype == b'external':
             self.store = _buildexternalbundlestore(self._repo.ui)
         else:
             raise error.Abort(
-                _('unknown infinitepush store type specified %s') % storetype
+                _(b'unknown infinitepush store type specified %s') % storetype
             )
 
-        indextype = self._repo.ui.config('infinitepush', 'indextype')
-        if indextype == 'disk':
+        indextype = self._repo.ui.config(b'infinitepush', b'indextype')
+        if indextype == b'disk':
             from . import fileindexapi
 
             self.index = fileindexapi.fileindexapi(self._repo)
-        elif indextype == 'sql':
+        elif indextype == b'sql':
             self.index = _buildsqlindex(self._repo.ui)
         else:
             raise error.Abort(
-                _('unknown infinitepush index type specified %s') % indextype
+                _(b'unknown infinitepush index type specified %s') % indextype
             )
 
 
 def _isserver(ui):
-    return ui.configbool('infinitepush', 'server')
+    return ui.configbool(b'infinitepush', b'server')
 
 
 def reposetup(ui, repo):
@@ -300,11 +300,11 @@
 
 
 def commonsetup(ui):
-    wireprotov1server.commands['listkeyspatterns'] = (
+    wireprotov1server.commands[b'listkeyspatterns'] = (
         wireprotolistkeyspatterns,
-        'namespace patterns',
+        b'namespace patterns',
     )
-    scratchbranchpat = ui.config('infinitepush', 'branchpattern')
+    scratchbranchpat = ui.config(b'infinitepush', b'branchpattern')
     if scratchbranchpat:
         global _scratchbranchmatcher
         kind, pat, _scratchbranchmatcher = stringutil.stringmatcher(
@@ -313,53 +313,53 @@
 
 
 def serverextsetup(ui):
-    origpushkeyhandler = bundle2.parthandlermapping['pushkey']
+    origpushkeyhandler = bundle2.parthandlermapping[b'pushkey']
 
     def newpushkeyhandler(*args, **kwargs):
         bundle2pushkey(origpushkeyhandler, *args, **kwargs)
 
     newpushkeyhandler.params = origpushkeyhandler.params
-    bundle2.parthandlermapping['pushkey'] = newpushkeyhandler
+    bundle2.parthandlermapping[b'pushkey'] = newpushkeyhandler
 
-    orighandlephasehandler = bundle2.parthandlermapping['phase-heads']
+    orighandlephasehandler = bundle2.parthandlermapping[b'phase-heads']
     newphaseheadshandler = lambda *args, **kwargs: bundle2handlephases(
         orighandlephasehandler, *args, **kwargs
     )
     newphaseheadshandler.params = orighandlephasehandler.params
-    bundle2.parthandlermapping['phase-heads'] = newphaseheadshandler
+    bundle2.parthandlermapping[b'phase-heads'] = newphaseheadshandler
 
     extensions.wrapfunction(
-        localrepo.localrepository, 'listkeys', localrepolistkeys
+        localrepo.localrepository, b'listkeys', localrepolistkeys
     )
-    wireprotov1server.commands['lookup'] = (
-        _lookupwrap(wireprotov1server.commands['lookup'][0]),
-        'key',
+    wireprotov1server.commands[b'lookup'] = (
+        _lookupwrap(wireprotov1server.commands[b'lookup'][0]),
+        b'key',
     )
-    extensions.wrapfunction(exchange, 'getbundlechunks', getbundlechunks)
+    extensions.wrapfunction(exchange, b'getbundlechunks', getbundlechunks)
 
-    extensions.wrapfunction(bundle2, 'processparts', processparts)
+    extensions.wrapfunction(bundle2, b'processparts', processparts)
 
 
 def clientextsetup(ui):
-    entry = extensions.wrapcommand(commands.table, 'push', _push)
+    entry = extensions.wrapcommand(commands.table, b'push', _push)
 
     entry[1].append(
         (
-            '',
-            'bundle-store',
+            b'',
+            b'bundle-store',
             None,
-            _('force push to go to bundle store (EXPERIMENTAL)'),
+            _(b'force push to go to bundle store (EXPERIMENTAL)'),
         )
     )
 
-    extensions.wrapcommand(commands.table, 'pull', _pull)
+    extensions.wrapcommand(commands.table, b'pull', _pull)
 
-    extensions.wrapfunction(discovery, 'checkheads', _checkheads)
+    extensions.wrapfunction(discovery, b'checkheads', _checkheads)
 
     wireprotov1peer.wirepeer.listkeyspatterns = listkeyspatterns
 
     partorder = exchange.b2partsgenorder
-    index = partorder.index('changeset')
+    index = partorder.index(b'changeset')
     partorder.insert(
         index, partorder.pop(partorder.index(scratchbranchparttype))
     )
@@ -378,14 +378,14 @@
 
 
 def localrepolistkeys(orig, self, namespace, patterns=None):
-    if namespace == 'bookmarks' and patterns:
+    if namespace == b'bookmarks' and patterns:
         index = self.bundlestore.index
         results = {}
         bookmarks = orig(self, namespace)
         for pattern in patterns:
             results.update(index.getbookmarks(pattern))
-            if pattern.endswith('*'):
-                pattern = 're:^' + pattern[:-1] + '.*'
+            if pattern.endswith(b'*'):
+                pattern = b're:^' + pattern[:-1] + b'.*'
             kind, pat, matcher = stringutil.stringmatcher(pattern)
             for bookmark, node in bookmarks.iteritems():
                 if matcher(bookmark):
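
(Illustration only, not part of the changeset: a worked example of the pattern rewrite above; a trailing '*' glob becomes an anchored regex before stringutil.stringmatcher compiles it:)

    pattern = b'infinitepush/*'
    if pattern.endswith(b'*'):
        pattern = b're:^' + pattern[:-1] + b'.*'
    assert pattern == b're:^infinitepush/.*'
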
@@ -397,21 +397,23 @@
 
 @wireprotov1peer.batchable
 def listkeyspatterns(self, namespace, patterns):
-    if not self.capable('pushkey'):
+    if not self.capable(b'pushkey'):
         yield {}, None
     f = wireprotov1peer.future()
-    self.ui.debug('preparing listkeys for "%s"\n' % namespace)
+    self.ui.debug(b'preparing listkeys for "%s"\n' % namespace)
     yield {
-        'namespace': encoding.fromlocal(namespace),
-        'patterns': wireprototypes.encodelist(patterns),
+        b'namespace': encoding.fromlocal(namespace),
+        b'patterns': wireprototypes.encodelist(patterns),
     }, f
     d = f.value
-    self.ui.debug('received listkey for "%s": %i bytes\n' % (namespace, len(d)))
+    self.ui.debug(
+        b'received listkey for "%s": %i bytes\n' % (namespace, len(d))
+    )
     yield pushkey.decodekeys(d)
 
 
 def _readbundlerevs(bundlerepo):
-    return list(bundlerepo.revs('bundle()'))
+    return list(bundlerepo.revs(b'bundle()'))
 
 
 def _includefilelogstobundle(bundlecaps, bundlerepo, bundlerevs, ui):
@@ -428,18 +430,18 @@
     if not changedfiles:
         return bundlecaps
 
-    changedfiles = '\0'.join(changedfiles)
+    changedfiles = b'\0'.join(changedfiles)
     newcaps = []
     appended = False
     for cap in bundlecaps or []:
-        if cap.startswith('excludepattern='):
-            newcaps.append('\0'.join((cap, changedfiles)))
+        if cap.startswith(b'excludepattern='):
+            newcaps.append(b'\0'.join((cap, changedfiles)))
             appended = True
         else:
             newcaps.append(cap)
     if not appended:
        # No excludepattern cap found. Just append one
-        newcaps.append('excludepattern=' + changedfiles)
+        newcaps.append(b'excludepattern=' + changedfiles)
 
     return newcaps
 
@@ -452,14 +454,14 @@
     '''
     parts = []
 
-    version = '02'
+    version = b'02'
     outgoing = discovery.outgoing(
         bundlerepo, commonheads=bundleroots, missingheads=[unknownhead]
     )
-    cgstream = changegroup.makestream(bundlerepo, outgoing, version, 'pull')
+    cgstream = changegroup.makestream(bundlerepo, outgoing, version, b'pull')
     cgstream = util.chunkbuffer(cgstream).read()
-    cgpart = bundle2.bundlepart('changegroup', data=cgstream)
-    cgpart.addparam('version', version)
+    cgpart = bundle2.bundlepart(b'changegroup', data=cgstream)
+    cgpart.addparam(b'version', version)
     parts.append(cgpart)
 
     return parts
@@ -480,7 +482,7 @@
 
 
 def _needsrebundling(head, bundlerepo):
-    bundleheads = list(bundlerepo.revs('heads(bundle())'))
+    bundleheads = list(bundlerepo.revs(b'heads(bundle())'))
     return not (
         len(bundleheads) == 1 and bundlerepo[bundleheads[0]].node() == head
     )
@@ -493,18 +495,18 @@
     '''
     parts = []
     if not _needsrebundling(head, bundlerepo):
-        with util.posixfile(bundlefile, "rb") as f:
+        with util.posixfile(bundlefile, b"rb") as f:
             unbundler = exchange.readbundle(bundlerepo.ui, f, bundlefile)
             if isinstance(unbundler, changegroup.cg1unpacker):
                 part = bundle2.bundlepart(
-                    'changegroup', data=unbundler._stream.read()
+                    b'changegroup', data=unbundler._stream.read()
                 )
-                part.addparam('version', '01')
+                part.addparam(b'version', b'01')
                 parts.append(part)
             elif isinstance(unbundler, bundle2.unbundle20):
                 haschangegroup = False
                 for part in unbundler.iterparts():
-                    if part.type == 'changegroup':
+                    if part.type == b'changegroup':
                         haschangegroup = True
                     newpart = bundle2.bundlepart(part.type, data=part.read())
                     for key, value in part.params.iteritems():
@@ -513,12 +515,12 @@
 
                 if not haschangegroup:
                     raise error.Abort(
-                        'unexpected bundle without changegroup part, '
-                        + 'head: %s' % hex(head),
-                        hint='report to administrator',
+                        b'unexpected bundle without changegroup part, '
+                        + b'head: %s' % hex(head),
+                        hint=b'report to administrator',
                     )
             else:
-                raise error.Abort('unknown bundle type')
+                raise error.Abort(b'unknown bundle type')
     else:
         parts = _rebundle(bundlerepo, bundleroots, head)
 
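
(Illustration only, not part of the changeset: in the Abort call above, % binds tighter than +, so only the second literal is interpolated; the head value below is hypothetical:)

    head = b'abc123'
    msg = b'unexpected bundle without changegroup part, ' + b'head: %s' % head
    assert msg == b'unexpected bundle without changegroup part, head: abc123'
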
@@ -539,7 +541,7 @@
             if head not in repo.changelog.nodemap:
                 if head not in nodestobundle:
                     newbundlefile = common.downloadbundle(repo, head)
-                    bundlepath = "bundle:%s+%s" % (repo.root, newbundlefile)
+                    bundlepath = b"bundle:%s+%s" % (repo.root, newbundlefile)
                     bundlerepo = hg.repository(repo.ui, bundlepath)
 
                     allbundlestocleanup.append((bundlerepo, newbundlefile))
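
(Illustration only, not part of the changeset: the bundle: path built above lets hg.repository() open the bundle overlaid on the local repo, so its revs are readable without being applied to the revlogs; the paths are hypothetical:)

    repo_root = b'/srv/repo'
    newbundlefile = b'/tmp/scratch.hg'
    bundlepath = b"bundle:%s+%s" % (repo_root, newbundlefile)
    assert bundlepath == b'bundle:/srv/repo+/tmp/scratch.hg'
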
@@ -576,7 +578,7 @@
     pullfrombundlestore = bool(scratchbundles)
     wrappedchangegrouppart = False
     wrappedlistkeys = False
-    oldchangegrouppart = exchange.getbundle2partsmapping['changegroup']
+    oldchangegrouppart = exchange.getbundle2partsmapping[b'changegroup']
     try:
 
         def _changegrouppart(bundler, *args, **kwargs):
@@ -589,20 +591,20 @@
                     bundler.addpart(part)
             return result
 
-        exchange.getbundle2partsmapping['changegroup'] = _changegrouppart
+        exchange.getbundle2partsmapping[b'changegroup'] = _changegrouppart
         wrappedchangegrouppart = True
 
         def _listkeys(orig, self, namespace):
             origvalues = orig(self, namespace)
-            if namespace == 'phases' and pullfrombundlestore:
-                if origvalues.get('publishing') == 'True':
+            if namespace == b'phases' and pullfrombundlestore:
+                if origvalues.get(b'publishing') == b'True':
                     # Make repo non-publishing to preserve draft phase
-                    del origvalues['publishing']
+                    del origvalues[b'publishing']
                 origvalues.update(newphases)
             return origvalues
 
         extensions.wrapfunction(
-            localrepo.localrepository, 'listkeys', _listkeys
+            localrepo.localrepository, b'listkeys', _listkeys
         )
         wrappedlistkeys = True
         heads = list((set(newheads) | set(heads)) - set(scratchheads))
@@ -611,10 +613,10 @@
         )
     finally:
         if wrappedchangegrouppart:
-            exchange.getbundle2partsmapping['changegroup'] = oldchangegrouppart
+            exchange.getbundle2partsmapping[b'changegroup'] = oldchangegrouppart
         if wrappedlistkeys:
             extensions.unwrapfunction(
-                localrepo.localrepository, 'listkeys', _listkeys
+                localrepo.localrepository, b'listkeys', _listkeys
             )
     return result
 
@@ -626,64 +628,67 @@
         if isinstance(localkey, str) and _scratchbranchmatcher(localkey):
             scratchnode = repo.bundlestore.index.getnode(localkey)
             if scratchnode:
-                return "%d %s\n" % (1, scratchnode)
+                return b"%d %s\n" % (1, scratchnode)
             else:
-                return "%d %s\n" % (0, 'scratch branch %s not found' % localkey)
+                return b"%d %s\n" % (
+                    0,
+                    b'scratch branch %s not found' % localkey,
+                )
         else:
             try:
                 r = hex(repo.lookup(localkey))
-                return "%d %s\n" % (1, r)
+                return b"%d %s\n" % (1, r)
             except Exception as inst:
                 if repo.bundlestore.index.getbundle(localkey):
-                    return "%d %s\n" % (1, localkey)
+                    return b"%d %s\n" % (1, localkey)
                 else:
                     r = stringutil.forcebytestr(inst)
-                    return "%d %s\n" % (0, r)
+                    return b"%d %s\n" % (0, r)
 
     return _lookup
 
 
-def _pull(orig, ui, repo, source="default", **opts):
+def _pull(orig, ui, repo, source=b"default", **opts):
     opts = pycompat.byteskwargs(opts)
     # Copy paste from `pull` command
-    source, branches = hg.parseurl(ui.expandpath(source), opts.get('branch'))
+    source, branches = hg.parseurl(ui.expandpath(source), opts.get(b'branch'))
 
     scratchbookmarks = {}
     unfi = repo.unfiltered()
     unknownnodes = []
-    for rev in opts.get('rev', []):
+    for rev in opts.get(b'rev', []):
         if rev not in unfi:
             unknownnodes.append(rev)
-    if opts.get('bookmark'):
+    if opts.get(b'bookmark'):
         bookmarks = []
-        revs = opts.get('rev') or []
-        for bookmark in opts.get('bookmark'):
+        revs = opts.get(b'rev') or []
+        for bookmark in opts.get(b'bookmark'):
             if _scratchbranchmatcher(bookmark):
                 # rev is not known yet
                 # it will be fetched with listkeyspatterns next
-                scratchbookmarks[bookmark] = 'REVTOFETCH'
+                scratchbookmarks[bookmark] = b'REVTOFETCH'
             else:
                 bookmarks.append(bookmark)
 
         if scratchbookmarks:
             other = hg.peer(repo, opts, source)
             fetchedbookmarks = other.listkeyspatterns(
-                'bookmarks', patterns=scratchbookmarks
+                b'bookmarks', patterns=scratchbookmarks
             )
             for bookmark in scratchbookmarks:
                 if bookmark not in fetchedbookmarks:
                     raise error.Abort(
-                        'remote bookmark %s not found!' % bookmark
+                        b'remote bookmark %s not found!' % bookmark
                     )
                 scratchbookmarks[bookmark] = fetchedbookmarks[bookmark]
                 revs.append(fetchedbookmarks[bookmark])
-        opts['bookmark'] = bookmarks
-        opts['rev'] = revs
+        opts[b'bookmark'] = bookmarks
+        opts[b'rev'] = revs
 
     if scratchbookmarks or unknownnodes:
         # Set anyincoming to True
         extensions.wrapfunction(
-            discovery, 'findcommonincoming', _findcommonincoming
+            discovery, b'findcommonincoming', _findcommonincoming
         )
     try:
         # Remote scratch bookmarks will be deleted because remotenames doesn't
@@ -701,12 +706,12 @@
         return result
     finally:
         if scratchbookmarks:
-            extensions.unwrapfunction(discovery, 'findcommonincoming')
+            extensions.unwrapfunction(discovery, b'findcommonincoming')
 
 
 def _readscratchremotebookmarks(ui, repo, other):
     if common.isremotebooksenabled(ui):
-        remotenamesext = extensions.find('remotenames')
+        remotenamesext = extensions.find(b'remotenames')
         remotepath = remotenamesext.activepath(repo.ui, other)
         result = {}
         # Let's refresh remotenames to make sure we have it up to date
@@ -714,10 +719,10 @@
         # and it results in deleting scratch bookmarks. Our best guess how to
         # fix it is to use `clearnames()`
         repo._remotenames.clearnames()
-        for remotebookmark in repo.names['remotebookmarks'].listnames(repo):
+        for remotebookmark in repo.names[b'remotebookmarks'].listnames(repo):
             path, bookname = remotenamesext.splitremotename(remotebookmark)
             if path == remotepath and _scratchbranchmatcher(bookname):
-                nodes = repo.names['remotebookmarks'].nodes(
+                nodes = repo.names[b'remotebookmarks'].nodes(
                     repo, remotebookmark
                 )
                 if nodes:
@@ -728,7 +733,7 @@
 
 
 def _saveremotebookmarks(repo, newbookmarks, remote):
-    remotenamesext = extensions.find('remotenames')
+    remotenamesext = extensions.find(b'remotenames')
     remotepath = remotenamesext.activepath(repo.ui, remote)
     branches = collections.defaultdict(list)
     bookmarks = {}
@@ -736,14 +741,14 @@
     for hexnode, nametype, remote, rname in remotenames:
         if remote != remotepath:
             continue
-        if nametype == 'bookmarks':
+        if nametype == b'bookmarks':
             if rname in newbookmarks:
                 # It's possible that a normal bookmark matches the
                 # scratch branch pattern. In this case just use the current
                 # bookmark node
                 del newbookmarks[rname]
             bookmarks[rname] = hexnode
-        elif nametype == 'branches':
+        elif nametype == b'branches':
             # saveremotenames expects 20 byte binary nodes for branches
             branches[rname].append(bin(hexnode))
 
@@ -755,7 +760,7 @@
 def _savelocalbookmarks(repo, bookmarks):
     if not bookmarks:
         return
-    with repo.wlock(), repo.lock(), repo.transaction('bookmark') as tr:
+    with repo.wlock(), repo.lock(), repo.transaction(b'bookmark') as tr:
         changes = []
         for scratchbook, node in bookmarks.iteritems():
             changectx = repo[node]
@@ -770,38 +775,38 @@
 
 def _push(orig, ui, repo, dest=None, *args, **opts):
     opts = pycompat.byteskwargs(opts)
-    bookmark = opts.get('bookmark')
+    bookmark = opts.get(b'bookmark')
     # we only support pushing one infinitepush bookmark at once
     if len(bookmark) == 1:
         bookmark = bookmark[0]
     else:
-        bookmark = ''
+        bookmark = b''
 
     oldphasemove = None
     overrides = {(experimental, configbookmark): bookmark}
 
-    with ui.configoverride(overrides, 'infinitepush'):
-        scratchpush = opts.get('bundle_store')
+    with ui.configoverride(overrides, b'infinitepush'):
+        scratchpush = opts.get(b'bundle_store')
         if _scratchbranchmatcher(bookmark):
             scratchpush = True
             # bundle2 can be sent back after push (for example, bundle2
             # containing `pushkey` part to update bookmarks)
-            ui.setconfig(experimental, 'bundle2.pushback', True)
+            ui.setconfig(experimental, b'bundle2.pushback', True)
 
         if scratchpush:
             # this is an infinitepush; we don't want the bookmark to be
             # applied, but rather stored in the bundlestore
-            opts['bookmark'] = []
+            opts[b'bookmark'] = []
             ui.setconfig(experimental, configscratchpush, True)
             oldphasemove = extensions.wrapfunction(
-                exchange, '_localphasemove', _phasemove
+                exchange, b'_localphasemove', _phasemove
             )
         # Copy-paste from `push` command
-        path = ui.paths.getpath(dest, default=('default-push', 'default'))
+        path = ui.paths.getpath(dest, default=(b'default-push', b'default'))
         if not path:
             raise error.Abort(
-                _('default repository not configured!'),
-                hint=_("see 'hg help config.paths'"),
+                _(b'default repository not configured!'),
+                hint=_(b"see 'hg help config.paths'"),
             )
         destpath = path.pushloc or path.loc
         # Remote scratch bookmarks will be deleted because remotenames doesn't
@@ -812,7 +817,7 @@
             if bookmark and scratchpush:
                 other = hg.peer(repo, opts, destpath)
                 fetchedbookmarks = other.listkeyspatterns(
-                    'bookmarks', patterns=[bookmark]
+                    b'bookmarks', patterns=[bookmark]
                 )
                 remotescratchbookmarks.update(fetchedbookmarks)
             _saveremotebookmarks(repo, remotescratchbookmarks, destpath)
@@ -825,7 +830,7 @@
     """Prune remote names by removing the bookmarks we don't want anymore,
     then writing the result back to disk
     """
-    remotenamesext = extensions.find('remotenames')
+    remotenamesext = extensions.find(b'remotenames')
 
     # remotename format is:
     # (node, nametype ("branches" or "bookmarks"), remote, name)
@@ -840,23 +845,24 @@
     remote_bm_names = [
         remotename[name_idx]
         for remotename in remotenames
-        if remotename[nametype_idx] == "bookmarks"
+        if remotename[nametype_idx] == b"bookmarks"
     ]
 
     for name in names:
         if name not in remote_bm_names:
             raise error.Abort(
                 _(
-                    "infinitepush bookmark '{}' does not exist " "in path '{}'"
+                    b"infinitepush bookmark '{}' does not exist "
+                    b"in path '{}'"
                 ).format(name, path)
             )
 
     bookmarks = {}
     branches = collections.defaultdict(list)
     for node, nametype, remote, name in remotenames:
-        if nametype == "bookmarks" and name not in names:
+        if nametype == b"bookmarks" and name not in names:
             bookmarks[name] = node
-        elif nametype == "branches":
+        elif nametype == b"branches":
             # saveremotenames wants binary nodes for branches
             branches[name].append(bin(node))
 
@@ -877,22 +883,22 @@
 def partgen(pushop, bundler):
     bookmark = pushop.ui.config(experimental, configbookmark)
     scratchpush = pushop.ui.configbool(experimental, configscratchpush)
-    if 'changesets' in pushop.stepsdone or not scratchpush:
+    if b'changesets' in pushop.stepsdone or not scratchpush:
         return
 
     if scratchbranchparttype not in bundle2.bundle2caps(pushop.remote):
         return
 
-    pushop.stepsdone.add('changesets')
+    pushop.stepsdone.add(b'changesets')
     if not pushop.outgoing.missing:
-        pushop.ui.status(_('no changes found\n'))
+        pushop.ui.status(_(b'no changes found\n'))
         pushop.cgresult = 0
         return
 
     # This parameter tells the server that the following bundle is an
     # infinitepush. This lets it switch the part processing to our infinitepush
     # code path.
-    bundler.addparam("infinitepush", "True")
+    bundler.addparam(b"infinitepush", b"True")
 
     scratchparts = bundleparts.getscratchbranchparts(
         pushop.repo, pushop.remote, pushop.outgoing, pushop.ui, bookmark
@@ -912,15 +918,15 @@
 
 
 def _getrevs(bundle, oldnode, force, bookmark):
-    'extracts and validates the revs to be imported'
-    revs = [bundle[r] for r in bundle.revs('sort(bundle())')]
+    b'extracts and validates the revs to be imported'
+    revs = [bundle[r] for r in bundle.revs(b'sort(bundle())')]
 
     # new bookmark
     if oldnode is None:
         return revs
 
     # Fast forward update
-    if oldnode in bundle and list(bundle.set('bundle() & %s::', oldnode)):
+    if oldnode in bundle and list(bundle.set(b'bundle() & %s::', oldnode)):
         return revs
 
     return revs
@@ -929,19 +935,19 @@
 @contextlib.contextmanager
 def logservicecall(logger, service, **kwargs):
     start = time.time()
-    logger(service, eventtype='start', **kwargs)
+    logger(service, eventtype=b'start', **kwargs)
     try:
         yield
         logger(
             service,
-            eventtype='success',
+            eventtype=b'success',
             elapsedms=(time.time() - start) * 1000,
             **kwargs
         )
     except Exception as e:
         logger(
             service,
-            eventtype='failure',
+            eventtype=b'failure',
             elapsedms=(time.time() - start) * 1000,
             errormsg=str(e),
             **kwargs
@@ -950,13 +956,13 @@
 
 
 def _getorcreateinfinitepushlogger(op):
-    logger = op.records['infinitepushlogger']
+    logger = op.records[b'infinitepushlogger']
     if not logger:
         ui = op.repo.ui
         try:
             username = procutil.getuser()
         except Exception:
-            username = 'unknown'
+            username = b'unknown'
         # Generate random request id to be able to find all logged entries
         # for the same request. Since requestid is pseudo-generated it may
         # not be unique, but we assume that (hostname, username, requestid)
@@ -966,13 +972,13 @@
         hostname = socket.gethostname()
         logger = functools.partial(
             ui.log,
-            'infinitepush',
+            b'infinitepush',
             user=username,
             requestid=requestid,
             hostname=hostname,
-            reponame=ui.config('infinitepush', 'reponame'),
+            reponame=ui.config(b'infinitepush', b'reponame'),
         )
-        op.records.add('infinitepushlogger', logger)
+        op.records.add(b'infinitepushlogger', logger)
     else:
         logger = logger[0]
     return logger
@@ -982,14 +988,14 @@
     """stores the incoming bundle coming from push command to the bundlestore
     instead of applying on the revlogs"""
 
-    repo.ui.status(_("storing changesets on the bundlestore\n"))
+    repo.ui.status(_(b"storing changesets on the bundlestore\n"))
     bundler = bundle2.bundle20(repo.ui)
 
     # processing each part and storing it in bundler
     with bundle2.partiterator(repo, op, unbundler) as parts:
         for part in parts:
             bundlepart = None
-            if part.type == 'replycaps':
+            if part.type == b'replycaps':
                 # This configures the current operation to allow reply parts.
                 bundle2._processpart(op, part)
             else:
@@ -998,15 +1004,15 @@
                     bundlepart.addparam(key, value)
 
                 # Certain parts require a response
-                if part.type in ('pushkey', 'changegroup'):
+                if part.type in (b'pushkey', b'changegroup'):
                     if op.reply is not None:
-                        rpart = op.reply.newpart('reply:%s' % part.type)
+                        rpart = op.reply.newpart(b'reply:%s' % part.type)
                         rpart.addparam(
-                            'in-reply-to', b'%d' % part.id, mandatory=False
+                            b'in-reply-to', b'%d' % part.id, mandatory=False
                         )
-                        rpart.addparam('return', '1', mandatory=False)
+                        rpart.addparam(b'return', b'1', mandatory=False)
 
-            op.records.add(part.type, {'return': 1,})
+            op.records.add(part.type, {b'return': 1,})
             if bundlepart:
                 bundler.addpart(bundlepart)
 
@@ -1031,24 +1037,24 @@
 def processparts(orig, repo, op, unbundler):
 
     # make sure we don't wrap processparts in case of `hg unbundle`
-    if op.source == 'unbundle':
+    if op.source == b'unbundle':
         return orig(repo, op, unbundler)
 
     # this server routes each push to bundle store
-    if repo.ui.configbool('infinitepush', 'pushtobundlestore'):
+    if repo.ui.configbool(b'infinitepush', b'pushtobundlestore'):
         return storetobundlestore(orig, repo, op, unbundler)
 
-    if unbundler.params.get('infinitepush') != 'True':
+    if unbundler.params.get(b'infinitepush') != b'True':
         return orig(repo, op, unbundler)
 
-    handleallparts = repo.ui.configbool('infinitepush', 'storeallparts')
+    handleallparts = repo.ui.configbool(b'infinitepush', b'storeallparts')
 
     bundler = bundle2.bundle20(repo.ui)
     cgparams = None
     with bundle2.partiterator(repo, op, unbundler) as parts:
         for part in parts:
             bundlepart = None
-            if part.type == 'replycaps':
+            if part.type == b'replycaps':
                 # This configures the current operation to allow reply parts.
                 bundle2._processpart(op, part)
             elif part.type == bundleparts.scratchbranchparttype:
@@ -1057,18 +1063,22 @@
                 # when we upload to the store. Eventually those parameters will
                 # be put on the actual bundle instead of this part, then we can
                 # send a vanilla changegroup instead of the scratchbranch part.
-                cgversion = part.params.get('cgversion', '01')
-                bundlepart = bundle2.bundlepart('changegroup', data=part.read())
-                bundlepart.addparam('version', cgversion)
+                cgversion = part.params.get(b'cgversion', b'01')
+                bundlepart = bundle2.bundlepart(
+                    b'changegroup', data=part.read()
+                )
+                bundlepart.addparam(b'version', cgversion)
                 cgparams = part.params
 
                 # If we're not dumping all parts into the new bundle, we need to
                 # alert the future pushkey and phase-heads handler to skip
                 # the part.
                 if not handleallparts:
-                    op.records.add(scratchbranchparttype + '_skippushkey', True)
                     op.records.add(
-                        scratchbranchparttype + '_skipphaseheads', True
+                        scratchbranchparttype + b'_skippushkey', True
+                    )
+                    op.records.add(
+                        scratchbranchparttype + b'_skipphaseheads', True
                     )
             else:
                 if handleallparts:
@@ -1081,18 +1091,18 @@
                         bundlepart.addparam(key, value)
 
                     # Certain parts require a response
-                    if part.type == 'pushkey':
+                    if part.type == b'pushkey':
                         if op.reply is not None:
-                            rpart = op.reply.newpart('reply:pushkey')
+                            rpart = op.reply.newpart(b'reply:pushkey')
                             rpart.addparam(
-                                'in-reply-to', str(part.id), mandatory=False
+                                b'in-reply-to', str(part.id), mandatory=False
                             )
-                            rpart.addparam('return', '1', mandatory=False)
+                            rpart.addparam(b'return', b'1', mandatory=False)
                 else:
                     bundle2._processpart(op, part)
 
             if handleallparts:
-                op.records.add(part.type, {'return': 1,})
+                op.records.add(part.type, {b'return': 1,})
             if bundlepart:
                 bundler.addpart(bundlepart)
 
@@ -1118,44 +1128,44 @@
 def storebundle(op, params, bundlefile):
     log = _getorcreateinfinitepushlogger(op)
     parthandlerstart = time.time()
-    log(scratchbranchparttype, eventtype='start')
+    log(scratchbranchparttype, eventtype=b'start')
     index = op.repo.bundlestore.index
     store = op.repo.bundlestore.store
-    op.records.add(scratchbranchparttype + '_skippushkey', True)
+    op.records.add(scratchbranchparttype + b'_skippushkey', True)
 
     bundle = None
     try:  # guards bundle
-        bundlepath = "bundle:%s+%s" % (op.repo.root, bundlefile)
+        bundlepath = b"bundle:%s+%s" % (op.repo.root, bundlefile)
         bundle = hg.repository(op.repo.ui, bundlepath)
 
-        bookmark = params.get('bookmark')
-        bookprevnode = params.get('bookprevnode', '')
-        force = params.get('force')
+        bookmark = params.get(b'bookmark')
+        bookprevnode = params.get(b'bookprevnode', b'')
+        force = params.get(b'force')
 
         if bookmark:
             oldnode = index.getnode(bookmark)
         else:
             oldnode = None
-        bundleheads = bundle.revs('heads(bundle())')
+        bundleheads = bundle.revs(b'heads(bundle())')
         if bookmark and len(bundleheads) > 1:
             raise error.Abort(
-                _('cannot push more than one head to a scratch branch')
+                _(b'cannot push more than one head to a scratch branch')
             )
 
         revs = _getrevs(bundle, oldnode, force, bookmark)
 
         # Notify the user of what is being pushed
-        plural = 's' if len(revs) > 1 else ''
-        op.repo.ui.warn(_("pushing %d commit%s:\n") % (len(revs), plural))
+        plural = b's' if len(revs) > 1 else b''
+        op.repo.ui.warn(_(b"pushing %d commit%s:\n") % (len(revs), plural))
         maxoutput = 10
         for i in range(0, min(len(revs), maxoutput)):
-            firstline = bundle[revs[i]].description().split('\n')[0][:50]
-            op.repo.ui.warn("    %s  %s\n" % (revs[i], firstline))
+            firstline = bundle[revs[i]].description().split(b'\n')[0][:50]
+            op.repo.ui.warn(b"    %s  %s\n" % (revs[i], firstline))
 
         if len(revs) > maxoutput + 1:
-            op.repo.ui.warn("    ...\n")
-            firstline = bundle[revs[-1]].description().split('\n')[0][:50]
-            op.repo.ui.warn("    %s  %s\n" % (revs[-1], firstline))
+            op.repo.ui.warn(b"    ...\n")
+            firstline = bundle[revs[-1]].description().split(b'\n')[0][:50]
+            op.repo.ui.warn(b"    %s  %s\n" % (revs[-1], firstline))
 
         nodesctx = [bundle[rev] for rev in revs]
         inindex = lambda rev: bool(index.getbundle(bundle[rev].hex()))
@@ -1170,21 +1180,21 @@
         bookmarknode = nodesctx[-1].hex() if nodesctx else None
         key = None
         if newheadscount:
-            with open(bundlefile, 'rb') as f:
+            with open(bundlefile, b'rb') as f:
                 bundledata = f.read()
                 with logservicecall(
-                    log, 'bundlestore', bundlesize=len(bundledata)
+                    log, b'bundlestore', bundlesize=len(bundledata)
                 ):
                     bundlesizelimit = 100 * 1024 * 1024  # 100 MB
                     if len(bundledata) > bundlesizelimit:
                         error_msg = (
-                            'bundle is too big: %d bytes. '
-                            + 'max allowed size is 100 MB'
+                            b'bundle is too big: %d bytes. '
+                            + b'max allowed size is 100 MB'
                         )
                         raise error.Abort(error_msg % (len(bundledata),))
                     key = store.write(bundledata)
 
-        with logservicecall(log, 'index', newheadscount=newheadscount), index:
+        with logservicecall(log, b'index', newheadscount=newheadscount), index:
             if key:
                 index.addbundle(key, nodesctx)
             if bookmark:
@@ -1194,14 +1204,14 @@
                 )
         log(
             scratchbranchparttype,
-            eventtype='success',
+            eventtype=b'success',
             elapsedms=(time.time() - parthandlerstart) * 1000,
         )
 
     except Exception as e:
         log(
             scratchbranchparttype,
-            eventtype='failure',
+            eventtype=b'failure',
             elapsedms=(time.time() - parthandlerstart) * 1000,
             errormsg=str(e),
         )
@@ -1213,15 +1223,21 @@
 
 @bundle2.parthandler(
     scratchbranchparttype,
-    ('bookmark', 'bookprevnode', 'force', 'pushbackbookmarks', 'cgversion'),
+    (
+        b'bookmark',
+        b'bookprevnode',
+        b'force',
+        b'pushbackbookmarks',
+        b'cgversion',
+    ),
 )
 def bundle2scratchbranch(op, part):
     '''unbundle a bundle2 part containing a changegroup to store'''
 
     bundler = bundle2.bundle20(op.repo.ui)
-    cgversion = part.params.get('cgversion', '01')
-    cgpart = bundle2.bundlepart('changegroup', data=part.read())
-    cgpart.addparam('version', cgversion)
+    cgversion = part.params.get(b'cgversion', b'01')
+    cgpart = bundle2.bundlepart(b'changegroup', data=part.read())
+    cgpart.addparam(b'version', cgversion)
     bundler.addpart(cgpart)
     buf = util.chunkbuffer(bundler.getchunks())
 
@@ -1244,15 +1260,15 @@
 
 
 def _maybeaddpushbackpart(op, bookmark, newnode, oldnode, params):
-    if params.get('pushbackbookmarks'):
-        if op.reply and 'pushback' in op.reply.capabilities:
+    if params.get(b'pushbackbookmarks'):
+        if op.reply and b'pushback' in op.reply.capabilities:
             params = {
-                'namespace': 'bookmarks',
-                'key': bookmark,
-                'new': newnode,
-                'old': oldnode,
+                b'namespace': b'bookmarks',
+                b'key': bookmark,
+                b'new': newnode,
+                b'old': oldnode,
             }
-            op.reply.newpart('pushkey', mandatoryparams=params.iteritems())
+            op.reply.newpart(b'pushkey', mandatoryparams=params.iteritems())
 
 
 def bundle2pushkey(orig, op, part):
@@ -1261,11 +1277,11 @@
     The only goal is to skip calling the original function if flag is set.
     It's set if infinitepush push is happening.
     '''
-    if op.records[scratchbranchparttype + '_skippushkey']:
+    if op.records[scratchbranchparttype + b'_skippushkey']:
         if op.reply is not None:
-            rpart = op.reply.newpart('reply:pushkey')
-            rpart.addparam('in-reply-to', str(part.id), mandatory=False)
-            rpart.addparam('return', '1', mandatory=False)
+            rpart = op.reply.newpart(b'reply:pushkey')
+            rpart.addparam(b'in-reply-to', str(part.id), mandatory=False)
+            rpart.addparam(b'return', b'1', mandatory=False)
         return 1
 
     return orig(op, part)
@@ -1278,7 +1294,7 @@
     It's set if infinitepush push is happening.
     '''
 
-    if op.records[scratchbranchparttype + '_skipphaseheads']:
+    if op.records[scratchbranchparttype + b'_skipphaseheads']:
         return
 
     return orig(op, part)
@@ -1296,13 +1312,13 @@
         return
     nodesargs = []
     for node in nodes:
-        nodesargs.append('--node')
+        nodesargs.append(b'--node')
         nodesargs.append(node)
-    with open(os.devnull, 'w+b') as devnull:
+    with open(os.devnull, b'w+b') as devnull:
         cmdline = [
             util.hgexecutable(),
-            'debugfillinfinitepushmetadata',
-            '-R',
+            b'debugfillinfinitepushmetadata',
+            b'-R',
             root,
         ] + nodesargs
         # Process will run in background. We don't care about the return code
--- a/hgext/infinitepush/bundleparts.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/infinitepush/bundleparts.py	Sun Oct 06 09:48:39 2019 -0400
@@ -21,42 +21,44 @@
 
 isremotebooksenabled = common.isremotebooksenabled
 
-scratchbranchparttype = 'b2x:infinitepush'
+scratchbranchparttype = b'b2x:infinitepush'
 
 
 def getscratchbranchparts(repo, peer, outgoing, ui, bookmark):
     if not outgoing.missing:
-        raise error.Abort(_('no commits to push'))
+        raise error.Abort(_(b'no commits to push'))
 
     if scratchbranchparttype not in bundle2.bundle2caps(peer):
-        raise error.Abort(_('no server support for %r') % scratchbranchparttype)
+        raise error.Abort(
+            _(b'no server support for %r') % scratchbranchparttype
+        )
 
     _validaterevset(
-        repo, revsetlang.formatspec('%ln', outgoing.missing), bookmark
+        repo, revsetlang.formatspec(b'%ln', outgoing.missing), bookmark
     )
 
     supportedversions = changegroup.supportedoutgoingversions(repo)
     # Explicitly avoid using '01' changegroup version in infinitepush to
     # support general delta
-    supportedversions.discard('01')
+    supportedversions.discard(b'01')
     cgversion = min(supportedversions)
     _handlelfs(repo, outgoing.missing)
-    cg = changegroup.makestream(repo, outgoing, cgversion, 'push')
+    cg = changegroup.makestream(repo, outgoing, cgversion, b'push')
 
     params = {}
-    params['cgversion'] = cgversion
+    params[b'cgversion'] = cgversion
     if bookmark:
-        params['bookmark'] = bookmark
+        params[b'bookmark'] = bookmark
         # 'prevbooknode' is necessary for pushkey reply part
-        params['bookprevnode'] = ''
+        params[b'bookprevnode'] = b''
         bookmarks = repo._bookmarks
         if bookmark in bookmarks:
-            params['bookprevnode'] = nodemod.hex(bookmarks[bookmark])
+            params[b'bookprevnode'] = nodemod.hex(bookmarks[bookmark])
 
     # Do not send pushback bundle2 part with bookmarks if remotenames extension
     # is enabled. It will be handled manually in `_push()`
     if not isremotebooksenabled(ui):
-        params['pushbackbookmarks'] = '1'
+        params[b'pushbackbookmarks'] = b'1'
 
     parts = []
 
@@ -76,13 +78,13 @@
 def _validaterevset(repo, revset, bookmark):
     """Abort if the revs to be pushed aren't valid for a scratch branch."""
     if not repo.revs(revset):
-        raise error.Abort(_('nothing to push'))
+        raise error.Abort(_(b'nothing to push'))
     if bookmark:
         # Allow bundle with many heads only if no bookmark is specified
-        heads = repo.revs('heads(%r)', revset)
+        heads = repo.revs(b'heads(%r)', revset)
         if len(heads) > 1:
             raise error.Abort(
-                _('cannot push more than one head to a scratch branch')
+                _(b'cannot push more than one head to a scratch branch')
             )
 
 
@@ -93,7 +95,7 @@
     to make sure large files are uploaded to lfs
     '''
     try:
-        lfsmod = extensions.find('lfs')
+        lfsmod = extensions.find(b'lfs')
         lfsmod.wrapper.uploadblobsfromrevs(repo, missing)
     except KeyError:
         # Ignore if lfs extension is not enabled
--- a/hgext/infinitepush/common.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/infinitepush/common.py	Sun Oct 06 09:48:39 2019 -0400
@@ -17,8 +17,8 @@
 
 
 def isremotebooksenabled(ui):
-    return 'remotenames' in extensions._extensions and ui.configbool(
-        'remotenames', 'bookmarks'
+    return b'remotenames' in extensions._extensions and ui.configbool(
+        b'remotenames', b'bookmarks'
     )
 
 
@@ -27,7 +27,7 @@
     store = repo.bundlestore.store
     bundleid = index.getbundle(hex(unknownbinhead))
     if bundleid is None:
-        raise error.Abort('%s head is not known' % hex(unknownbinhead))
+        raise error.Abort(b'%s head is not known' % hex(unknownbinhead))
     bundleraw = store.read(bundleid)
     return _makebundlefromraw(bundleraw)
 
--- a/hgext/infinitepush/fileindexapi.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/infinitepush/fileindexapi.py	Sun Oct 06 09:48:39 2019 -0400
@@ -26,13 +26,13 @@
     def __init__(self, repo):
         super(fileindexapi, self).__init__()
         self._repo = repo
-        root = repo.ui.config('infinitepush', 'indexpath')
+        root = repo.ui.config(b'infinitepush', b'indexpath')
         if not root:
-            root = os.path.join('scratchbranches', 'index')
+            root = os.path.join(b'scratchbranches', b'index')
 
-        self._nodemap = os.path.join(root, 'nodemap')
-        self._bookmarkmap = os.path.join(root, 'bookmarkmap')
-        self._metadatamap = os.path.join(root, 'nodemetadatamap')
+        self._nodemap = os.path.join(root, b'nodemap')
+        self._bookmarkmap = os.path.join(root, b'bookmarkmap')
+        self._metadatamap = os.path.join(root, b'nodemetadatamap')
         self._lock = None
 
     def __enter__(self):
@@ -78,8 +78,8 @@
         vfs.write(os.path.join(self._metadatamap, node), jsonmetadata)
 
     def _listbookmarks(self, pattern):
-        if pattern.endswith('*'):
-            pattern = 're:^' + pattern[:-1] + '.*'
+        if pattern.endswith(b'*'):
+            pattern = b're:^' + pattern[:-1] + b'.*'
         kind, pat, matcher = stringutil.stringmatcher(pattern)
         prefixlen = len(self._bookmarkmap) + 1
         for dirpath, _, books in self._repo.vfs.walk(self._bookmarkmap):
--- a/hgext/infinitepush/sqlindexapi.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/infinitepush/sqlindexapi.py	Sun Oct 06 09:48:39 2019 -0400
@@ -18,10 +18,10 @@
 
 
 def _convertbookmarkpattern(pattern):
-    pattern = pattern.replace('_', '\\_')
-    pattern = pattern.replace('%', '\\%')
-    if pattern.endswith('*'):
-        pattern = pattern[:-1] + '%'
+    pattern = pattern.replace(b'_', b'\\_')
+    pattern = pattern.replace(b'%', b'\\%')
+    if pattern.endswith(b'*'):
+        pattern = pattern[:-1] + b'%'
     return pattern
 
 
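
(Illustration only, not part of the changeset: a worked example of _convertbookmarkpattern above; LIKE metacharacters are escaped first, then a trailing '*' glob becomes the SQL LIKE '%' wildcard:)

    pattern = b'scratch/me_*'
    pattern = pattern.replace(b'_', b'\\_')
    pattern = pattern.replace(b'%', b'\\%')
    if pattern.endswith(b'*'):
        pattern = pattern[:-1] + b'%'
    assert pattern == b'scratch/me\\_%'
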
@@ -46,11 +46,11 @@
         super(sqlindexapi, self).__init__()
         self.reponame = reponame
         self.sqlargs = {
-            'host': host,
-            'port': port,
-            'database': database,
-            'user': user,
-            'password': password,
+            b'host': host,
+            b'port': port,
+            b'database': database,
+            b'user': user,
+            b'password': password,
         }
         self.sqlconn = None
         self.sqlcursor = None
@@ -65,10 +65,10 @@
 
     def sqlconnect(self):
         if self.sqlconn:
-            raise indexapi.indexexception("SQL connection already open")
+            raise indexapi.indexexception(b"SQL connection already open")
         if self.sqlcursor:
             raise indexapi.indexexception(
-                "SQL cursor already open without" " connection"
+                b"SQL cursor already open without" b" connection"
             )
         retry = 3
         while True:
@@ -90,19 +90,19 @@
                     raise
                 time.sleep(0.2)
 
-        waittimeout = self.sqlconn.converter.escape('%s' % self._waittimeout)
+        waittimeout = self.sqlconn.converter.escape(b'%s' % self._waittimeout)
 
         self.sqlcursor = self.sqlconn.cursor()
-        self.sqlcursor.execute("SET wait_timeout=%s" % waittimeout)
+        self.sqlcursor.execute(b"SET wait_timeout=%s" % waittimeout)
         self.sqlcursor.execute(
-            "SET innodb_lock_wait_timeout=%s" % self._locktimeout
+            b"SET innodb_lock_wait_timeout=%s" % self._locktimeout
         )
         self._connected = True
 
     def close(self):
         """Cleans up the metadata store connection."""
         with warnings.catch_warnings():
-            warnings.simplefilter("ignore")
+            warnings.simplefilter(b"ignore")
             self.sqlcursor.close()
             self.sqlconn.close()
         self.sqlcursor = None
@@ -122,29 +122,29 @@
     def addbundle(self, bundleid, nodesctx):
         if not self._connected:
             self.sqlconnect()
-        self.log.info("ADD BUNDLE %r %r" % (self.reponame, bundleid))
+        self.log.info(b"ADD BUNDLE %r %r" % (self.reponame, bundleid))
         self.sqlcursor.execute(
-            "INSERT INTO bundles(bundle, reponame) VALUES " "(%s, %s)",
+            b"INSERT INTO bundles(bundle, reponame) VALUES " b"(%s, %s)",
             params=(bundleid, self.reponame),
         )
         for ctx in nodesctx:
             self.sqlcursor.execute(
-                "INSERT INTO nodestobundle(node, bundle, reponame) "
-                "VALUES (%s, %s, %s) ON DUPLICATE KEY UPDATE "
-                "bundle=VALUES(bundle)",
+                b"INSERT INTO nodestobundle(node, bundle, reponame) "
+                b"VALUES (%s, %s, %s) ON DUPLICATE KEY UPDATE "
+                b"bundle=VALUES(bundle)",
                 params=(ctx.hex(), bundleid, self.reponame),
             )
 
             extra = ctx.extra()
             author_name = ctx.user()
-            committer_name = extra.get('committer', ctx.user())
+            committer_name = extra.get(b'committer', ctx.user())
             author_date = int(ctx.date()[0])
-            committer_date = int(extra.get('committer_date', author_date))
+            committer_date = int(extra.get(b'committer_date', author_date))
             self.sqlcursor.execute(
-                "INSERT IGNORE INTO nodesmetadata(node, message, p1, p2, "
-                "author, committer, author_date, committer_date, "
-                "reponame) VALUES "
-                "(%s, %s, %s, %s, %s, %s, %s, %s, %s)",
+                b"INSERT IGNORE INTO nodesmetadata(node, message, p1, p2, "
+                b"author, committer, author_date, committer_date, "
+                b"reponame) VALUES "
+                b"(%s, %s, %s, %s, %s, %s, %s, %s, %s)",
                 params=(
                     ctx.hex(),
                     ctx.description(),
@@ -164,12 +164,12 @@
         if not self._connected:
             self.sqlconnect()
         self.log.info(
-            "ADD BOOKMARKS %r bookmark: %r node: %r"
+            b"ADD BOOKMARKS %r bookmark: %r node: %r"
             % (self.reponame, bookmark, node)
         )
         self.sqlcursor.execute(
-            "INSERT INTO bookmarkstonode(bookmark, node, reponame) "
-            "VALUES (%s, %s, %s) ON DUPLICATE KEY UPDATE node=VALUES(node)",
+            b"INSERT INTO bookmarkstonode(bookmark, node, reponame) "
+            b"VALUES (%s, %s, %s) ON DUPLICATE KEY UPDATE node=VALUES(node)",
             params=(bookmark, node, self.reponame),
         )
 
@@ -179,13 +179,13 @@
         args = []
         values = []
         for bookmark, node in bookmarks.iteritems():
-            args.append('(%s, %s, %s)')
+            args.append(b'(%s, %s, %s)')
             values.extend((bookmark, node, self.reponame))
-        args = ','.join(args)
+        args = b','.join(args)
 
         self.sqlcursor.execute(
-            "INSERT INTO bookmarkstonode(bookmark, node, reponame) "
-            "VALUES %s ON DUPLICATE KEY UPDATE node=VALUES(node)" % args,
+            b"INSERT INTO bookmarkstonode(bookmark, node, reponame) "
+            b"VALUES %s ON DUPLICATE KEY UPDATE node=VALUES(node)" % args,
             params=values,
         )
 
@@ -196,12 +196,12 @@
         """
         if not self._connected:
             self.sqlconnect()
-        self.log.info("DELETE BOOKMARKS: %s" % patterns)
+        self.log.info(b"DELETE BOOKMARKS: %s" % patterns)
         for pattern in patterns:
             pattern = _convertbookmarkpattern(pattern)
             self.sqlcursor.execute(
-                "DELETE from bookmarkstonode WHERE bookmark LIKE (%s) "
-                "and reponame = %s",
+                b"DELETE from bookmarkstonode WHERE bookmark LIKE (%s) "
+                b"and reponame = %s",
                 params=(pattern, self.reponame),
             )
 
@@ -209,18 +209,18 @@
         """Returns the bundleid for the bundle that contains the given node."""
         if not self._connected:
             self.sqlconnect()
-        self.log.info("GET BUNDLE %r %r" % (self.reponame, node))
+        self.log.info(b"GET BUNDLE %r %r" % (self.reponame, node))
         self.sqlcursor.execute(
-            "SELECT bundle from nodestobundle "
-            "WHERE node = %s AND reponame = %s",
+            b"SELECT bundle from nodestobundle "
+            b"WHERE node = %s AND reponame = %s",
             params=(node, self.reponame),
         )
         result = self.sqlcursor.fetchall()
         if len(result) != 1 or len(result[0]) != 1:
-            self.log.info("No matching node")
+            self.log.info(b"No matching node")
             return None
         bundle = result[0][0]
-        self.log.info("Found bundle %r" % bundle)
+        self.log.info(b"Found bundle %r" % bundle)
         return bundle
 
     def getnode(self, bookmark):
@@ -228,38 +228,38 @@
         if not self._connected:
             self.sqlconnect()
         self.log.info(
-            "GET NODE reponame: %r bookmark: %r" % (self.reponame, bookmark)
+            b"GET NODE reponame: %r bookmark: %r" % (self.reponame, bookmark)
         )
         self.sqlcursor.execute(
-            "SELECT node from bookmarkstonode WHERE "
-            "bookmark = %s AND reponame = %s",
+            b"SELECT node from bookmarkstonode WHERE "
+            b"bookmark = %s AND reponame = %s",
             params=(bookmark, self.reponame),
         )
         result = self.sqlcursor.fetchall()
         if len(result) != 1 or len(result[0]) != 1:
-            self.log.info("No matching bookmark")
+            self.log.info(b"No matching bookmark")
             return None
         node = result[0][0]
-        self.log.info("Found node %r" % node)
+        self.log.info(b"Found node %r" % node)
         return node
 
     def getbookmarks(self, query):
         if not self._connected:
             self.sqlconnect()
         self.log.info(
-            "QUERY BOOKMARKS reponame: %r query: %r" % (self.reponame, query)
+            b"QUERY BOOKMARKS reponame: %r query: %r" % (self.reponame, query)
         )
         query = _convertbookmarkpattern(query)
         self.sqlcursor.execute(
-            "SELECT bookmark, node from bookmarkstonode WHERE "
-            "reponame = %s AND bookmark LIKE %s",
+            b"SELECT bookmark, node from bookmarkstonode WHERE "
+            b"reponame = %s AND bookmark LIKE %s",
             params=(self.reponame, query),
         )
         result = self.sqlcursor.fetchall()
         bookmarks = {}
         for row in result:
             if len(row) != 2:
-                self.log.info("Bad row returned: %s" % row)
+                self.log.info(b"Bad row returned: %s" % row)
                 continue
             bookmarks[row[0]] = row[1]
         return bookmarks
@@ -269,15 +269,15 @@
             self.sqlconnect()
         self.log.info(
             (
-                "INSERT METADATA, QUERY BOOKMARKS reponame: %r "
-                + "node: %r, jsonmetadata: %s"
+                b"INSERT METADATA, QUERY BOOKMARKS reponame: %r "
+                + b"node: %r, jsonmetadata: %s"
             )
             % (self.reponame, node, jsonmetadata)
         )
 
         self.sqlcursor.execute(
-            "UPDATE nodesmetadata SET optional_json_metadata=%s WHERE "
-            "reponame=%s AND node=%s",
+            b"UPDATE nodesmetadata SET optional_json_metadata=%s WHERE "
+            b"reponame=%s AND node=%s",
             params=(jsonmetadata, self.reponame, node),
         )
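
The sqlindexapi hunks above lean on bytes %-formatting (PEP 461, available
since Python 3.5): only the VALUES clause assembled from args is %-formatted
in Python, while the inner %s placeholders are left for the database driver
to substitute via params=. A standalone sketch of the Python-side step, with
illustrative data rather than infinitepush code:

    # Sketch only: bytes %-formatting of the VALUES clause. One
    # "(%s, %s, %s)" placeholder group is emitted per bookmark; the
    # driver-side substitution via params= is not modelled here.
    bookmarks = {b'book/one': b'aaaa', b'book/two': b'bbbb'}
    args = b','.join([b'(%s, %s, %s)'] * len(bookmarks))
    query = (
        b"INSERT INTO bookmarkstonode(bookmark, node, reponame) "
        b"VALUES %s ON DUPLICATE KEY UPDATE node=VALUES(node)" % args
    )
    print(query.decode('ascii'))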
 
--- a/hgext/infinitepush/store.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/infinitepush/store.py	Sun Oct 06 09:48:39 2019 -0400
@@ -68,10 +68,10 @@
     def __init__(self, ui, repo):
         self.ui = ui
         self.repo = repo
-        self.storepath = ui.configpath('scratchbranch', 'storepath')
+        self.storepath = ui.configpath(b'scratchbranch', b'storepath')
         if not self.storepath:
             self.storepath = self.repo.vfs.join(
-                "scratchbranches", "filebundlestore"
+                b"scratchbranches", b"filebundlestore"
             )
         if not os.path.exists(self.storepath):
             os.makedirs(self.storepath)
@@ -92,14 +92,14 @@
         if not os.path.exists(dirpath):
             os.makedirs(dirpath)
 
-        with open(self._filepath(filename), 'wb') as f:
+        with open(self._filepath(filename), b'wb') as f:
             f.write(data)
 
         return filename
 
     def read(self, key):
         try:
-            with open(self._filepath(key), 'rb') as f:
+            with open(self._filepath(key), b'rb') as f:
                 return f.read()
         except IOError:
             return None
@@ -152,14 +152,14 @@
 
             if returncode != 0:
                 raise BundleWriteException(
-                    'Failed to upload to external store: %s' % stderr
+                    b'Failed to upload to external store: %s' % stderr
                 )
             stdout_lines = stdout.splitlines()
             if len(stdout_lines) == 1:
                 return stdout_lines[0]
             else:
                 raise BundleWriteException(
-                    'Bad output from %s: %s' % (self.put_binary, stdout)
+                    b'Bad output from %s: %s' % (self.put_binary, stdout)
                 )
 
     def read(self, handle):
@@ -178,6 +178,6 @@
 
             if returncode != 0:
                 raise BundleReadException(
-                    'Failed to download from external store: %s' % stderr
+                    b'Failed to download from external store: %s' % stderr
                 )
             return temp.read()
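
The store.py hunks pass byteified modes such as b'wb' to open(), which the
Python 3 builtin rejects; these call sites assume an open wrapper like the
one mercurial.pycompat provides, which decodes the mode first. A minimal
stand-in for that wrapper, assuming only the mode decoding:

    import builtins
    import os
    import tempfile

    # Illustrative shim, not the hgext code: decode a byteified mode
    # string back to a native str before calling the real open().
    def open(name, mode=b'r', buffering=-1):
        if isinstance(mode, bytes):
            mode = mode.decode('ascii')
        return builtins.open(name, mode, buffering)

    fd, path = tempfile.mkstemp()
    os.close(fd)
    with open(path, b'wb') as f:   # b'wb' now works, as in the hunk above
        f.write(b'bundle data')
    with open(path, b'rb') as f:
        assert f.read() == b'bundle data'
    os.unlink(path)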
--- a/hgext/journal.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/journal.py	Sun Oct 06 09:48:39 2019 -0400
@@ -49,14 +49,14 @@
 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
 # be specifying the version(s) of Mercurial they are tested with, or
 # leave the attribute unspecified.
-testedwith = 'ships-with-hg-core'
+testedwith = b'ships-with-hg-core'
 
 # storage format version; increment when the format changes
 storageversion = 0
 
 # namespaces
-bookmarktype = 'bookmark'
-wdirparenttype = 'wdirparent'
+bookmarktype = b'bookmark'
+wdirparenttype = b'wdirparent'
 # In a shared repository, what shared feature name is used
 # to indicate this namespace is shared with the source?
 sharednamespaces = {
@@ -65,21 +65,21 @@
 
 # Journal recording, register hooks and storage object
 def extsetup(ui):
-    extensions.wrapfunction(dispatch, 'runcommand', runcommand)
-    extensions.wrapfunction(bookmarks.bmstore, '_write', recordbookmarks)
+    extensions.wrapfunction(dispatch, b'runcommand', runcommand)
+    extensions.wrapfunction(bookmarks.bmstore, b'_write', recordbookmarks)
     extensions.wrapfilecache(
-        localrepo.localrepository, 'dirstate', wrapdirstate
+        localrepo.localrepository, b'dirstate', wrapdirstate
     )
-    extensions.wrapfunction(hg, 'postshare', wrappostshare)
-    extensions.wrapfunction(hg, 'copystore', unsharejournal)
+    extensions.wrapfunction(hg, b'postshare', wrappostshare)
+    extensions.wrapfunction(hg, b'copystore', unsharejournal)
 
 
 def reposetup(ui, repo):
     if repo.local():
         repo.journal = journalstorage(repo)
-        repo._wlockfreeprefix.add('namejournal')
+        repo._wlockfreeprefix.add(b'namejournal')
 
-        dirstate, cached = localrepo.isfilecached(repo, 'dirstate')
+        dirstate, cached = localrepo.isfilecached(repo, b'dirstate')
         if cached:
             # already instantiated dirstate isn't yet marked as
             # "journal"-ing, even though repo.dirstate() was already
@@ -95,14 +95,14 @@
 
 def _setupdirstate(repo, dirstate):
     dirstate.journalstorage = repo.journal
-    dirstate.addparentchangecallback('journal', recorddirstateparents)
+    dirstate.addparentchangecallback(b'journal', recorddirstateparents)
 
 
 # hooks to record dirstate changes
 def wrapdirstate(orig, repo):
     """Make journal storage available to the dirstate object"""
     dirstate = orig(repo)
-    if util.safehasattr(repo, 'journal'):
+    if util.safehasattr(repo, b'journal'):
         _setupdirstate(repo, dirstate)
     return dirstate
 
@@ -111,12 +111,12 @@
     """Records all dirstate parent changes in the journal."""
     old = list(old)
     new = list(new)
-    if util.safehasattr(dirstate, 'journalstorage'):
+    if util.safehasattr(dirstate, b'journalstorage'):
         # only record two hashes if there was a merge
         oldhashes = old[:1] if old[1] == node.nullid else old
         newhashes = new[:1] if new[1] == node.nullid else new
         dirstate.journalstorage.record(
-            wdirparenttype, '.', oldhashes, newhashes
+            wdirparenttype, b'.', oldhashes, newhashes
         )
 
 
@@ -124,7 +124,7 @@
 def recordbookmarks(orig, store, fp):
     """Records all bookmark changes in the journal."""
     repo = store._repo
-    if util.safehasattr(repo, 'journal'):
+    if util.safehasattr(repo, b'journal'):
         oldmarks = bookmarks.bmstore(repo)
         for mark, value in store.iteritems():
             oldvalue = oldmarks.get(mark, node.nullid)
@@ -137,7 +137,7 @@
 def _readsharedfeatures(repo):
     """A set of shared features for this repository"""
     try:
-        return set(repo.vfs.read('shared').splitlines())
+        return set(repo.vfs.read(b'shared').splitlines())
     except IOError as inst:
         if inst.errno != errno.ENOENT:
             raise
@@ -177,8 +177,8 @@
     """Mark this shared working copy as sharing journal information"""
     with destrepo.wlock():
         orig(sourcerepo, destrepo, **kwargs)
-        with destrepo.vfs('shared', 'a') as fp:
-            fp.write('journal\n')
+        with destrepo.vfs(b'shared', b'a') as fp:
+            fp.write(b'journal\n')
 
 
 def unsharejournal(orig, ui, repo, repopath):
@@ -186,20 +186,20 @@
     if (
         repo.path == repopath
         and repo.shared()
-        and util.safehasattr(repo, 'journal')
+        and util.safehasattr(repo, b'journal')
     ):
         sharedrepo = hg.sharedreposource(repo)
         sharedfeatures = _readsharedfeatures(repo)
-        if sharedrepo and sharedfeatures > {'journal'}:
+        if sharedrepo and sharedfeatures > {b'journal'}:
             # there is a shared repository and there are shared journal entries
             # to copy. move shared data over from source to destination but
             # move the local file first
-            if repo.vfs.exists('namejournal'):
-                journalpath = repo.vfs.join('namejournal')
-                util.rename(journalpath, journalpath + '.bak')
+            if repo.vfs.exists(b'namejournal'):
+                journalpath = repo.vfs.join(b'namejournal')
+                util.rename(journalpath, journalpath + b'.bak')
             storage = repo.journal
             local = storage._open(
-                repo.vfs, filename='namejournal.bak', _newestfirst=False
+                repo.vfs, filename=b'namejournal.bak', _newestfirst=False
             )
             shared = (
                 e
@@ -245,11 +245,11 @@
             name,
             oldhashes,
             newhashes,
-        ) = line.split('\n')
+        ) = line.split(b'\n')
         timestamp, tz = time.split()
         timestamp, tz = float(timestamp), int(tz)
-        oldhashes = tuple(node.bin(hash) for hash in oldhashes.split(','))
-        newhashes = tuple(node.bin(hash) for hash in newhashes.split(','))
+        oldhashes = tuple(node.bin(hash) for hash in oldhashes.split(b','))
+        newhashes = tuple(node.bin(hash) for hash in newhashes.split(b','))
         return cls(
             (timestamp, tz),
             user,
@@ -262,10 +262,10 @@
 
     def __bytes__(self):
         """bytes representation for storage"""
-        time = ' '.join(map(pycompat.bytestr, self.timestamp))
-        oldhashes = ','.join([node.hex(hash) for hash in self.oldhashes])
-        newhashes = ','.join([node.hex(hash) for hash in self.newhashes])
-        return '\n'.join(
+        time = b' '.join(map(pycompat.bytestr, self.timestamp))
+        oldhashes = b','.join([node.hex(hash) for hash in self.oldhashes])
+        newhashes = b','.join([node.hex(hash) for hash in self.newhashes])
+        return b'\n'.join(
             (
                 time,
                 self.user,
@@ -311,19 +311,19 @@
         if repo.shared():
             features = _readsharedfeatures(repo)
             sharedrepo = hg.sharedreposource(repo)
-            if sharedrepo is not None and 'journal' in features:
+            if sharedrepo is not None and b'journal' in features:
                 self.sharedvfs = sharedrepo.vfs
                 self.sharedfeatures = features
 
     # track the current command for recording in journal entries
     @property
     def command(self):
-        commandstr = ' '.join(
+        commandstr = b' '.join(
             map(procutil.shellquote, journalstorage._currentcommand)
         )
-        if '\n' in commandstr:
+        if b'\n' in commandstr:
             # truncate multi-line commands
-            commandstr = commandstr.partition('\n')[0] + ' ...'
+            commandstr = commandstr.partition(b'\n')[0] + b' ...'
         return commandstr
 
     @classmethod
@@ -348,22 +348,22 @@
     def jlock(self, vfs):
         """Create a lock for the journal file"""
         if self._currentlock(self._lockref) is not None:
-            raise error.Abort(_('journal lock does not support nesting'))
-        desc = _('journal of %s') % vfs.base
+            raise error.Abort(_(b'journal lock does not support nesting'))
+        desc = _(b'journal of %s') % vfs.base
         try:
-            l = lock.lock(vfs, 'namejournal.lock', 0, desc=desc)
+            l = lock.lock(vfs, b'namejournal.lock', 0, desc=desc)
         except error.LockHeld as inst:
             self.ui.warn(
-                _("waiting for lock on %s held by %r\n") % (desc, inst.locker)
+                _(b"waiting for lock on %s held by %r\n") % (desc, inst.locker)
             )
             # default to 600 seconds timeout
             l = lock.lock(
                 vfs,
-                'namejournal.lock',
-                self.ui.configint("ui", "timeout"),
+                b'namejournal.lock',
+                self.ui.configint(b"ui", b"timeout"),
                 desc=desc,
             )
-            self.ui.warn(_("got lock after %s seconds\n") % l.delay)
+            self.ui.warn(_(b"got lock after %s seconds\n") % l.delay)
         self._lockref = weakref.ref(l)
         return l
 
@@ -406,25 +406,25 @@
     def _write(self, vfs, entry):
         with self.jlock(vfs):
             # open file in append mode to ensure it is created if missing
-            with vfs('namejournal', mode='a+b') as f:
+            with vfs(b'namejournal', mode=b'a+b') as f:
                 f.seek(0, os.SEEK_SET)
                 # Read just enough bytes to get a version number (up to 2
                 # digits plus separator)
-                version = f.read(3).partition('\0')[0]
-                if version and version != "%d" % storageversion:
+                version = f.read(3).partition(b'\0')[0]
+                if version and version != b"%d" % storageversion:
                     # different version of the storage. Exit early (and not
                     # write anything) if this is not a version we can handle or
                     # the file is corrupt. In future, perhaps rotate the file
                     # instead?
                     self.ui.warn(
-                        _("unsupported journal file version '%s'\n") % version
+                        _(b"unsupported journal file version '%s'\n") % version
                     )
                     return
                 if not version:
                     # empty file, write version first
-                    f.write(("%d" % storageversion) + '\0')
+                    f.write((b"%d" % storageversion) + b'\0')
                 f.seek(0, os.SEEK_END)
-                f.write(bytes(entry) + '\0')
+                f.write(bytes(entry) + b'\0')
 
     def filtered(self, namespace=None, name=None):
         """Yield all journal entries with the given namespace or name
@@ -467,18 +467,18 @@
         )
         return _mergeentriesiter(local, shared)
 
-    def _open(self, vfs, filename='namejournal', _newestfirst=True):
+    def _open(self, vfs, filename=b'namejournal', _newestfirst=True):
         if not vfs.exists(filename):
             return
 
         with vfs(filename) as f:
             raw = f.read()
 
-        lines = raw.split('\0')
+        lines = raw.split(b'\0')
         version = lines and lines[0]
-        if version != "%d" % storageversion:
-            version = version or _('not available')
-            raise error.Abort(_("unknown journal file version '%s'") % version)
+        if version != b"%d" % storageversion:
+            version = version or _(b'not available')
+            raise error.Abort(_(b"unknown journal file version '%s'") % version)
 
         # Skip the first line, it's a version number. Normally we iterate over
         # these in reverse order to list newest first; only when copying across
@@ -494,17 +494,17 @@
 
 # journal reading
 # log options that don't make sense for journal
-_ignoreopts = ('no-merges', 'graph')
+_ignoreopts = (b'no-merges', b'graph')
 
 
 @command(
-    'journal',
+    b'journal',
     [
-        ('', 'all', None, 'show history for all names'),
-        ('c', 'commits', None, 'show commit metadata'),
+        (b'', b'all', None, b'show history for all names'),
+        (b'c', b'commits', None, b'show commit metadata'),
     ]
     + [opt for opt in cmdutil.logopts if opt[1] not in _ignoreopts],
-    '[OPTION]... [BOOKMARKNAME]',
+    b'[OPTION]... [BOOKMARKNAME]',
     helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
 )
 def journal(ui, repo, *args, **opts):
@@ -533,72 +533,72 @@
 
     """
     opts = pycompat.byteskwargs(opts)
-    name = '.'
-    if opts.get('all'):
+    name = b'.'
+    if opts.get(b'all'):
         if args:
             raise error.Abort(
-                _("You can't combine --all and filtering on a name")
+                _(b"You can't combine --all and filtering on a name")
             )
         name = None
     if args:
         name = args[0]
 
-    fm = ui.formatter('journal', opts)
+    fm = ui.formatter(b'journal', opts)
 
     def formatnodes(nodes):
-        return fm.formatlist(map(fm.hexfunc, nodes), name='node', sep=',')
+        return fm.formatlist(map(fm.hexfunc, nodes), name=b'node', sep=b',')
 
-    if opts.get("template") != "json":
+    if opts.get(b"template") != b"json":
         if name is None:
-            displayname = _('the working copy and bookmarks')
+            displayname = _(b'the working copy and bookmarks')
         else:
-            displayname = "'%s'" % name
-        ui.status(_("previous locations of %s:\n") % displayname)
+            displayname = b"'%s'" % name
+        ui.status(_(b"previous locations of %s:\n") % displayname)
 
     limit = logcmdutil.getlimit(opts)
     entry = None
-    ui.pager('journal')
+    ui.pager(b'journal')
     for count, entry in enumerate(repo.journal.filtered(name=name)):
         if count == limit:
             break
 
         fm.startitem()
         fm.condwrite(
-            ui.verbose, 'oldnodes', '%s -> ', formatnodes(entry.oldhashes)
+            ui.verbose, b'oldnodes', b'%s -> ', formatnodes(entry.oldhashes)
         )
-        fm.write('newnodes', '%s', formatnodes(entry.newhashes))
-        fm.condwrite(ui.verbose, 'user', ' %-8s', entry.user)
+        fm.write(b'newnodes', b'%s', formatnodes(entry.newhashes))
+        fm.condwrite(ui.verbose, b'user', b' %-8s', entry.user)
         fm.condwrite(
-            opts.get('all') or name.startswith('re:'),
-            'name',
-            '  %-8s',
+            opts.get(b'all') or name.startswith(b're:'),
+            b'name',
+            b'  %-8s',
             entry.name,
         )
 
         fm.condwrite(
             ui.verbose,
-            'date',
-            ' %s',
-            fm.formatdate(entry.timestamp, '%Y-%m-%d %H:%M %1%2'),
+            b'date',
+            b' %s',
+            fm.formatdate(entry.timestamp, b'%Y-%m-%d %H:%M %1%2'),
         )
-        fm.write('command', '  %s\n', entry.command)
+        fm.write(b'command', b'  %s\n', entry.command)
 
-        if opts.get("commits"):
+        if opts.get(b"commits"):
             if fm.isplain():
                 displayer = logcmdutil.changesetdisplayer(ui, repo, opts)
             else:
                 displayer = logcmdutil.changesetformatter(
-                    ui, repo, fm.nested('changesets'), diffopts=opts
+                    ui, repo, fm.nested(b'changesets'), diffopts=opts
                 )
             for hash in entry.newhashes:
                 try:
                     ctx = repo[hash]
                     displayer.show(ctx)
                 except error.RepoLookupError as e:
-                    fm.plain("%s\n\n" % pycompat.bytestr(e))
+                    fm.plain(b"%s\n\n" % pycompat.bytestr(e))
             displayer.close()
 
     fm.end()
 
     if entry is None:
-        ui.status(_("no recorded locations\n"))
+        ui.status(_(b"no recorded locations\n"))
--- a/hgext/keyword.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/keyword.py	Sun Oct 06 09:48:39 2019 -0400
@@ -122,33 +122,33 @@
 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
 # be specifying the version(s) of Mercurial they are tested with, or
 # leave the attribute unspecified.
-testedwith = 'ships-with-hg-core'
+testedwith = b'ships-with-hg-core'
 
 # hg commands that do not act on keywords
 nokwcommands = (
-    'add addremove annotate bundle export grep incoming init log'
-    ' outgoing push tip verify convert email glog'
+    b'add addremove annotate bundle export grep incoming init log'
+    b' outgoing push tip verify convert email glog'
 )
 
 # webcommands that do not act on keywords
-nokwwebcommands = 'annotate changeset rev filediff diff comparison'
+nokwwebcommands = b'annotate changeset rev filediff diff comparison'
 
 # hg commands that trigger expansion only when writing to working dir,
 # not when reading filelog, and unexpand when reading from working dir
 restricted = (
-    'merge kwexpand kwshrink record qrecord resolve transplant'
-    ' unshelve rebase graft backout histedit fetch'
+    b'merge kwexpand kwshrink record qrecord resolve transplant'
+    b' unshelve rebase graft backout histedit fetch'
 )
 
 # names of extensions using dorecord
-recordextensions = 'record'
+recordextensions = b'record'
 
 colortable = {
-    'kwfiles.enabled': 'green bold',
-    'kwfiles.deleted': 'cyan bold underline',
-    'kwfiles.enabledunknown': 'green',
-    'kwfiles.ignored': 'bold',
-    'kwfiles.ignoredunknown': 'none',
+    b'kwfiles.enabled': b'green bold',
+    b'kwfiles.deleted': b'cyan bold underline',
+    b'kwfiles.enabledunknown': b'green',
+    b'kwfiles.ignored': b'bold',
+    b'kwfiles.ignoredunknown': b'none',
 }
 
 templatefilter = registrar.templatefilter()
@@ -157,65 +157,65 @@
 configitem = registrar.configitem(configtable)
 
 configitem(
-    'keywordset', 'svn', default=False,
+    b'keywordset', b'svn', default=False,
 )
 # date like in cvs' $Date
-@templatefilter('utcdate', intype=templateutil.date)
+@templatefilter(b'utcdate', intype=templateutil.date)
 def utcdate(date):
     '''Date. Returns a UTC-date in this format: "2009/08/18 11:00:13".
     '''
-    dateformat = '%Y/%m/%d %H:%M:%S'
+    dateformat = b'%Y/%m/%d %H:%M:%S'
     return dateutil.datestr((date[0], 0), dateformat)
 
 
 # date like in svn's $Date
-@templatefilter('svnisodate', intype=templateutil.date)
+@templatefilter(b'svnisodate', intype=templateutil.date)
 def svnisodate(date):
     '''Date. Returns a date in this format: "2009-08-18 13:00:13
     +0200 (Tue, 18 Aug 2009)".
     '''
-    return dateutil.datestr(date, '%Y-%m-%d %H:%M:%S %1%2 (%a, %d %b %Y)')
+    return dateutil.datestr(date, b'%Y-%m-%d %H:%M:%S %1%2 (%a, %d %b %Y)')
 
 
 # date like in svn's $Id
-@templatefilter('svnutcdate', intype=templateutil.date)
+@templatefilter(b'svnutcdate', intype=templateutil.date)
 def svnutcdate(date):
     '''Date. Returns a UTC-date in this format: "2009-08-18
     11:00:13Z".
     '''
-    dateformat = '%Y-%m-%d %H:%M:%SZ'
+    dateformat = b'%Y-%m-%d %H:%M:%SZ'
     return dateutil.datestr((date[0], 0), dateformat)
 
 
 # make keyword tools accessible
-kwtools = {'hgcmd': ''}
+kwtools = {b'hgcmd': b''}
 
 
 def _defaultkwmaps(ui):
     '''Returns default keywordmaps according to keywordset configuration.'''
     templates = {
-        'Revision': '{node|short}',
-        'Author': '{author|user}',
+        b'Revision': b'{node|short}',
+        b'Author': b'{author|user}',
     }
     kwsets = (
         {
-            'Date': '{date|utcdate}',
-            'RCSfile': '{file|basename},v',
-            'RCSFile': '{file|basename},v',  # kept for backwards compatibility
+            b'Date': b'{date|utcdate}',
+            b'RCSfile': b'{file|basename},v',
+            b'RCSFile': b'{file|basename},v',  # kept for backwards compatibility
             # with hg-keyword
-            'Source': '{root}/{file},v',
-            'Id': '{file|basename},v {node|short} {date|utcdate} {author|user}',
-            'Header': '{root}/{file},v {node|short} {date|utcdate} {author|user}',
+            b'Source': b'{root}/{file},v',
+            b'Id': b'{file|basename},v {node|short} {date|utcdate} {author|user}',
+            b'Header': b'{root}/{file},v {node|short} {date|utcdate} {author|user}',
         },
         {
-            'Date': '{date|svnisodate}',
-            'Id': '{file|basename},v {node|short} {date|svnutcdate} {author|user}',
-            'LastChangedRevision': '{node|short}',
-            'LastChangedBy': '{author|user}',
-            'LastChangedDate': '{date|svnisodate}',
+            b'Date': b'{date|svnisodate}',
+            b'Id': b'{file|basename},v {node|short} {date|svnutcdate} {author|user}',
+            b'LastChangedRevision': b'{node|short}',
+            b'LastChangedBy': b'{author|user}',
+            b'LastChangedDate': b'{date|svnisodate}',
         },
     )
-    templates.update(kwsets[ui.configbool('keywordset', 'svn')])
+    templates.update(kwsets[ui.configbool(b'keywordset', b'svn')])
     return templates
 
 
@@ -243,11 +243,11 @@
     def __init__(self, ui, repo, inc, exc):
         self.ui = ui
         self._repo = weakref.ref(repo)
-        self.match = match.match(repo.root, '', [], inc, exc)
-        self.restrict = kwtools['hgcmd'] in restricted.split()
+        self.match = match.match(repo.root, b'', [], inc, exc)
+        self.restrict = kwtools[b'hgcmd'] in restricted.split()
         self.postcommit = False
 
-        kwmaps = self.ui.configitems('keywordmaps')
+        kwmaps = self.ui.configitems(b'keywordmaps')
         if kwmaps:  # override default templates
             self.templates = dict(kwmaps)
         else:
@@ -260,7 +260,7 @@
     @util.propertycache
     def escape(self):
         '''Returns bar-separated and escaped keywords.'''
-        return '|'.join(map(stringutil.reescape, self.templates.keys()))
+        return b'|'.join(map(stringutil.reescape, self.templates.keys()))
 
     @util.propertycache
     def rekw(self):
@@ -283,7 +283,7 @@
             self.ui.pushbuffer()
             ct.show(ctx, root=self.repo.root, file=path)
             ekw = templatefilters.firstline(self.ui.popbuffer())
-            return '$%s: %s $' % (kw, ekw)
+            return b'$%s: %s $' % (kw, ekw)
 
         return subfunc(kwsub, data)
 
@@ -305,7 +305,7 @@
     def iskwfile(self, cand, ctx):
         '''Returns subset of candidates which are configured for keyword
         expansion but are not symbolic links.'''
-        return [f for f in cand if self.match(f) and 'l' not in ctx.flags(f)]
+        return [f for f in cand if self.match(f) and b'l' not in ctx.flags(f)]
 
     def overwrite(self, ctx, candidates, lookup, expand, rekw=False):
         '''Overwrites selected files expanding/shrinking keywords.'''
@@ -321,9 +321,9 @@
         else:
             re_kw = self.rekwexp
         if expand:
-            msg = _('overwriting %s expanding keywords\n')
+            msg = _(b'overwriting %s expanding keywords\n')
         else:
-            msg = _('overwriting %s shrinking keywords\n')
+            msg = _(b'overwriting %s shrinking keywords\n')
         for f in candidates:
             if self.restrict:
                 data = self.repo.file(f).read(mf[f])
@@ -350,7 +350,7 @@
                 data, found = _shrinktext(data, re_kw.subn)
             if found:
                 self.ui.note(msg % f)
-                fp = self.repo.wvfs(f, "wb", atomictemp=True)
+                fp = self.repo.wvfs(f, b"wb", atomictemp=True)
                 fp.write(data)
                 fp.close()
                 if kwcmd:
@@ -367,7 +367,7 @@
     def shrinklines(self, fname, lines):
         '''Returns lines with keyword substitutions removed.'''
         if self.match(fname):
-            text = ''.join(lines)
+            text = b''.join(lines)
             if not stringutil.binary(text):
                 return _shrinktext(text, self.rekwexp.sub).splitlines(True)
         return lines
@@ -417,33 +417,33 @@
         return repo.status(
             match=scmutil.match(wctx, pats, opts),
             clean=True,
-            unknown=opts.get('unknown') or opts.get('all'),
+            unknown=opts.get(b'unknown') or opts.get(b'all'),
         )
-    if ui.configitems('keyword'):
-        raise error.Abort(_('[keyword] patterns cannot match'))
-    raise error.Abort(_('no [keyword] patterns configured'))
+    if ui.configitems(b'keyword'):
+        raise error.Abort(_(b'[keyword] patterns cannot match'))
+    raise error.Abort(_(b'no [keyword] patterns configured'))
 
 
 def _kwfwrite(ui, repo, expand, *pats, **opts):
     '''Selects files and passes them to kwtemplater.overwrite.'''
     wctx = repo[None]
     if len(wctx.parents()) > 1:
-        raise error.Abort(_('outstanding uncommitted merge'))
+        raise error.Abort(_(b'outstanding uncommitted merge'))
     kwt = getattr(repo, '_keywordkwt', None)
     with repo.wlock():
         status = _status(ui, repo, wctx, kwt, *pats, **opts)
         if status.modified or status.added or status.removed or status.deleted:
-            raise error.Abort(_('outstanding uncommitted changes'))
+            raise error.Abort(_(b'outstanding uncommitted changes'))
         kwt.overwrite(wctx, status.clean, True, expand)
 
 
 @command(
-    'kwdemo',
+    b'kwdemo',
     [
-        ('d', 'default', None, _('show default keyword template maps')),
-        ('f', 'rcfile', '', _('read maps from rcfile'), _('FILE')),
+        (b'd', b'default', None, _(b'show default keyword template maps')),
+        (b'f', b'rcfile', b'', _(b'read maps from rcfile'), _(b'FILE')),
     ],
-    _('hg kwdemo [-d] [-f RCFILE] [TEMPLATEMAP]...'),
+    _(b'hg kwdemo [-d] [-f RCFILE] [TEMPLATEMAP]...'),
     optionalrepo=True,
 )
 def demo(ui, repo, *args, **opts):
@@ -461,55 +461,55 @@
     '''
 
     def demoitems(section, items):
-        ui.write('[%s]\n' % section)
+        ui.write(b'[%s]\n' % section)
         for k, v in sorted(items):
             if isinstance(v, bool):
                 v = stringutil.pprint(v)
-            ui.write('%s = %s\n' % (k, v))
+            ui.write(b'%s = %s\n' % (k, v))
 
-    fn = 'demo.txt'
-    tmpdir = pycompat.mkdtemp('', 'kwdemo.')
-    ui.note(_('creating temporary repository at %s\n') % tmpdir)
+    fn = b'demo.txt'
+    tmpdir = pycompat.mkdtemp(b'', b'kwdemo.')
+    ui.note(_(b'creating temporary repository at %s\n') % tmpdir)
     if repo is None:
         baseui = ui
     else:
         baseui = repo.baseui
     repo = localrepo.instance(baseui, tmpdir, create=True)
-    ui.setconfig('keyword', fn, '', 'keyword')
-    svn = ui.configbool('keywordset', 'svn')
+    ui.setconfig(b'keyword', fn, b'', b'keyword')
+    svn = ui.configbool(b'keywordset', b'svn')
     # explicitly set keywordset for demo output
-    ui.setconfig('keywordset', 'svn', svn, 'keyword')
+    ui.setconfig(b'keywordset', b'svn', svn, b'keyword')
 
-    uikwmaps = ui.configitems('keywordmaps')
+    uikwmaps = ui.configitems(b'keywordmaps')
     if args or opts.get(r'rcfile'):
-        ui.status(_('\n\tconfiguration using custom keyword template maps\n'))
+        ui.status(_(b'\n\tconfiguration using custom keyword template maps\n'))
         if uikwmaps:
-            ui.status(_('\textending current template maps\n'))
+            ui.status(_(b'\textending current template maps\n'))
         if opts.get(r'default') or not uikwmaps:
             if svn:
-                ui.status(_('\toverriding default svn keywordset\n'))
+                ui.status(_(b'\toverriding default svn keywordset\n'))
             else:
-                ui.status(_('\toverriding default cvs keywordset\n'))
+                ui.status(_(b'\toverriding default cvs keywordset\n'))
         if opts.get(r'rcfile'):
-            ui.readconfig(opts.get('rcfile'))
+            ui.readconfig(opts.get(b'rcfile'))
         if args:
             # simulate hgrc parsing
-            rcmaps = '[keywordmaps]\n%s\n' % '\n'.join(args)
-            repo.vfs.write('hgrc', rcmaps)
-            ui.readconfig(repo.vfs.join('hgrc'))
-        kwmaps = dict(ui.configitems('keywordmaps'))
+            rcmaps = b'[keywordmaps]\n%s\n' % b'\n'.join(args)
+            repo.vfs.write(b'hgrc', rcmaps)
+            ui.readconfig(repo.vfs.join(b'hgrc'))
+        kwmaps = dict(ui.configitems(b'keywordmaps'))
     elif opts.get(r'default'):
         if svn:
-            ui.status(_('\n\tconfiguration using default svn keywordset\n'))
+            ui.status(_(b'\n\tconfiguration using default svn keywordset\n'))
         else:
-            ui.status(_('\n\tconfiguration using default cvs keywordset\n'))
+            ui.status(_(b'\n\tconfiguration using default cvs keywordset\n'))
         kwmaps = _defaultkwmaps(ui)
         if uikwmaps:
-            ui.status(_('\tdisabling current template maps\n'))
+            ui.status(_(b'\tdisabling current template maps\n'))
             for k, v in kwmaps.iteritems():
-                ui.setconfig('keywordmaps', k, v, 'keyword')
+                ui.setconfig(b'keywordmaps', k, v, b'keyword')
     else:
-        ui.status(_('\n\tconfiguration using current keyword template maps\n'))
+        ui.status(_(b'\n\tconfiguration using current keyword template maps\n'))
         if uikwmaps:
             kwmaps = dict(uikwmaps)
         else:
@@ -517,32 +517,32 @@
 
     uisetup(ui)
     reposetup(ui, repo)
-    ui.write('[extensions]\nkeyword =\n')
-    demoitems('keyword', ui.configitems('keyword'))
-    demoitems('keywordset', ui.configitems('keywordset'))
-    demoitems('keywordmaps', kwmaps.iteritems())
-    keywords = '$' + '$\n$'.join(sorted(kwmaps.keys())) + '$\n'
+    ui.write(b'[extensions]\nkeyword =\n')
+    demoitems(b'keyword', ui.configitems(b'keyword'))
+    demoitems(b'keywordset', ui.configitems(b'keywordset'))
+    demoitems(b'keywordmaps', kwmaps.iteritems())
+    keywords = b'$' + b'$\n$'.join(sorted(kwmaps.keys())) + b'$\n'
     repo.wvfs.write(fn, keywords)
     repo[None].add([fn])
-    ui.note(_('\nkeywords written to %s:\n') % fn)
+    ui.note(_(b'\nkeywords written to %s:\n') % fn)
     ui.note(keywords)
     with repo.wlock():
-        repo.dirstate.setbranch('demobranch')
-    for name, cmd in ui.configitems('hooks'):
-        if name.split('.', 1)[0].find('commit') > -1:
-            repo.ui.setconfig('hooks', name, '', 'keyword')
-    msg = _('hg keyword configuration and expansion example')
-    ui.note(("hg ci -m '%s'\n" % msg))
+        repo.dirstate.setbranch(b'demobranch')
+    for name, cmd in ui.configitems(b'hooks'):
+        if name.split(b'.', 1)[0].find(b'commit') > -1:
+            repo.ui.setconfig(b'hooks', name, b'', b'keyword')
+    msg = _(b'hg keyword configuration and expansion example')
+    ui.note((b"hg ci -m '%s'\n" % msg))
     repo.commit(text=msg)
-    ui.status(_('\n\tkeywords expanded\n'))
+    ui.status(_(b'\n\tkeywords expanded\n'))
     ui.write(repo.wread(fn))
     repo.wvfs.rmtree(repo.root)
 
 
 @command(
-    'kwexpand',
+    b'kwexpand',
     cmdutil.walkopts,
-    _('hg kwexpand [OPTION]... [FILE]...'),
+    _(b'hg kwexpand [OPTION]... [FILE]...'),
     inferrepo=True,
 )
 def expand(ui, repo, *pats, **opts):
@@ -557,14 +557,14 @@
 
 
 @command(
-    'kwfiles',
+    b'kwfiles',
     [
-        ('A', 'all', None, _('show keyword status flags of all files')),
-        ('i', 'ignore', None, _('show files excluded from expansion')),
-        ('u', 'unknown', None, _('only show unknown (not tracked) files')),
+        (b'A', b'all', None, _(b'show keyword status flags of all files')),
+        (b'i', b'ignore', None, _(b'show files excluded from expansion')),
+        (b'u', b'unknown', None, _(b'only show unknown (not tracked) files')),
     ]
     + cmdutil.walkopts,
-    _('hg kwfiles [OPTION]... [FILE]...'),
+    _(b'hg kwfiles [OPTION]... [FILE]...'),
     inferrepo=True,
 )
 def files(ui, repo, *pats, **opts):
@@ -594,31 +594,31 @@
     if pats:
         cwd = repo.getcwd()
     else:
-        cwd = ''
+        cwd = b''
     files = []
     opts = pycompat.byteskwargs(opts)
-    if not opts.get('unknown') or opts.get('all'):
+    if not opts.get(b'unknown') or opts.get(b'all'):
         files = sorted(status.modified + status.added + status.clean)
     kwfiles = kwt.iskwfile(files, wctx)
     kwdeleted = kwt.iskwfile(status.deleted, wctx)
     kwunknown = kwt.iskwfile(status.unknown, wctx)
-    if not opts.get('ignore') or opts.get('all'):
+    if not opts.get(b'ignore') or opts.get(b'all'):
         showfiles = kwfiles, kwdeleted, kwunknown
     else:
         showfiles = [], [], []
-    if opts.get('all') or opts.get('ignore'):
+    if opts.get(b'all') or opts.get(b'ignore'):
         showfiles += (
             [f for f in files if f not in kwfiles],
             [f for f in status.unknown if f not in kwunknown],
         )
-    kwlabels = 'enabled deleted enabledunknown ignored ignoredunknown'.split()
-    kwstates = zip(kwlabels, pycompat.bytestr('K!kIi'), showfiles)
-    fm = ui.formatter('kwfiles', opts)
-    fmt = '%.0s%s\n'
-    if opts.get('all') or ui.verbose:
-        fmt = '%s %s\n'
+    kwlabels = b'enabled deleted enabledunknown ignored ignoredunknown'.split()
+    kwstates = zip(kwlabels, pycompat.bytestr(b'K!kIi'), showfiles)
+    fm = ui.formatter(b'kwfiles', opts)
+    fmt = b'%.0s%s\n'
+    if opts.get(b'all') or ui.verbose:
+        fmt = b'%s %s\n'
     for kwstate, char, filenames in kwstates:
-        label = 'kwfiles.' + kwstate
+        label = b'kwfiles.' + kwstate
         for f in filenames:
             fm.startitem()
             fm.data(kwstatus=char, path=f)
@@ -627,9 +627,9 @@
 
 
 @command(
-    'kwshrink',
+    b'kwshrink',
     cmdutil.walkopts,
-    _('hg kwshrink [OPTION]... [FILE]...'),
+    _(b'hg kwshrink [OPTION]... [FILE]...'),
     inferrepo=True,
 )
 def shrink(ui, repo, *pats, **opts):
@@ -715,7 +715,7 @@
         return orig(ui, repo, pats, opts, rename)
     with repo.wlock():
         orig(ui, repo, pats, opts, rename)
-        if opts.get('dry_run'):
+        if opts.get(b'dry_run'):
             return
         wctx = repo[None]
         cwd = repo.getcwd()
@@ -725,7 +725,7 @@
             expansion or a symlink which points to a file configured for
             expansion. '''
             source = repo.dirstate.copied(dest)
-            if 'l' in wctx.flags(source):
+            if b'l' in wctx.flags(source):
                 source = pathutil.canonpath(
                     repo.root, cwd, os.path.realpath(source)
                 )
@@ -734,7 +734,7 @@
         candidates = [
             f
             for f in repo.dirstate.copies()
-            if 'l' not in wctx.flags(f) and haskwsource(f)
+            if b'l' not in wctx.flags(f) and haskwsource(f)
         ]
         kwt.overwrite(wctx, candidates, False, False)
 
@@ -748,10 +748,10 @@
         # record returns 0 even when nothing has changed
         # therefore compare nodes before and after
         kwt.postcommit = True
-        ctx = repo['.']
+        ctx = repo[b'.']
         wstatus = ctx.status()
         ret = orig(ui, repo, commitfunc, *pats, **opts)
-        recctx = repo['.']
+        recctx = repo[b'.']
         if ctx != recctx:
             modified, added = _preselect(wstatus, recctx.files())
             kwt.restrict = False
@@ -774,7 +774,7 @@
         and (
             self._repo._encodefilterpats
             or kwt.match(fctx.path())
-            and 'l' not in fctx.flags()
+            and b'l' not in fctx.flags()
             or self.size() - 4 == fctx.size()
         )
         or self.size() == fctx.size()
@@ -794,17 +794,17 @@
     def kwdispatch_parse(orig, ui, args):
         '''Monkeypatch dispatch._parse to obtain running hg command.'''
         cmd, func, args, options, cmdoptions = orig(ui, args)
-        kwtools['hgcmd'] = cmd
+        kwtools[b'hgcmd'] = cmd
         return cmd, func, args, options, cmdoptions
 
-    extensions.wrapfunction(dispatch, '_parse', kwdispatch_parse)
+    extensions.wrapfunction(dispatch, b'_parse', kwdispatch_parse)
 
-    extensions.wrapfunction(context.filectx, 'cmp', kwfilectx_cmp)
-    extensions.wrapfunction(patch.patchfile, '__init__', kwpatchfile_init)
-    extensions.wrapfunction(patch, 'diff', kwdiff)
-    extensions.wrapfunction(cmdutil, 'amend', kw_amend)
-    extensions.wrapfunction(cmdutil, 'copy', kw_copy)
-    extensions.wrapfunction(cmdutil, 'dorecord', kw_dorecord)
+    extensions.wrapfunction(context.filectx, b'cmp', kwfilectx_cmp)
+    extensions.wrapfunction(patch.patchfile, b'__init__', kwpatchfile_init)
+    extensions.wrapfunction(patch, b'diff', kwdiff)
+    extensions.wrapfunction(cmdutil, b'amend', kw_amend)
+    extensions.wrapfunction(cmdutil, b'copy', kw_copy)
+    extensions.wrapfunction(cmdutil, b'dorecord', kw_dorecord)
     for c in nokwwebcommands.split():
         extensions.wrapfunction(webcommands, c, kwweb_skip)
 
@@ -815,17 +815,17 @@
     try:
         if (
             not repo.local()
-            or kwtools['hgcmd'] in nokwcommands.split()
-            or '.hg' in util.splitpath(repo.root)
-            or repo._url.startswith('bundle:')
+            or kwtools[b'hgcmd'] in nokwcommands.split()
+            or b'.hg' in util.splitpath(repo.root)
+            or repo._url.startswith(b'bundle:')
         ):
             return
     except AttributeError:
         pass
 
-    inc, exc = [], ['.hg*']
-    for pat, opt in ui.configitems('keyword'):
-        if opt != 'ignore':
+    inc, exc = [], [b'.hg*']
+    for pat, opt in ui.configitems(b'keyword'):
+        if opt != b'ignore':
             inc.append(pat)
         else:
             exc.append(pat)
@@ -836,7 +836,7 @@
 
     class kwrepo(repo.__class__):
         def file(self, f):
-            if f[0] == '/':
+            if f[0] == b'/':
                 f = f[1:]
             return kwfilelog(self.svfs, kwt, f)
 
@@ -870,10 +870,10 @@
                 origrestrict = kwt.restrict
                 try:
                     if not dryrun:
-                        changed = self['.'].files()
+                        changed = self[b'.'].files()
                     ret = super(kwrepo, self).rollback(dryrun, force)
                     if not dryrun:
-                        ctx = self['.']
+                        ctx = self[b'.']
                         modified, added = _preselect(ctx.status(), changed)
                         kwt.restrict = False
                         kwt.overwrite(ctx, modified, True, True)
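
keyword.py builds one regular expression from the '|'-joined, escaped keyword
names and rewrites each $Keyword$ marker to '$Keyword: value $' (see the
escape property and kwsub above). A simplified sketch of that expansion; the
real kwtemplater renders the value from a changeset template, for which a
static dict stands in here, and re.escape stands in for stringutil.reescape:

    import re

    templates = {b'Revision': b'abc123', b'Author': b'alice'}
    escaped = b'|'.join(map(re.escape, sorted(templates)))
    rekw = re.compile(br'\$(%s)\$' % escaped)

    def kwsub(match):
        kw = match.group(1)
        return b'$%s: %s $' % (kw, templates[kw])

    print(rekw.sub(kwsub, b'id: $Revision$ by $Author$').decode('ascii'))
    # id: $Revision: abc123 $ by $Author: alice $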
--- a/hgext/largefiles/__init__.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/largefiles/__init__.py	Sun Oct 06 09:48:39 2019 -0400
@@ -128,7 +128,7 @@
 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
 # be specifying the version(s) of Mercurial they are tested with, or
 # leave the attribute unspecified.
-testedwith = 'ships-with-hg-core'
+testedwith = b'ships-with-hg-core'
 
 eh = exthelper.exthelper()
 eh.merge(lfcommands.eh)
@@ -136,13 +136,13 @@
 eh.merge(proto.eh)
 
 eh.configitem(
-    'largefiles', 'minsize', default=eh.configitem.dynamicdefault,
+    b'largefiles', b'minsize', default=eh.configitem.dynamicdefault,
 )
 eh.configitem(
-    'largefiles', 'patterns', default=list,
+    b'largefiles', b'patterns', default=list,
 )
 eh.configitem(
-    'largefiles', 'usercache', default=None,
+    b'largefiles', b'usercache', default=None,
 )
 
 cmdtable = eh.cmdtable
@@ -154,7 +154,7 @@
 
 def featuresetup(ui, supported):
     # don't die on seeing a repo with the largefiles requirement
-    supported |= {'largefiles'}
+    supported |= {b'largefiles'}
 
 
 @eh.uisetup
@@ -162,25 +162,25 @@
     localrepo.featuresetupfuncs.add(featuresetup)
     hg.wirepeersetupfuncs.append(proto.wirereposetup)
 
-    cmdutil.outgoinghooks.add('largefiles', overrides.outgoinghook)
-    cmdutil.summaryremotehooks.add('largefiles', overrides.summaryremotehook)
+    cmdutil.outgoinghooks.add(b'largefiles', overrides.outgoinghook)
+    cmdutil.summaryremotehooks.add(b'largefiles', overrides.summaryremotehook)
 
     # create the new wireproto commands ...
-    wireprotov1server.wireprotocommand('putlfile', 'sha', permission='push')(
+    wireprotov1server.wireprotocommand(b'putlfile', b'sha', permission=b'push')(
         proto.putlfile
     )
-    wireprotov1server.wireprotocommand('getlfile', 'sha', permission='pull')(
+    wireprotov1server.wireprotocommand(b'getlfile', b'sha', permission=b'pull')(
         proto.getlfile
     )
-    wireprotov1server.wireprotocommand('statlfile', 'sha', permission='pull')(
-        proto.statlfile
-    )
-    wireprotov1server.wireprotocommand('lheads', '', permission='pull')(
+    wireprotov1server.wireprotocommand(
+        b'statlfile', b'sha', permission=b'pull'
+    )(proto.statlfile)
+    wireprotov1server.wireprotocommand(b'lheads', b'', permission=b'pull')(
         wireprotov1server.heads
     )
 
     extensions.wrapfunction(
-        wireprotov1server.commands['heads'], 'func', proto.heads
+        wireprotov1server.commands[b'heads'], b'func', proto.heads
     )
     # TODO also wrap wireproto.commandsv2 once heads is implemented there.
 
@@ -193,9 +193,9 @@
 
     # override some extensions' stuff as well
     for name, module in extensions.extensions():
-        if name == 'rebase':
+        if name == b'rebase':
             # TODO: teach exthelper to handle this
-            extensions.wrapfunction(module, 'rebase', overrides.overriderebase)
+            extensions.wrapfunction(module, b'rebase', overrides.overriderebase)
 
 
 revsetpredicate = eh.revsetpredicate
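
The registrations above call wireprotocommand(name, args, permission=...) and
immediately apply the returned decorator to an existing handler, which is also
why black could split the statlfile call across the parentheses without
changing behavior. A toy registry showing the idiom; the registry and handler
below are stand-ins, not the wireproto API:

    commands = {}

    def wireprotocommand(name, args=b'', permission=b'pull'):
        # Returns a decorator that records the handler under its name.
        def register(func):
            commands[name] = (func, args, permission)
            return func
        return register

    def getlfile(repo, proto, sha):
        return b'0\n'  # stand-in payload

    wireprotocommand(b'getlfile', b'sha', permission=b'pull')(getlfile)
    assert commands[b'getlfile'][2] == b'pull'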
--- a/hgext/largefiles/basestore.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/largefiles/basestore.py	Sun Oct 06 09:48:39 2019 -0400
@@ -27,7 +27,7 @@
         self.detail = detail
 
     def longmessage(self):
-        return _("error getting id %s from url %s for file %s: %s\n") % (
+        return _(b"error getting id %s from url %s for file %s: %s\n") % (
             self.hash,
             util.hidepassword(self.url),
             self.filename,
@@ -35,7 +35,7 @@
         )
 
     def __str__(self):
-        return "%s: %s" % (util.hidepassword(self.url), self.detail)
+        return b"%s: %s" % (util.hidepassword(self.url), self.detail)
 
 
 class basestore(object):
@@ -46,12 +46,12 @@
 
     def put(self, source, hash):
         '''Put source file into the store so it can be retrieved by hash.'''
-        raise NotImplementedError('abstract method')
+        raise NotImplementedError(b'abstract method')
 
     def exists(self, hashes):
         '''Check to see if the store contains the given hashes. Given an
         iterable of hashes it returns a mapping from hash to bool.'''
-        raise NotImplementedError('abstract method')
+        raise NotImplementedError(b'abstract method')
 
     def get(self, files):
         '''Get the specified largefiles from the store and write to local
@@ -69,16 +69,16 @@
         at = 0
         available = self.exists(set(hash for (_filename, hash) in files))
         with ui.makeprogress(
-            _('getting largefiles'), unit=_('files'), total=len(files)
+            _(b'getting largefiles'), unit=_(b'files'), total=len(files)
         ) as progress:
             for filename, hash in files:
                 progress.update(at)
                 at += 1
-                ui.note(_('getting %s:%s\n') % (filename, hash))
+                ui.note(_(b'getting %s:%s\n') % (filename, hash))
 
                 if not available.get(hash):
                     ui.warn(
-                        _('%s: largefile %s not available from %s\n')
+                        _(b'%s: largefile %s not available from %s\n')
                         % (filename, hash, util.hidepassword(self.url))
                     )
                     missing.append(filename)
@@ -96,10 +96,10 @@
         store and in the usercache.
         filename is for informational messages only.
         """
-        util.makedirs(lfutil.storepath(self.repo, ''))
+        util.makedirs(lfutil.storepath(self.repo, b''))
         storefilename = lfutil.storepath(self.repo, hash)
 
-        tmpname = storefilename + '.tmp'
+        tmpname = storefilename + b'.tmp'
         with util.atomictempfile(
             tmpname, createmode=self.repo.store.createmode
         ) as tmpfile:
@@ -107,12 +107,12 @@
                 gothash = self._getfile(tmpfile, filename, hash)
             except StoreError as err:
                 self.ui.warn(err.longmessage())
-                gothash = ""
+                gothash = b""
 
         if gothash != hash:
-            if gothash != "":
+            if gothash != b"":
                 self.ui.warn(
-                    _('%s: data corruption (expected %s, got %s)\n')
+                    _(b'%s: data corruption (expected %s, got %s)\n')
                     % (filename, hash, gothash)
                 )
             util.unlink(tmpname)
@@ -128,13 +128,13 @@
         Return 0 if all is well, non-zero on any errors.'''
 
         self.ui.status(
-            _('searching %d changesets for largefiles\n') % len(revs)
+            _(b'searching %d changesets for largefiles\n') % len(revs)
         )
         verified = set()  # set of (filename, filenode) tuples
         filestocheck = []  # list of (cset, filename, expectedhash)
         for rev in revs:
             cctx = self.repo[rev]
-            cset = "%d:%s" % (cctx.rev(), node.short(cctx.node()))
+            cset = b"%d:%s" % (cctx.rev(), node.short(cctx.node()))
 
             for standin in cctx:
                 filename = lfutil.splitstandin(standin)
@@ -152,12 +152,12 @@
         numlfiles = len({fname for (fname, fnode) in verified})
         if contents:
             self.ui.status(
-                _('verified contents of %d revisions of %d largefiles\n')
+                _(b'verified contents of %d revisions of %d largefiles\n')
                 % (numrevs, numlfiles)
             )
         else:
             self.ui.status(
-                _('verified existence of %d revisions of %d largefiles\n')
+                _(b'verified existence of %d revisions of %d largefiles\n')
                 % (numrevs, numlfiles)
             )
         return int(failed)
@@ -168,7 +168,7 @@
         downloads and return the hash.  Close tmpfile.  Raise
         StoreError if unable to download the file (e.g. it does not
         exist in the store).'''
-        raise NotImplementedError('abstract method')
+        raise NotImplementedError(b'abstract method')
 
     def _verifyfiles(self, contents, filestocheck):
         '''Perform the actual verification of files in the store.
@@ -176,4 +176,4 @@
         'filestocheck' is list of files to check.
         Returns _true_ if any problems are found!
         '''
-        raise NotImplementedError('abstract method')
+        raise NotImplementedError(b'abstract method')
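
The download path in basestore above writes into a '.tmp' sibling of the
store path, verifies the content hash, and discards the temporary file on a
mismatch. A self-contained stand-in for that flow, with plain file operations
in place of util.atomictempfile and a faked store download:

    import hashlib
    import os
    import tempfile

    def fetchinto(f):
        # Fake the store download; returns the hex hash of what was written.
        data = b'largefile payload'
        f.write(data)
        return hashlib.sha1(data).hexdigest().encode('ascii')

    def gethash(storefilename, expectedhash):
        tmpname = storefilename + b'.tmp'
        with open(tmpname, 'wb') as f:
            gothash = fetchinto(f)
        if gothash != expectedhash:
            os.unlink(tmpname)       # data corruption: drop the temp file
            return b''
        os.rename(tmpname, storefilename)
        return gothash

    store = tempfile.mkdtemp().encode('ascii')
    want = hashlib.sha1(b'largefile payload').hexdigest().encode('ascii')
    assert gethash(os.path.join(store, b'f'), want) == want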
--- a/hgext/largefiles/lfcommands.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/largefiles/lfcommands.py	Sun Oct 06 09:48:39 2019 -0400
@@ -45,23 +45,23 @@
 
 
 @eh.command(
-    'lfconvert',
+    b'lfconvert',
     [
         (
-            's',
-            'size',
-            '',
-            _('minimum size (MB) for files to be converted as largefiles'),
-            'SIZE',
+            b's',
+            b'size',
+            b'',
+            _(b'minimum size (MB) for files to be converted as largefiles'),
+            b'SIZE',
         ),
         (
-            '',
-            'to-normal',
+            b'',
+            b'to-normal',
             False,
-            _('convert from a largefiles repo to a normal repo'),
+            _(b'convert from a largefiles repo to a normal repo'),
         ),
     ],
-    _('hg lfconvert SOURCE DEST [FILE ...]'),
+    _(b'hg lfconvert SOURCE DEST [FILE ...]'),
     norepo=True,
     inferrepo=True,
 )
@@ -85,19 +85,19 @@
     this, the DEST repository can be used without largefiles at all.'''
 
     opts = pycompat.byteskwargs(opts)
-    if opts['to_normal']:
+    if opts[b'to_normal']:
         tolfile = False
     else:
         tolfile = True
-        size = lfutil.getminsize(ui, True, opts.get('size'), default=None)
+        size = lfutil.getminsize(ui, True, opts.get(b'size'), default=None)
 
     if not hg.islocal(src):
-        raise error.Abort(_('%s is not a local Mercurial repo') % src)
+        raise error.Abort(_(b'%s is not a local Mercurial repo') % src)
     if not hg.islocal(dest):
-        raise error.Abort(_('%s is not a local Mercurial repo') % dest)
+        raise error.Abort(_(b'%s is not a local Mercurial repo') % dest)
 
     rsrc = hg.repository(ui, src)
-    ui.status(_('initializing destination %s\n') % dest)
+    ui.status(_(b'initializing destination %s\n') % dest)
     rdst = hg.repository(ui, dest, create=True)
 
     success = False
@@ -122,17 +122,17 @@
             lfiles = set()
             normalfiles = set()
             if not pats:
-                pats = ui.configlist(lfutil.longname, 'patterns')
+                pats = ui.configlist(lfutil.longname, b'patterns')
             if pats:
-                matcher = matchmod.match(rsrc.root, '', list(pats))
+                matcher = matchmod.match(rsrc.root, b'', list(pats))
             else:
                 matcher = None
 
             lfiletohash = {}
             with ui.makeprogress(
-                _('converting revisions'),
-                unit=_('revisions'),
-                total=rsrc['tip'].rev(),
+                _(b'converting revisions'),
+                unit=_(b'revisions'),
+                total=rsrc[b'tip'].rev(),
             ) as progress:
                 for ctx in ctxs:
                     progress.update(ctx.rev())
@@ -162,14 +162,14 @@
             # If there were any files converted to largefiles, add largefiles
             # to the destination repository's requirements.
             if lfiles:
-                rdst.requirements.add('largefiles')
+                rdst.requirements.add(b'largefiles')
                 rdst._writerequirements()
         else:
 
             class lfsource(filemap.filemap_source):
                 def __init__(self, ui, source):
                     super(lfsource, self).__init__(ui, source, None)
-                    self.filemapper.rename[lfutil.shortname] = '.'
+                    self.filemapper.rename[lfutil.shortname] = b'.'
 
                 def getfile(self, name, rev):
                     realname, realrev = rev
@@ -187,7 +187,7 @@
 
                     if path is None:
                         raise error.Abort(
-                            _("missing largefile for '%s' in %s")
+                            _(b"missing largefile for '%s' in %s")
                             % (realname, realrev)
                         )
                     return util.readfile(path), f[1]
@@ -202,13 +202,15 @@
 
             found, missing = downloadlfiles(ui, rsrc)
             if missing != 0:
-                raise error.Abort(_("all largefiles must be present locally"))
+                raise error.Abort(_(b"all largefiles must be present locally"))
 
             orig = convcmd.converter
             convcmd.converter = converter
 
             try:
-                convcmd.convert(ui, src, dest, source_type='hg', dest_type='hg')
+                convcmd.convert(
+                    ui, src, dest, source_type=b'hg', dest_type=b'hg'
+                )
             finally:
                 convcmd.converter = orig
         success = True
@@ -245,10 +247,11 @@
                     renamed = False
                 renamedlfile = renamed and renamed in lfiles
                 islfile |= renamedlfile
-                if 'l' in fctx.flags():
+                if b'l' in fctx.flags():
                     if renamedlfile:
                         raise error.Abort(
-                            _('renamed/copied largefile %s becomes symlink') % f
+                            _(b'renamed/copied largefile %s becomes symlink')
+                            % f
                         )
                     islfile = False
             if islfile:
@@ -262,18 +265,20 @@
             # largefile in manifest if it has not been removed/renamed
             if f in ctx.manifest():
                 fctx = ctx.filectx(f)
-                if 'l' in fctx.flags():
+                if b'l' in fctx.flags():
                     renamed = fctx.copysource()
                     if renamed and renamed in lfiles:
-                        raise error.Abort(_('largefile %s becomes symlink') % f)
+                        raise error.Abort(
+                            _(b'largefile %s becomes symlink') % f
+                        )
 
                 # largefile was modified, update standins
-                m = hashlib.sha1('')
+                m = hashlib.sha1(b'')
                 m.update(ctx[f].data())
                 hash = node.hex(m.digest())
                 if f not in lfiletohash or lfiletohash[f] != hash:
                     rdst.wwrite(f, ctx[f].data(), ctx[f].flags())
-                    executable = 'x' in ctx[f].flags()
+                    executable = b'x' in ctx[f].flags()
                     lfutil.writestandin(rdst, fstandin, hash, executable)
                     lfiletohash[f] = hash
         else:
@@ -299,9 +304,9 @@
                 repo,
                 memctx,
                 f,
-                lfiletohash[srcfname] + '\n',
-                'l' in fctx.flags(),
-                'x' in fctx.flags(),
+                lfiletohash[srcfname] + b'\n',
+                b'l' in fctx.flags(),
+                b'x' in fctx.flags(),
                 renamed,
             )
         else:
@@ -358,10 +363,10 @@
     renamed = fctx.copysource()
 
     data = fctx.data()
-    if f == '.hgtags':
+    if f == b'.hgtags':
         data = _converttags(repo.ui, revmap, data)
     return context.memfilectx(
-        repo, ctx, f, data, 'l' in fctx.flags(), 'x' in fctx.flags(), renamed
+        repo, ctx, f, data, b'l' in fctx.flags(), b'x' in fctx.flags(), renamed
     )
 
 
@@ -370,28 +375,28 @@
     newdata = []
     for line in data.splitlines():
         try:
-            id, name = line.split(' ', 1)
+            id, name = line.split(b' ', 1)
         except ValueError:
-            ui.warn(_('skipping incorrectly formatted tag %s\n') % line)
+            ui.warn(_(b'skipping incorrectly formatted tag %s\n') % line)
             continue
         try:
             newid = node.bin(id)
         except TypeError:
-            ui.warn(_('skipping incorrectly formatted id %s\n') % id)
+            ui.warn(_(b'skipping incorrectly formatted id %s\n') % id)
             continue
         try:
-            newdata.append('%s %s\n' % (node.hex(revmap[newid]), name))
+            newdata.append(b'%s %s\n' % (node.hex(revmap[newid]), name))
         except KeyError:
-            ui.warn(_('no mapping for id %s\n') % id)
+            ui.warn(_(b'no mapping for id %s\n') % id)
             continue
-    return ''.join(newdata)
+    return b''.join(newdata)
 
 
 def _islfile(file, ctx, matcher, size):
     '''Return true if file should be considered a largefile, i.e.
     matcher matches it or it is larger than size.'''
     # never store special .hg* files as largefiles
-    if file == '.hgtags' or file == '.hgignore' or file == '.hgsigs':
+    if file == b'.hgtags' or file == b'.hgignore' or file == b'.hgsigs':
         return False
     if matcher and matcher(file):
         return True
@@ -410,13 +415,13 @@
     store = storefactory.openstore(rsrc, rdst, put=True)
 
     at = 0
-    ui.debug("sending statlfile command for %d largefiles\n" % len(files))
+    ui.debug(b"sending statlfile command for %d largefiles\n" % len(files))
     retval = store.exists(files)
     files = [h for h in files if not retval[h]]
-    ui.debug("%d largefiles need to be uploaded\n" % len(files))
+    ui.debug(b"%d largefiles need to be uploaded\n" % len(files))
 
     with ui.makeprogress(
-        _('uploading largefiles'), unit=_('files'), total=len(files)
+        _(b'uploading largefiles'), unit=_(b'files'), total=len(files)
     ) as progress:
         for hash in files:
             progress.update(at)
@@ -424,8 +429,8 @@
             if not source:
                 raise error.Abort(
                     _(
-                        'largefile %s missing from store'
-                        ' (needs to be uploaded)'
+                        b'largefile %s missing from store'
+                        b' (needs to be uploaded)'
                     )
                     % hash
                 )
@@ -441,9 +446,9 @@
     matches the revision ID).  With --all, check every changeset in
     this repository.'''
     if all:
-        revs = repo.revs('all()')
+        revs = repo.revs(b'all()')
     else:
-        revs = ['.']
+        revs = [b'.']
 
     store = storefactory.openstore(repo)
     return store.verify(revs, contents=contents)
@@ -489,13 +494,13 @@
     totalsuccess = 0
     totalmissing = 0
     if rev != []:  # walkchangerevs on empty list would return all revs
-        for ctx in cmdutil.walkchangerevs(repo, match, {'rev': rev}, prepare):
+        for ctx in cmdutil.walkchangerevs(repo, match, {b'rev': rev}, prepare):
             success, missing = cachelfiles(ui, repo, ctx.node())
             totalsuccess += len(success)
             totalmissing += len(missing)
-    ui.status(_("%d additional largefiles cached\n") % totalsuccess)
+    ui.status(_(b"%d additional largefiles cached\n") % totalsuccess)
     if totalmissing > 0:
-        ui.status(_("%d largefiles failed to download\n") % totalmissing)
+        ui.status(_(b"%d largefiles failed to download\n") % totalmissing)
     return totalsuccess, totalmissing
 
 
@@ -534,9 +539,9 @@
                     shutil.copyfile(wvfs.join(lfile), wvfs.join(lfileorig))
                     wvfs.unlinkpath(standinorig)
                 expecthash = lfutil.readasstandin(wctx[standin])
-                if expecthash != '':
+                if expecthash != b'':
                     if lfile not in wctx:  # not switched to normal file
-                        if repo.dirstate[standin] != '?':
+                        if repo.dirstate[standin] != b'?':
                             wvfs.unlinkpath(lfile, ignoremissing=True)
                         else:
                             dropped.add(lfile)
@@ -571,7 +576,7 @@
                 # the M state.
                 lfutil.synclfdirstate(repo, lfdirstate, f, normallookup)
 
-            statuswriter(_('getting changed largefiles\n'))
+            statuswriter(_(b'getting changed largefiles\n'))
             cachelfiles(ui, repo, None, lfiles)
 
         for lfile in lfiles:
@@ -610,17 +615,17 @@
         lfdirstate.write()
         if lfiles:
             statuswriter(
-                _('%d largefiles updated, %d removed\n') % (updated, removed)
+                _(b'%d largefiles updated, %d removed\n') % (updated, removed)
             )
 
 
 @eh.command(
-    'lfpull',
-    [('r', 'rev', [], _('pull largefiles for these revisions'))]
+    b'lfpull',
+    [(b'r', b'rev', [], _(b'pull largefiles for these revisions'))]
     + cmdutil.remoteopts,
-    _('-r REV... [-e CMD] [--remotecmd CMD] [SOURCE]'),
+    _(b'-r REV... [-e CMD] [--remotecmd CMD] [SOURCE]'),
 )
-def lfpull(ui, repo, source="default", **opts):
+def lfpull(ui, repo, source=b"default", **opts):
     """pull largefiles for the specified revisions from the specified source
 
     Pull largefiles that are referenced from local changesets but missing
@@ -645,20 +650,20 @@
 
     revs = opts.get(r'rev', [])
     if not revs:
-        raise error.Abort(_('no revisions specified'))
+        raise error.Abort(_(b'no revisions specified'))
     revs = scmutil.revrange(repo, revs)
 
     numcached = 0
     for rev in revs:
-        ui.note(_('pulling largefiles for revision %d\n') % rev)
+        ui.note(_(b'pulling largefiles for revision %d\n') % rev)
         (cached, missing) = cachelfiles(ui, repo, rev)
         numcached += len(cached)
-    ui.status(_("%d largefiles cached\n") % numcached)
+    ui.status(_(b"%d largefiles cached\n") % numcached)
 
 
-@eh.command('debuglfput', [] + cmdutil.remoteopts, _('FILE'))
+@eh.command(b'debuglfput', [] + cmdutil.remoteopts, _(b'FILE'))
 def debuglfput(ui, repo, filepath, **kwargs):
     hash = lfutil.hashfile(filepath)
     storefactory.openstore(repo).put(filepath, hash)
-    ui.write('%s\n' % hash)
+    ui.write(b'%s\n' % hash)
     return 0
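
The hashlib and ui.write changes in this file lean on two Python 3 rules worth keeping in mind while reading the hunks above: hashlib refuses unencoded str input, and %-formatting of bytes only exists from Python 3.5 onward (PEP 461). A minimal standalone sketch of both behaviors, outside the patch itself (the names here are illustrative, not Mercurial APIs):

    import binascii
    import hashlib

    # hashlib.sha1(b'') is accepted on Python 2 and 3; seeding with an
    # empty byte string is equivalent to passing no argument at all.
    # hashlib.sha1('') raises TypeError on Python 3
    # ("Strings must be encoded before hashing").
    hasher = hashlib.sha1(b'')
    hasher.update(b'chunk of file data')
    digest = hasher.digest()  # bytes on both major versions

    # PEP 461 bytes %-formatting is what keeps patterns like
    # ui.write(b'%s\n' % hash) working after byteification.
    line = b'%s\n' % binascii.hexlify(digest)
    assert line.endswith(b'\n')
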
--- a/hgext/largefiles/lfutil.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/largefiles/lfutil.py	Sun Oct 06 09:48:39 2019 -0400
@@ -31,9 +31,9 @@
     vfs as vfsmod,
 )
 
-shortname = '.hglf'
-shortnameslash = shortname + '/'
-longname = 'largefiles'
+shortname = b'.hglf'
+shortnameslash = shortname + b'/'
+longname = b'largefiles'
 
 # -- Private worker functions ------------------------------------------
 
@@ -41,16 +41,16 @@
 def getminsize(ui, assumelfiles, opt, default=10):
     lfsize = opt
     if not lfsize and assumelfiles:
-        lfsize = ui.config(longname, 'minsize', default=default)
+        lfsize = ui.config(longname, b'minsize', default=default)
     if lfsize:
         try:
             lfsize = float(lfsize)
         except ValueError:
             raise error.Abort(
-                _('largefiles: size must be number (not %s)\n') % lfsize
+                _(b'largefiles: size must be number (not %s)\n') % lfsize
             )
     if lfsize is None:
-        raise error.Abort(_('minimum size for largefiles must be specified'))
+        raise error.Abort(_(b'minimum size for largefiles must be specified'))
     return lfsize
 
 
@@ -61,7 +61,7 @@
         util.oslink(src, dest)
     except OSError:
         # if hardlinks fail, fallback on atomic copy
-        with open(src, 'rb') as srcf, util.atomictempfile(dest) as dstf:
+        with open(src, b'rb') as srcf, util.atomictempfile(dest) as dstf:
             for chunk in util.filechunkiter(srcf):
                 dstf.write(chunk)
         os.chmod(dest, os.stat(src).st_mode)
@@ -77,29 +77,31 @@
 
 def _usercachedir(ui, name=longname):
     '''Return the location of the "global" largefiles cache.'''
-    path = ui.configpath(name, 'usercache')
+    path = ui.configpath(name, b'usercache')
     if path:
         return path
     if pycompat.iswindows:
         appdata = encoding.environ.get(
-            'LOCALAPPDATA', encoding.environ.get('APPDATA')
+            b'LOCALAPPDATA', encoding.environ.get(b'APPDATA')
         )
         if appdata:
             return os.path.join(appdata, name)
     elif pycompat.isdarwin:
-        home = encoding.environ.get('HOME')
+        home = encoding.environ.get(b'HOME')
         if home:
-            return os.path.join(home, 'Library', 'Caches', name)
+            return os.path.join(home, b'Library', b'Caches', name)
     elif pycompat.isposix:
-        path = encoding.environ.get('XDG_CACHE_HOME')
+        path = encoding.environ.get(b'XDG_CACHE_HOME')
         if path:
             return os.path.join(path, name)
-        home = encoding.environ.get('HOME')
+        home = encoding.environ.get(b'HOME')
         if home:
-            return os.path.join(home, '.cache', name)
+            return os.path.join(home, b'.cache', name)
     else:
-        raise error.Abort(_('unknown operating system: %s\n') % pycompat.osname)
-    raise error.Abort(_('unknown %s usercache location') % name)
+        raise error.Abort(
+            _(b'unknown operating system: %s\n') % pycompat.osname
+        )
+    raise error.Abort(_(b'unknown %s usercache location') % name)
 
 
 def inusercache(ui, hash):
@@ -113,10 +115,10 @@
     Return None if the file can't be found locally.'''
     path, exists = findstorepath(repo, hash)
     if exists:
-        repo.ui.note(_('found %s in store\n') % hash)
+        repo.ui.note(_(b'found %s in store\n') % hash)
         return path
     elif inusercache(repo.ui, hash):
-        repo.ui.note(_('found %s in system cache\n') % hash)
+        repo.ui.note(_(b'found %s in system cache\n') % hash)
         path = storepath(repo, hash)
         link(usercachepath(repo.ui, hash), path)
         return path
@@ -174,7 +176,7 @@
     # If the largefiles dirstate does not exist, populate and create
     # it. This ensures that we create it on the first meaningful
     # largefiles operation in a new clone.
-    if create and not vfs.exists(vfs.join(lfstoredir, 'dirstate')):
+    if create and not vfs.exists(vfs.join(lfstoredir, b'dirstate')):
         matcher = getstandinmatcher(repo)
         standins = repo.dirstate.walk(
             matcher, subrepos=[], unknown=False, ignored=False
@@ -190,7 +192,7 @@
 
 
 def lfdirstatestatus(lfdirstate, repo):
-    pctx = repo['.']
+    pctx = repo[b'.']
     match = matchmod.always()
     unsure, s = lfdirstate.status(
         match, subrepos=[], ignored=False, clean=False, unknown=False
@@ -220,7 +222,7 @@
     return [
         splitstandin(f)
         for f in repo[rev].walk(matcher)
-        if rev is not None or repo.dirstate[f] != '?'
+        if rev is not None or repo.dirstate[f] != b'?'
     ]
 
 
@@ -268,11 +270,11 @@
     wvfs.makedirs(wvfs.dirname(wvfs.join(filename)))
     # The write may fail before the file is fully written, but we
     # don't use atomic writes in the working copy.
-    with open(path, 'rb') as srcfd, wvfs(filename, 'wb') as destfd:
+    with open(path, b'rb') as srcfd, wvfs(filename, b'wb') as destfd:
         gothash = copyandhash(util.filechunkiter(srcfd), destfd)
     if gothash != hash:
         repo.ui.warn(
-            _('%s: data corruption in %s with hash %s\n')
+            _(b'%s: data corruption in %s with hash %s\n')
             % (filename, path, gothash)
         )
         wvfs.unlink(filename)
@@ -289,7 +291,7 @@
         copytostoreabsolute(repo, wvfs.join(file), hash)
     else:
         repo.ui.warn(
-            _("%s: largefile %s not available from local store\n")
+            _(b"%s: largefile %s not available from local store\n")
             % (file, hash)
         )
 
@@ -309,7 +311,7 @@
         link(usercachepath(repo.ui, hash), storepath(repo, hash))
     else:
         util.makedirs(os.path.dirname(storepath(repo, hash)))
-        with open(file, 'rb') as srcf:
+        with open(file, b'rb') as srcf:
             with util.atomictempfile(
                 storepath(repo, hash), createmode=repo.store.createmode
             ) as dstf:
@@ -382,7 +384,7 @@
     # Split on / because that's what dirstate always uses, even on Windows.
     # Change local separator to / first just in case we are passed filenames
     # from an external source (like the command line).
-    bits = util.pconvert(filename).split('/', 1)
+    bits = util.pconvert(filename).split(b'/', 1)
     if len(bits) == 2 and bits[0] == shortname:
         return bits[1]
     else:
@@ -400,7 +402,7 @@
         executable = getexecutable(file)
         writestandin(repo, standin, hash, executable)
     else:
-        raise error.Abort(_('%s: file not found!') % lfile)
+        raise error.Abort(_(b'%s: file not found!') % lfile)
 
 
 def readasstandin(fctx):
@@ -412,13 +414,13 @@
 
 def writestandin(repo, standin, hash, executable):
     '''write hash to <repo.root>/<standin>'''
-    repo.wwrite(standin, hash + '\n', executable and 'x' or '')
+    repo.wwrite(standin, hash + b'\n', executable and b'x' or b'')
 
 
 def copyandhash(instream, outfile):
     '''Read bytes from instream (iterable) and write them to outfile,
     computing the SHA-1 hash of the data along the way. Return the hash.'''
-    hasher = hashlib.sha1('')
+    hasher = hashlib.sha1(b'')
     for data in instream:
         hasher.update(data)
         outfile.write(data)
@@ -427,8 +429,8 @@
 
 def hashfile(file):
     if not os.path.exists(file):
-        return ''
-    with open(file, 'rb') as fd:
+        return b''
+    with open(file, b'rb') as fd:
         return hexsha1(fd)
 
 
@@ -443,9 +445,9 @@
 
 def urljoin(first, second, *arg):
     def join(left, right):
-        if not left.endswith('/'):
-            left += '/'
-        if right.startswith('/'):
+        if not left.endswith(b'/'):
+            left += b'/'
+        if right.startswith(b'/'):
             right = right[1:]
         return left + right
 
@@ -465,7 +467,7 @@
 
 
 def httpsendfile(ui, filename):
-    return httpconnection.httpsendfile(ui, filename, 'rb')
+    return httpconnection.httpsendfile(ui, filename, b'rb')
 
 
 def unixpath(path):
@@ -475,7 +477,7 @@
 
 def islfilesrepo(repo):
     '''Return true if the repo is a largefile repo.'''
-    if 'largefiles' in repo.requirements and any(
+    if b'largefiles' in repo.requirements and any(
         shortnameslash in f[0] for f in repo.store.datafiles()
     ):
         return True
@@ -510,20 +512,20 @@
         stat = repo.dirstate._map[lfstandin]
         state, mtime = stat[0], stat[3]
     else:
-        state, mtime = '?', -1
-    if state == 'n':
+        state, mtime = b'?', -1
+    if state == b'n':
         if normallookup or mtime < 0 or not repo.wvfs.exists(lfile):
             # state 'n' doesn't ensure 'clean' in this case
             lfdirstate.normallookup(lfile)
         else:
             lfdirstate.normal(lfile)
-    elif state == 'm':
+    elif state == b'm':
         lfdirstate.normallookup(lfile)
-    elif state == 'r':
+    elif state == b'r':
         lfdirstate.remove(lfile)
-    elif state == 'a':
+    elif state == b'a':
         lfdirstate.add(lfile)
-    elif state == '?':
+    elif state == b'?':
         lfdirstate.drop(lfile)
 
 
@@ -569,8 +571,8 @@
 def getlfilestoupload(repo, missing, addfunc):
     makeprogress = repo.ui.makeprogress
     with makeprogress(
-        _('finding outgoing largefiles'),
-        unit=_('revisions'),
+        _(b'finding outgoing largefiles'),
+        unit=_(b'revisions'),
         total=len(missing),
     ) as progress:
         for i, n in enumerate(missing):
@@ -665,7 +667,7 @@
     lfdirstate = openlfdirstate(ui, repo)
     for fstandin in standins:
         lfile = splitstandin(fstandin)
-        if lfdirstate[lfile] != 'r':
+        if lfdirstate[lfile] != b'r':
             updatestandin(repo, lfile, fstandin)
 
     # Cook up a new matcher that only matches regular files or
@@ -689,10 +691,10 @@
         # standin removal, drop the normal file if it is unknown to dirstate.
         # Thus, skip plain largefile names but keep the standin.
         if f in lfiles or fstandin in standins:
-            if repo.dirstate[fstandin] != 'r':
-                if repo.dirstate[f] != 'r':
+            if repo.dirstate[fstandin] != b'r':
+                if repo.dirstate[f] != b'r':
                     continue
-            elif repo.dirstate[f] == '?':
+            elif repo.dirstate[f] == b'?':
                 continue
 
         actualfiles.append(f)
@@ -741,7 +743,7 @@
     Otherwise, this returns the function to always write out (or
     ignore if ``not forcibly``) status.
     '''
-    if forcibly is None and util.safehasattr(repo, '_largefilesenabled'):
+    if forcibly is None and util.safehasattr(repo, b'_largefilesenabled'):
         return repo._lfstatuswriters[-1]
     else:
         if forcibly:
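
The dirstate comparisons rewritten above (b'n', b'm', b'r', b'a', b'?') all compare a one-byte bytes object as a whole value, which behaves identically on Python 2 and 3. What would not survive the port is indexing into a bytes object, since Python 3 returns an int for that. A small standalone sketch of the distinction (the variables are illustrative, not part of Mercurial's dirstate API):

    import sys

    states = b'nrma'
    if sys.version_info[0] >= 3:
        # Indexing bytes yields an int on Python 3 ...
        assert states[0] == ord('n')
    # ... so portable code slices instead, or compares whole values,
    # exactly as the byteified hunks above do.
    assert states[0:1] == b'n'
    assert b'r' in (b'r', b'a', b'?')
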
--- a/hgext/largefiles/localstore.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/largefiles/localstore.py	Sun Oct 06 09:48:39 2019 -0400
@@ -42,9 +42,9 @@
         path = lfutil.findfile(self.remote, hash)
         if not path:
             raise basestore.StoreError(
-                filename, hash, self.url, _("can't get file locally")
+                filename, hash, self.url, _(b"can't get file locally")
             )
-        with open(path, 'rb') as fd:
+        with open(path, b'rb') as fd:
             return lfutil.copyandhash(util.filechunkiter(fd), tmpfile)
 
     def _verifyfiles(self, contents, filestocheck):
@@ -57,7 +57,7 @@
                 )
             if not exists:
                 self.ui.warn(
-                    _('changeset %s: %s references missing %s\n')
+                    _(b'changeset %s: %s references missing %s\n')
                     % (cset, filename, storepath)
                 )
                 failed = True
@@ -65,7 +65,7 @@
                 actualhash = lfutil.hashfile(storepath)
                 if actualhash != expectedhash:
                     self.ui.warn(
-                        _('changeset %s: %s references corrupted %s\n')
+                        _(b'changeset %s: %s references corrupted %s\n')
                         % (cset, filename, storepath)
                     )
                     failed = True
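
The _verifyfiles hunk above distinguishes a missing store blob from a corrupted one by re-hashing the stored file and comparing the result with the expected 40-character hex digest. A rough standalone sketch of that check, assuming one plain file path per blob (check_blob and its layout are hypothetical, not the real largefiles store API):

    import hashlib
    import os

    def check_blob(storepath, expectedhash):
        # Mirror the two warnings above: missing vs. corrupted.
        if not os.path.exists(storepath):
            return 'missing'
        sha = hashlib.sha1()
        with open(storepath, 'rb') as fd:
            # Hash in chunks so large blobs never sit in memory whole.
            for chunk in iter(lambda: fd.read(131072), b''):
                sha.update(chunk)
        if sha.hexdigest().encode('ascii') != expectedhash:
            return 'corrupted'
        return 'ok'
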
--- a/hgext/largefiles/overrides.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/largefiles/overrides.py	Sun Oct 06 09:48:39 2019 -0400
@@ -88,9 +88,9 @@
 
     lfmatcher = None
     if lfutil.islfilesrepo(repo):
-        lfpats = ui.configlist(lfutil.longname, 'patterns')
+        lfpats = ui.configlist(lfutil.longname, b'patterns')
         if lfpats:
-            lfmatcher = matchmod.match(repo.root, '', list(lfpats))
+            lfmatcher = matchmod.match(repo.root, b'', list(lfpats))
 
     lfnames = []
     m = matcher
@@ -106,7 +106,7 @@
         # The normal add code will do that for us.
         if exact and exists:
             if lfile:
-                ui.warn(_('%s already a largefile\n') % uipathfn(f))
+                ui.warn(_(b'%s already a largefile\n') % uipathfn(f))
             continue
 
         if (exact or not exists) and not lfutil.isstandin(f):
@@ -121,7 +121,7 @@
             if large or abovemin or (lfmatcher and lfmatcher(f)):
                 lfnames.append(f)
                 if ui.verbose or not exact:
-                    ui.status(_('adding %s as a largefile\n') % uipathfn(f))
+                    ui.status(_(b'adding %s as a largefile\n') % uipathfn(f))
 
     bad = []
 
@@ -136,11 +136,11 @@
                 lfutil.writestandin(
                     repo,
                     standinname,
-                    hash='',
+                    hash=b'',
                     executable=lfutil.getexecutable(repo.wjoin(f)),
                 )
                 standins.append(standinname)
-                if lfdirstate[f] == 'r':
+                if lfdirstate[f] == b'r':
                     lfdirstate.normallookup(f)
                 else:
                     lfdirstate.add(f)
@@ -177,23 +177,23 @@
     if after:
         remove = deleted
         result = warn(
-            modified + added + clean, _('not removing %s: file still exists\n')
+            modified + added + clean, _(b'not removing %s: file still exists\n')
         )
     else:
         remove = deleted + clean
         result = warn(
             modified,
             _(
-                'not removing %s: file is modified (use -f'
-                ' to force removal)\n'
+                b'not removing %s: file is modified (use -f'
+                b' to force removal)\n'
             ),
         )
         result = (
             warn(
                 added,
                 _(
-                    'not removing %s: file has been marked for add'
-                    ' (use forget to undo)\n'
+                    b'not removing %s: file has been marked for add'
+                    b' (use forget to undo)\n'
                 ),
             )
             or result
@@ -205,7 +205,7 @@
         lfdirstate = lfutil.openlfdirstate(ui, repo)
         for f in sorted(remove):
             if ui.verbose or not m.exact(f):
-                ui.status(_('removing %s\n') % uipathfn(f))
+                ui.status(_(b'removing %s\n') % uipathfn(f))
 
             if not dryrun:
                 if not after:
@@ -234,7 +234,7 @@
 
 # For overriding mercurial.hgweb.webcommands so that largefiles will
 # appear at their right place in the manifests.
-@eh.wrapfunction(webcommands, 'decodepath')
+@eh.wrapfunction(webcommands, b'decodepath')
 def decodepath(orig, path):
     return lfutil.splitstandin(path) or path
 
@@ -243,28 +243,28 @@
 
 
 @eh.wrapcommand(
-    'add',
+    b'add',
     opts=[
-        ('', 'large', None, _('add as largefile')),
-        ('', 'normal', None, _('add as normal file')),
+        (b'', b'large', None, _(b'add as largefile')),
+        (b'', b'normal', None, _(b'add as normal file')),
         (
-            '',
-            'lfsize',
-            '',
+            b'',
+            b'lfsize',
+            b'',
             _(
-                'add all files above this size (in megabytes) '
-                'as largefiles (default: 10)'
+                b'add all files above this size (in megabytes) '
+                b'as largefiles (default: 10)'
             ),
         ),
     ],
 )
 def overrideadd(orig, ui, repo, *pats, **opts):
     if opts.get(r'normal') and opts.get(r'large'):
-        raise error.Abort(_('--normal cannot be used with --large'))
+        raise error.Abort(_(b'--normal cannot be used with --large'))
     return orig(ui, repo, *pats, **opts)
 
 
-@eh.wrapfunction(cmdutil, 'add')
+@eh.wrapfunction(cmdutil, b'add')
 def cmdutiladd(orig, ui, repo, matcher, prefix, uipathfn, explicitonly, **opts):
     # The --normal flag short circuits this override
     if opts.get(r'normal'):
@@ -280,7 +280,7 @@
     return bad
 
 
-@eh.wrapfunction(cmdutil, 'remove')
+@eh.wrapfunction(cmdutil, b'remove')
 def cmdutilremove(
     orig, ui, repo, matcher, prefix, uipathfn, after, force, subrepos, dryrun
 ):
@@ -304,7 +304,7 @@
     )
 
 
-@eh.wrapfunction(subrepo.hgsubrepo, 'status')
+@eh.wrapfunction(subrepo.hgsubrepo, b'status')
 def overridestatusfn(orig, repo, rev2, **opts):
     try:
         repo._repo.lfstatus = True
@@ -313,7 +313,7 @@
         repo._repo.lfstatus = False
 
 
-@eh.wrapcommand('status')
+@eh.wrapcommand(b'status')
 def overridestatus(orig, ui, repo, *pats, **opts):
     try:
         repo.lfstatus = True
@@ -322,7 +322,7 @@
         repo.lfstatus = False
 
 
-@eh.wrapfunction(subrepo.hgsubrepo, 'dirty')
+@eh.wrapfunction(subrepo.hgsubrepo, b'dirty')
 def overridedirty(orig, repo, ignoreupdate=False, missing=False):
     try:
         repo._repo.lfstatus = True
@@ -331,7 +331,7 @@
         repo._repo.lfstatus = False
 
 
-@eh.wrapcommand('log')
+@eh.wrapcommand(b'log')
 def overridelog(orig, ui, repo, *pats, **opts):
     def overridematchandpats(
         orig,
@@ -339,7 +339,7 @@
         pats=(),
         opts=None,
         globbed=False,
-        default='relpath',
+        default=b'relpath',
         badfn=None,
     ):
         """Matcher that merges root directory with .hglf, suitable for log.
@@ -360,13 +360,13 @@
         pats = set(p)
 
         def fixpats(pat, tostandin=lfutil.standin):
-            if pat.startswith('set:'):
+            if pat.startswith(b'set:'):
                 return pat
 
             kindpat = matchmod._patsplit(pat, None)
 
             if kindpat[0] is not None:
-                return kindpat[0] + ':' + tostandin(kindpat[1])
+                return kindpat[0] + b':' + tostandin(kindpat[1])
             return tostandin(kindpat[1])
 
         cwd = repo.getcwd()
@@ -388,7 +388,7 @@
                 if os.path.isabs(cwd):
                     f = f[len(back) :]
                 else:
-                    f = cwd + '/' + f
+                    f = cwd + b'/' + f
                 return back + lfutil.standin(f)
 
         else:
@@ -402,7 +402,7 @@
 
         for i in range(0, len(m._files)):
             # Don't add '.hglf' to m.files, since that is already covered by '.'
-            if m._files[i] == '.':
+            if m._files[i] == b'.':
                 continue
             standin = lfutil.standin(m._files[i])
             # If the "standin" is a directory, append instead of replace to
@@ -427,7 +427,7 @@
 
         m.matchfn = lfmatchfn
 
-        ui.debug('updated patterns: %s\n' % ', '.join(sorted(pats)))
+        ui.debug(b'updated patterns: %s\n' % b', '.join(sorted(pats)))
         return m, pats
 
     # For hg log --patch, the match object is used in two different senses:
@@ -443,35 +443,35 @@
         return lambda ctx: match
 
     wrappedmatchandpats = extensions.wrappedfunction(
-        scmutil, 'matchandpats', overridematchandpats
+        scmutil, b'matchandpats', overridematchandpats
     )
     wrappedmakefilematcher = extensions.wrappedfunction(
-        logcmdutil, '_makenofollowfilematcher', overridemakefilematcher
+        logcmdutil, b'_makenofollowfilematcher', overridemakefilematcher
     )
     with wrappedmatchandpats, wrappedmakefilematcher:
         return orig(ui, repo, *pats, **opts)
 
 
 @eh.wrapcommand(
-    'verify',
+    b'verify',
     opts=[
         (
-            '',
-            'large',
+            b'',
+            b'large',
             None,
-            _('verify that all largefiles in current revision exists'),
+            _(b'verify that all largefiles in current revision exists'),
         ),
         (
-            '',
-            'lfa',
+            b'',
+            b'lfa',
             None,
-            _('verify largefiles in all revisions, not just current'),
+            _(b'verify largefiles in all revisions, not just current'),
         ),
         (
-            '',
-            'lfc',
+            b'',
+            b'lfc',
             None,
-            _('verify local largefile contents, not just existence'),
+            _(b'verify local largefile contents, not just existence'),
         ),
     ],
 )
@@ -487,7 +487,8 @@
 
 
 @eh.wrapcommand(
-    'debugstate', opts=[('', 'large', None, _('display largefiles dirstate'))]
+    b'debugstate',
+    opts=[(b'', b'large', None, _(b'display largefiles dirstate'))],
 )
 def overridedebugstate(orig, ui, repo, *pats, **opts):
     large = opts.pop(r'large', False)
@@ -511,7 +512,7 @@
 # The overridden function filters the unknown files by removing any
 # largefiles. This makes the merge proceed and we can then handle this
 # case further in the overridden calculateupdates function below.
-@eh.wrapfunction(merge, '_checkunknownfile')
+@eh.wrapfunction(merge, b'_checkunknownfile')
 def overridecheckunknownfile(origfn, repo, wctx, mctx, f, f2=None):
     if lfutil.standin(repo.dirstate.normalize(f)) in wctx:
         return False
@@ -544,7 +545,7 @@
 # Finally, the merge.applyupdates function will then take care of
 # writing the files into the working copy and lfcommands.updatelfiles
 # will update the largefiles.
-@eh.wrapfunction(merge, 'calculateupdates')
+@eh.wrapfunction(merge, b'calculateupdates')
 def overridecalculateupdates(
     origfn, repo, p1, p2, pas, branchmerge, force, acceptremote, *args, **kwargs
 ):
@@ -569,71 +570,71 @@
         standin = lfutil.standin(lfile)
         (lm, largs, lmsg) = actions.get(lfile, (None, None, None))
         (sm, sargs, smsg) = actions.get(standin, (None, None, None))
-        if sm in ('g', 'dc') and lm != 'r':
-            if sm == 'dc':
+        if sm in (b'g', b'dc') and lm != b'r':
+            if sm == b'dc':
                 f1, f2, fa, move, anc = sargs
                 sargs = (p2[f2].flags(), False)
             # Case 1: normal file in the working copy, largefile in
             # the second parent
             usermsg = (
                 _(
-                    'remote turned local normal file %s into a largefile\n'
-                    'use (l)argefile or keep (n)ormal file?'
-                    '$$ &Largefile $$ &Normal file'
+                    b'remote turned local normal file %s into a largefile\n'
+                    b'use (l)argefile or keep (n)ormal file?'
+                    b'$$ &Largefile $$ &Normal file'
                 )
                 % lfile
             )
             if repo.ui.promptchoice(usermsg, 0) == 0:  # pick remote largefile
-                actions[lfile] = ('r', None, 'replaced by standin')
-                actions[standin] = ('g', sargs, 'replaces standin')
+                actions[lfile] = (b'r', None, b'replaced by standin')
+                actions[standin] = (b'g', sargs, b'replaces standin')
             else:  # keep local normal file
-                actions[lfile] = ('k', None, 'replaces standin')
+                actions[lfile] = (b'k', None, b'replaces standin')
                 if branchmerge:
-                    actions[standin] = ('k', None, 'replaced by non-standin')
+                    actions[standin] = (b'k', None, b'replaced by non-standin')
                 else:
-                    actions[standin] = ('r', None, 'replaced by non-standin')
-        elif lm in ('g', 'dc') and sm != 'r':
-            if lm == 'dc':
+                    actions[standin] = (b'r', None, b'replaced by non-standin')
+        elif lm in (b'g', b'dc') and sm != b'r':
+            if lm == b'dc':
                 f1, f2, fa, move, anc = largs
                 largs = (p2[f2].flags(), False)
             # Case 2: largefile in the working copy, normal file in
             # the second parent
             usermsg = (
                 _(
-                    'remote turned local largefile %s into a normal file\n'
-                    'keep (l)argefile or use (n)ormal file?'
-                    '$$ &Largefile $$ &Normal file'
+                    b'remote turned local largefile %s into a normal file\n'
+                    b'keep (l)argefile or use (n)ormal file?'
+                    b'$$ &Largefile $$ &Normal file'
                 )
                 % lfile
             )
             if repo.ui.promptchoice(usermsg, 0) == 0:  # keep local largefile
                 if branchmerge:
                     # largefile can be restored from standin safely
-                    actions[lfile] = ('k', None, 'replaced by standin')
-                    actions[standin] = ('k', None, 'replaces standin')
+                    actions[lfile] = (b'k', None, b'replaced by standin')
+                    actions[standin] = (b'k', None, b'replaces standin')
                 else:
                     # "lfile" should be marked as "removed" without
                     # removal of itself
                     actions[lfile] = (
-                        'lfmr',
+                        b'lfmr',
                         None,
-                        'forget non-standin largefile',
+                        b'forget non-standin largefile',
                     )
 
                     # linear-merge should treat this largefile as 're-added'
-                    actions[standin] = ('a', None, 'keep standin')
+                    actions[standin] = (b'a', None, b'keep standin')
             else:  # pick remote normal file
-                actions[lfile] = ('g', largs, 'replaces standin')
-                actions[standin] = ('r', None, 'replaced by non-standin')
+                actions[lfile] = (b'g', largs, b'replaces standin')
+                actions[standin] = (b'r', None, b'replaced by non-standin')
 
     return actions, diverge, renamedelete
 
 
-@eh.wrapfunction(merge, 'recordupdates')
+@eh.wrapfunction(merge, b'recordupdates')
 def mergerecordupdates(orig, repo, actions, branchmerge, getfiledata):
-    if 'lfmr' in actions:
+    if b'lfmr' in actions:
         lfdirstate = lfutil.openlfdirstate(repo.ui, repo)
-        for lfile, args, msg in actions['lfmr']:
+        for lfile, args, msg in actions[b'lfmr']:
             # this should be executed before 'orig', to execute 'remove'
             # before all other actions
             repo.dirstate.remove(lfile)
@@ -646,7 +647,7 @@
 
 # Override filemerge to prompt the user about how they wish to merge
 # largefiles. This will handle identical edits without prompting the user.
-@eh.wrapfunction(filemerge, '_filemerge')
+@eh.wrapfunction(filemerge, b'_filemerge')
 def overridefilemerge(
     origfn, premerge, repo, wctx, mynode, orig, fcd, fco, fca, labels=None
 ):
@@ -665,10 +666,10 @@
             dhash == ahash
             or repo.ui.promptchoice(
                 _(
-                    'largefile %s has a merge conflict\nancestor was %s\n'
-                    'you can keep (l)ocal %s or take (o)ther %s.\n'
-                    'what do you want to do?'
-                    '$$ &Local $$ &Other'
+                    b'largefile %s has a merge conflict\nancestor was %s\n'
+                    b'you can keep (l)ocal %s or take (o)ther %s.\n'
+                    b'what do you want to do?'
+                    b'$$ &Local $$ &Other'
                 )
                 % (lfutil.splitstandin(orig), ahash, dhash, ohash),
                 0,
@@ -680,7 +681,7 @@
     return True, 0, False
 
 
-@eh.wrapfunction(copiesmod, 'pathcopies')
+@eh.wrapfunction(copiesmod, b'pathcopies')
 def copiespathcopies(orig, ctx1, ctx2, match=None):
     copies = orig(ctx1, ctx2, match=match)
     updated = {}
@@ -696,7 +697,7 @@
 # checks if the destination largefile already exists. It also keeps a
 # list of copied files so that the largefiles can be copied and the
 # dirstate updated.
-@eh.wrapfunction(cmdutil, 'copy')
+@eh.wrapfunction(cmdutil, b'copy')
 def overridecopy(orig, ui, repo, pats, opts, rename=False):
     # doesn't remove largefile on rename
     if len(pats) < 2:
@@ -717,7 +718,7 @@
         pats=(),
         opts=None,
         globbed=False,
-        default='relpath',
+        default=b'relpath',
         badfn=None,
     ):
         if opts is None:
@@ -725,11 +726,11 @@
         match = orig(ctx, pats, opts, globbed, default, badfn=badfn)
         return composenormalfilematcher(match, manifest)
 
-    with extensions.wrappedfunction(scmutil, 'match', normalfilesmatchfn):
+    with extensions.wrappedfunction(scmutil, b'match', normalfilesmatchfn):
         try:
             result = orig(ui, repo, pats, opts, rename)
         except error.Abort as e:
-            if pycompat.bytestr(e) != _('no files to copy'):
+            if pycompat.bytestr(e) != _(b'no files to copy'):
                 raise e
             else:
                 nonormalfiles = True
@@ -766,7 +767,7 @@
             pats=(),
             opts=None,
             globbed=False,
-            default='relpath',
+            default=b'relpath',
             badfn=None,
         ):
             if opts is None:
@@ -776,7 +777,7 @@
             # directory; we need to remove that now
             for pat in pats:
                 if matchmod.patkind(pat) is None and lfutil.shortname in pat:
-                    newpats.append(pat.replace(lfutil.shortname, ''))
+                    newpats.append(pat.replace(lfutil.shortname, b''))
                 else:
                     newpats.append(pat)
             match = orig(ctx, newpats, opts, globbed, default, badfn=badfn)
@@ -811,14 +812,16 @@
             if lfutil.shortname in src and dest.startswith(
                 repo.wjoin(lfutil.shortname)
             ):
-                destlfile = dest.replace(lfutil.shortname, '')
-                if not opts['force'] and os.path.exists(destlfile):
-                    raise IOError('', _('destination largefile already exists'))
+                destlfile = dest.replace(lfutil.shortname, b'')
+                if not opts[b'force'] and os.path.exists(destlfile):
+                    raise IOError(
+                        b'', _(b'destination largefile already exists')
+                    )
             copiedfiles.append((src, dest))
             orig(src, dest, *args, **kwargs)
 
-        with extensions.wrappedfunction(util, 'copyfile', overridecopyfile):
-            with extensions.wrappedfunction(scmutil, 'match', overridematch):
+        with extensions.wrappedfunction(util, b'copyfile', overridecopyfile):
+            with extensions.wrappedfunction(scmutil, b'match', overridematch):
                 result += orig(ui, repo, listpats, opts, rename)
 
         lfdirstate = lfutil.openlfdirstate(ui, repo)
@@ -826,9 +829,9 @@
             if lfutil.shortname in src and dest.startswith(
                 repo.wjoin(lfutil.shortname)
             ):
-                srclfile = src.replace(repo.wjoin(lfutil.standin('')), '')
-                destlfile = dest.replace(repo.wjoin(lfutil.standin('')), '')
-                destlfiledir = repo.wvfs.dirname(repo.wjoin(destlfile)) or '.'
+                srclfile = src.replace(repo.wjoin(lfutil.standin(b'')), b'')
+                destlfile = dest.replace(repo.wjoin(lfutil.standin(b'')), b'')
+                destlfiledir = repo.wvfs.dirname(repo.wjoin(destlfile)) or b'.'
                 if not os.path.isdir(destlfiledir):
                     os.makedirs(destlfiledir)
                 if rename:
@@ -844,7 +847,7 @@
                 lfdirstate.add(destlfile)
         lfdirstate.write()
     except error.Abort as e:
-        if pycompat.bytestr(e) != _('no files to copy'):
+        if pycompat.bytestr(e) != _(b'no files to copy'):
             raise e
         else:
             nolfiles = True
@@ -852,7 +855,7 @@
         wlock.release()
 
     if nolfiles and nonormalfiles:
-        raise error.Abort(_('no files to copy'))
+        raise error.Abort(_(b'no files to copy'))
 
     return result
 
@@ -866,7 +869,7 @@
 # commits. Update the standins then run the original revert, changing
 # the matcher to hit standins instead of largefiles. Based on the
 # resulting standins update the largefiles.
-@eh.wrapfunction(cmdutil, 'revert')
+@eh.wrapfunction(cmdutil, b'revert')
 def overriderevert(orig, ui, repo, ctx, parents, *pats, **opts):
     # Because we put the standins in a bad state (by updating them)
     # and then return them to a correct state we need to lock to
@@ -890,7 +893,7 @@
             pats=(),
             opts=None,
             globbed=False,
-            default='relpath',
+            default=b'relpath',
             badfn=None,
         ):
             if opts is None:
@@ -912,7 +915,7 @@
                 standin = lfutil.standin(f)
                 if standin in ctx or standin in mctx:
                     matchfiles.append(standin)
-                elif standin in wctx or lfdirstate[f] == 'r':
+                elif standin in wctx or lfdirstate[f] == b'r':
                     continue
                 else:
                     matchfiles.append(f)
@@ -929,7 +932,7 @@
             m.matchfn = matchfn
             return m
 
-        with extensions.wrappedfunction(scmutil, 'match', overridematch):
+        with extensions.wrappedfunction(scmutil, b'match', overridematch):
             orig(ui, repo, ctx, parents, *pats, **opts)
 
         newstandins = lfutil.getstandinsstate(repo)
@@ -947,51 +950,57 @@
 # after pulling changesets, we need to take some extra care to get
 # largefiles updated remotely
 @eh.wrapcommand(
-    'pull',
+    b'pull',
     opts=[
         (
-            '',
-            'all-largefiles',
+            b'',
+            b'all-largefiles',
             None,
-            _('download all pulled versions of largefiles (DEPRECATED)'),
+            _(b'download all pulled versions of largefiles (DEPRECATED)'),
         ),
         (
-            '',
-            'lfrev',
+            b'',
+            b'lfrev',
             [],
-            _('download largefiles for these revisions'),
-            _('REV'),
+            _(b'download largefiles for these revisions'),
+            _(b'REV'),
         ),
     ],
 )
 def overridepull(orig, ui, repo, source=None, **opts):
     revsprepull = len(repo)
     if not source:
-        source = 'default'
+        source = b'default'
     repo.lfpullsource = source
     result = orig(ui, repo, source, **opts)
     revspostpull = len(repo)
     lfrevs = opts.get(r'lfrev', [])
     if opts.get(r'all_largefiles'):
-        lfrevs.append('pulled()')
+        lfrevs.append(b'pulled()')
     if lfrevs and revspostpull > revsprepull:
         numcached = 0
         repo.firstpulled = revsprepull  # for pulled() revset expression
         try:
             for rev in scmutil.revrange(repo, lfrevs):
-                ui.note(_('pulling largefiles for revision %d\n') % rev)
+                ui.note(_(b'pulling largefiles for revision %d\n') % rev)
                 (cached, missing) = lfcommands.cachelfiles(ui, repo, rev)
                 numcached += len(cached)
         finally:
             del repo.firstpulled
-        ui.status(_("%d largefiles cached\n") % numcached)
+        ui.status(_(b"%d largefiles cached\n") % numcached)
     return result
 
 
 @eh.wrapcommand(
-    'push',
+    b'push',
     opts=[
-        ('', 'lfrev', [], _('upload largefiles for these revisions'), _('REV'))
+        (
+            b'',
+            b'lfrev',
+            [],
+            _(b'upload largefiles for these revisions'),
+            _(b'REV'),
+        )
     ],
 )
 def overridepush(orig, ui, repo, *args, **kwargs):
@@ -999,11 +1008,11 @@
     lfrevs = kwargs.pop(r'lfrev', None)
     if lfrevs:
         opargs = kwargs.setdefault(r'opargs', {})
-        opargs['lfrevs'] = scmutil.revrange(repo, lfrevs)
+        opargs[b'lfrevs'] = scmutil.revrange(repo, lfrevs)
     return orig(ui, repo, *args, **kwargs)
 
 
-@eh.wrapfunction(exchange, 'pushoperation')
+@eh.wrapfunction(exchange, b'pushoperation')
 def exchangepushoperation(orig, *args, **kwargs):
     """Override pushoperation constructor and store lfrevs parameter"""
     lfrevs = kwargs.pop(r'lfrevs', None)
@@ -1012,7 +1021,7 @@
     return pushop
 
 
-@eh.revsetpredicate('pulled()')
+@eh.revsetpredicate(b'pulled()')
 def pulledrevsetsymbol(repo, subset, x):
     """Changesets that just has been pulled.
 
@@ -1035,18 +1044,18 @@
     try:
         firstpulled = repo.firstpulled
     except AttributeError:
-        raise error.Abort(_("pulled() only available in --lfrev"))
+        raise error.Abort(_(b"pulled() only available in --lfrev"))
     return smartset.baseset([r for r in subset if r >= firstpulled])
 
 
 @eh.wrapcommand(
-    'clone',
+    b'clone',
     opts=[
         (
-            '',
-            'all-largefiles',
+            b'',
+            b'all-largefiles',
             None,
-            _('download all versions of all largefiles'),
+            _(b'download all versions of all largefiles'),
         )
     ],
 )
@@ -1056,14 +1065,14 @@
         d = hg.defaultdest(source)
     if opts.get(r'all_largefiles') and not hg.islocal(d):
         raise error.Abort(
-            _('--all-largefiles is incompatible with non-local destination %s')
+            _(b'--all-largefiles is incompatible with non-local destination %s')
             % d
         )
 
     return orig(ui, source, dest, **opts)
 
 
-@eh.wrapfunction(hg, 'clone')
+@eh.wrapfunction(hg, b'clone')
 def hgclone(orig, ui, opts, *args, **kwargs):
     result = orig(ui, opts, *args, **kwargs)
 
@@ -1080,7 +1089,7 @@
         # Caching is implicitly limited to 'rev' option, since the dest repo was
         # truncated at that point.  The user may expect a download count with
         # this option, so attempt whether or not this is a largefile repo.
-        if opts.get('all_largefiles'):
+        if opts.get(b'all_largefiles'):
             success, missing = lfcommands.downloadlfiles(ui, repo, None)
 
             if missing != 0:
@@ -1089,9 +1098,9 @@
     return result
 
 
-@eh.wrapcommand('rebase', extension='rebase')
+@eh.wrapcommand(b'rebase', extension=b'rebase')
 def overriderebase(orig, ui, repo, **opts):
-    if not util.safehasattr(repo, '_largefilesenabled'):
+    if not util.safehasattr(repo, b'_largefilesenabled'):
         return orig(ui, repo, **opts)
 
     resuming = opts.get(r'continue')
@@ -1104,7 +1113,7 @@
         repo._lfcommithooks.pop()
 
 
-@eh.wrapcommand('archive')
+@eh.wrapcommand(b'archive')
 def overridearchivecmd(orig, ui, repo, dest, **opts):
     repo.unfiltered().lfstatus = True
 
@@ -1114,7 +1123,7 @@
         repo.unfiltered().lfstatus = False
 
 
-@eh.wrapfunction(webcommands, 'archive')
+@eh.wrapfunction(webcommands, b'archive')
 def hgwebarchive(orig, web):
     web.repo.lfstatus = True
 
@@ -1124,7 +1133,7 @@
         web.repo.lfstatus = False
 
 
-@eh.wrapfunction(archival, 'archive')
+@eh.wrapfunction(archival, b'archive')
 def overridearchive(
     orig,
     repo,
@@ -1133,7 +1142,7 @@
     kind,
     decode=True,
     match=None,
-    prefix='',
+    prefix=b'',
     mtime=None,
     subrepos=None,
 ):
@@ -1150,13 +1159,13 @@
         lfcommands.cachelfiles(repo.ui, repo, node)
 
     if kind not in archival.archivers:
-        raise error.Abort(_("unknown archive type '%s'") % kind)
+        raise error.Abort(_(b"unknown archive type '%s'") % kind)
 
     ctx = repo[node]
 
-    if kind == 'files':
+    if kind == b'files':
         if prefix:
-            raise error.Abort(_('cannot give prefix when archiving to files'))
+            raise error.Abort(_(b'cannot give prefix when archiving to files'))
     else:
         prefix = archival.tidyprefix(dest, kind, prefix)
 
@@ -1170,9 +1179,9 @@
 
     archiver = archival.archivers[kind](dest, mtime or ctx.date()[0])
 
-    if repo.ui.configbool("ui", "archivemeta"):
+    if repo.ui.configbool(b"ui", b"archivemeta"):
         write(
-            '.hg_archival.txt',
+            b'.hg_archival.txt',
             0o644,
             False,
             lambda: archival.buildmetadata(ctx),
@@ -1189,7 +1198,7 @@
                 if path is None:
                     raise error.Abort(
                         _(
-                            'largefile %s not found in repo store or system cache'
+                            b'largefile %s not found in repo store or system cache'
                         )
                         % lfile
                     )
@@ -1199,26 +1208,26 @@
             f = lfile
 
             getdata = lambda: util.readfile(path)
-        write(f, 'x' in ff and 0o755 or 0o644, 'l' in ff, getdata)
+        write(f, b'x' in ff and 0o755 or 0o644, b'l' in ff, getdata)
 
     if subrepos:
         for subpath in sorted(ctx.substate):
             sub = ctx.workingsub(subpath)
             submatch = matchmod.subdirmatcher(subpath, match)
-            subprefix = prefix + subpath + '/'
+            subprefix = prefix + subpath + b'/'
             sub._repo.lfstatus = True
             sub.archive(archiver, subprefix, submatch)
 
     archiver.done()
 
 
-@eh.wrapfunction(subrepo.hgsubrepo, 'archive')
+@eh.wrapfunction(subrepo.hgsubrepo, b'archive')
 def hgsubrepoarchive(orig, repo, archiver, prefix, match=None, decode=True):
-    lfenabled = util.safehasattr(repo._repo, '_largefilesenabled')
+    lfenabled = util.safehasattr(repo._repo, b'_largefilesenabled')
     if not lfenabled or not repo._repo.lfstatus:
         return orig(repo, archiver, prefix, match, decode)
 
-    repo._get(repo._state + ('hg',))
+    repo._get(repo._state + (b'hg',))
     rev = repo._state[1]
     ctx = repo._repo[rev]
 
@@ -1247,7 +1256,7 @@
                 if path is None:
                     raise error.Abort(
                         _(
-                            'largefile %s not found in repo store or system cache'
+                            b'largefile %s not found in repo store or system cache'
                         )
                         % lfile
                     )
@@ -1258,12 +1267,12 @@
 
             getdata = lambda: util.readfile(os.path.join(prefix, path))
 
-        write(f, 'x' in ff and 0o755 or 0o644, 'l' in ff, getdata)
+        write(f, b'x' in ff and 0o755 or 0o644, b'l' in ff, getdata)
 
     for subpath in sorted(ctx.substate):
         sub = ctx.workingsub(subpath)
         submatch = matchmod.subdirmatcher(subpath, match)
-        subprefix = prefix + subpath + '/'
+        subprefix = prefix + subpath + b'/'
         sub._repo.lfstatus = True
         sub.archive(archiver, subprefix, submatch, decode)
 
@@ -1272,17 +1281,17 @@
 # standin until a commit. cmdutil.bailifchanged() raises an exception
 # if the repo has uncommitted changes. Wrap it to also check if
 # largefiles were changed. This is used by bisect, backout and fetch.
-@eh.wrapfunction(cmdutil, 'bailifchanged')
+@eh.wrapfunction(cmdutil, b'bailifchanged')
 def overridebailifchanged(orig, repo, *args, **kwargs):
     orig(repo, *args, **kwargs)
     repo.lfstatus = True
     s = repo.status()
     repo.lfstatus = False
     if s.modified or s.added or s.removed or s.deleted:
-        raise error.Abort(_('uncommitted changes'))
+        raise error.Abort(_(b'uncommitted changes'))
 
 
-@eh.wrapfunction(cmdutil, 'postcommitstatus')
+@eh.wrapfunction(cmdutil, b'postcommitstatus')
 def postcommitstatus(orig, repo, *args, **kwargs):
     repo.lfstatus = True
     try:
@@ -1291,7 +1300,7 @@
         repo.lfstatus = False
 
 
-@eh.wrapfunction(cmdutil, 'forget')
+@eh.wrapfunction(cmdutil, b'forget')
 def cmdutilforget(
     orig, ui, repo, match, prefix, uipathfn, explicitonly, dryrun, interactive
 ):
@@ -1321,20 +1330,20 @@
         fstandin = lfutil.standin(f)
         if fstandin not in repo.dirstate and not repo.wvfs.isdir(fstandin):
             ui.warn(
-                _('not removing %s: file is already untracked\n') % uipathfn(f)
+                _(b'not removing %s: file is already untracked\n') % uipathfn(f)
             )
             bad.append(f)
 
     for f in forget:
         if ui.verbose or not m.exact(f):
-            ui.status(_('removing %s\n') % uipathfn(f))
+            ui.status(_(b'removing %s\n') % uipathfn(f))
 
     # Need to lock because standin files are deleted then removed from the
     # repository and we could race in-between.
     with repo.wlock():
         lfdirstate = lfutil.openlfdirstate(ui, repo)
         for f in forget:
-            if lfdirstate[f] == 'a':
+            if lfdirstate[f] == b'a':
                 lfdirstate.drop(f)
             else:
                 lfdirstate.remove(f)
@@ -1376,7 +1385,7 @@
 
 
 def outgoinghook(ui, repo, other, opts, missing):
-    if opts.pop('large', None):
+    if opts.pop(b'large', None):
         lfhashes = set()
         if ui.debugflag:
             toupload = {}
@@ -1389,7 +1398,7 @@
 
             def showhashes(fn):
                 for lfhash in sorted(toupload[fn]):
-                    ui.debug('    %s\n' % lfhash)
+                    ui.debug(b'    %s\n' % lfhash)
 
         else:
             toupload = set()
@@ -1404,19 +1413,19 @@
         _getoutgoings(repo, other, missing, addfunc)
 
         if not toupload:
-            ui.status(_('largefiles: no files to upload\n'))
+            ui.status(_(b'largefiles: no files to upload\n'))
         else:
             ui.status(
-                _('largefiles to upload (%d entities):\n') % (len(lfhashes))
+                _(b'largefiles to upload (%d entities):\n') % (len(lfhashes))
             )
             for file in sorted(toupload):
-                ui.status(lfutil.splitstandin(file) + '\n')
+                ui.status(lfutil.splitstandin(file) + b'\n')
                 showhashes(file)
-            ui.status('\n')
+            ui.status(b'\n')
 
 
 @eh.wrapcommand(
-    'outgoing', opts=[('', 'large', None, _('display outgoing largefiles'))]
+    b'outgoing', opts=[(b'', b'large', None, _(b'display outgoing largefiles'))]
 )
 def _outgoingcmd(orig, *args, **kwargs):
     # Nothing to do here other than add the extra help option- the hook above
@@ -1425,7 +1434,7 @@
 
 
 def summaryremotehook(ui, repo, opts, changes):
-    largeopt = opts.get('large', False)
+    largeopt = opts.get(b'large', False)
     if changes is None:
         if largeopt:
             return (False, True)  # only outgoing check is needed
@@ -1435,7 +1444,7 @@
         url, branch, peer, outgoing = changes[1]
         if peer is None:
             # i18n: column positioning for "hg summary"
-            ui.status(_('largefiles: (no remote repo)\n'))
+            ui.status(_(b'largefiles: (no remote repo)\n'))
             return
 
         toupload = set()
@@ -1449,17 +1458,17 @@
 
         if not toupload:
             # i18n: column positioning for "hg summary"
-            ui.status(_('largefiles: (no files to upload)\n'))
+            ui.status(_(b'largefiles: (no files to upload)\n'))
         else:
             # i18n: column positioning for "hg summary"
             ui.status(
-                _('largefiles: %d entities for %d files to upload\n')
+                _(b'largefiles: %d entities for %d files to upload\n')
                 % (len(lfhashes), len(toupload))
             )
 
 
 @eh.wrapcommand(
-    'summary', opts=[('', 'large', None, _('display outgoing largefiles'))]
+    b'summary', opts=[(b'', b'large', None, _(b'display outgoing largefiles'))]
 )
 def overridesummary(orig, ui, repo, *pats, **opts):
     try:
@@ -1469,7 +1478,7 @@
         repo.lfstatus = False
 
 
-@eh.wrapfunction(scmutil, 'addremove')
+@eh.wrapfunction(scmutil, b'addremove')
 def scmutiladdremove(orig, repo, matcher, prefix, uipathfn, opts=None):
     if opts is None:
         opts = {}
@@ -1505,7 +1514,7 @@
             True,
             m,
             uipathfn,
-            opts.get('dry_run'),
+            opts.get(b'dry_run'),
             **pycompat.strkwargs(opts)
         )
     # Call into the normal add code, and any files that *should* be added as
@@ -1522,7 +1531,7 @@
 
 # Calling purge with --all will cause the largefiles to be deleted.
 # Override repo.status to prevent this from happening.
-@eh.wrapcommand('purge', extension='purge')
+@eh.wrapcommand(b'purge', extension=b'purge')
 def overridepurge(orig, ui, repo, *dirs, **opts):
     # XXX Monkey patching a repoview will not work. The assigned attribute will
     # be set on the unfiltered repo, but we will only lookup attributes in the
@@ -1537,7 +1546,7 @@
     oldstatus = repo.status
 
     def overridestatus(
-        node1='.',
+        node1=b'.',
         node2=None,
         match=None,
         ignored=False,
@@ -1549,8 +1558,8 @@
             node1, node2, match, ignored, clean, unknown, listsubrepos
         )
         lfdirstate = lfutil.openlfdirstate(ui, repo)
-        unknown = [f for f in r.unknown if lfdirstate[f] == '?']
-        ignored = [f for f in r.ignored if lfdirstate[f] == '?']
+        unknown = [f for f in r.unknown if lfdirstate[f] == b'?']
+        ignored = [f for f in r.ignored if lfdirstate[f] == b'?']
         return scmutil.status(
             r.modified, r.added, r.removed, r.deleted, unknown, ignored, r.clean
         )
@@ -1560,25 +1569,25 @@
     repo.status = oldstatus
 
 
-@eh.wrapcommand('rollback')
+@eh.wrapcommand(b'rollback')
 def overriderollback(orig, ui, repo, **opts):
     with repo.wlock():
         before = repo.dirstate.parents()
         orphans = set(
             f
             for f in repo.dirstate
-            if lfutil.isstandin(f) and repo.dirstate[f] != 'r'
+            if lfutil.isstandin(f) and repo.dirstate[f] != b'r'
         )
         result = orig(ui, repo, **opts)
         after = repo.dirstate.parents()
         if before == after:
             return result  # no need to restore standins
 
-        pctx = repo['.']
+        pctx = repo[b'.']
         for f in repo.dirstate:
             if lfutil.isstandin(f):
                 orphans.discard(f)
-                if repo.dirstate[f] == 'r':
+                if repo.dirstate[f] == b'r':
                     repo.wvfs.unlinkpath(f, ignoremissing=True)
                 elif f in pctx:
                     fctx = pctx[f]
@@ -1586,7 +1595,7 @@
                 else:
                     # content of standin is not so important in 'a',
                     # 'm' or 'n' (coming from the 2nd parent) cases
-                    lfutil.writestandin(repo, f, '', False)
+                    lfutil.writestandin(repo, f, b'', False)
         for standin in orphans:
             repo.wvfs.unlinkpath(standin, ignoremissing=True)
 
@@ -1602,7 +1611,7 @@
     return result
 
 
-@eh.wrapcommand('transplant', extension='transplant')
+@eh.wrapcommand(b'transplant', extension=b'transplant')
 def overridetransplant(orig, ui, repo, *revs, **opts):
     resuming = opts.get(r'continue')
     repo._lfcommithooks.append(lfutil.automatedcommithook(resuming))
@@ -1615,10 +1624,10 @@
     return result
 
 
-@eh.wrapcommand('cat')
+@eh.wrapcommand(b'cat')
 def overridecat(orig, ui, repo, file1, *pats, **opts):
     opts = pycompat.byteskwargs(opts)
-    ctx = scmutil.revsingle(repo, opts.get('rev'))
+    ctx = scmutil.revsingle(repo, opts.get(b'rev'))
     err = 1
     notbad = set()
     m = scmutil.match(ctx, (file1,) + pats, opts)
@@ -1658,12 +1667,12 @@
     m.visitdir = lfvisitdirfn
 
     for f in ctx.walk(m):
-        with cmdutil.makefileobj(ctx, opts.get('output'), pathname=f) as fp:
+        with cmdutil.makefileobj(ctx, opts.get(b'output'), pathname=f) as fp:
             lf = lfutil.splitstandin(f)
             if lf is None or origmatchfn(f):
                 # duplicating unreachable code from commands.cat
                 data = ctx[f].data()
-                if opts.get('decode'):
+                if opts.get(b'decode'):
                     data = repo.wwritedata(f, data)
                 fp.write(data)
             else:
@@ -1674,20 +1683,20 @@
                     if len(success) != 1:
                         raise error.Abort(
                             _(
-                                'largefile %s is not in cache and could not be '
-                                'downloaded'
+                                b'largefile %s is not in cache and could not be '
+                                b'downloaded'
                             )
                             % lf
                         )
                 path = lfutil.usercachepath(repo.ui, hash)
-                with open(path, "rb") as fpin:
+                with open(path, b"rb") as fpin:
                     for chunk in util.filechunkiter(fpin):
                         fp.write(chunk)
         err = 0
     return err
 
 
-@eh.wrapfunction(merge, 'update')
+@eh.wrapfunction(merge, b'update')
 def mergeupdate(orig, repo, node, branchmerge, force, *args, **kwargs):
     matcher = kwargs.get(r'matcher', None)
     # note if this is a partial update
@@ -1717,7 +1726,7 @@
             unknown=False,
         )
         oldclean = set(s.clean)
-        pctx = repo['.']
+        pctx = repo[b'.']
         dctx = repo[node]
         for lfile in unsure + s.modified:
             lfileabs = repo.wvfs.join(lfile)
@@ -1771,7 +1780,7 @@
         return result
 
 
-@eh.wrapfunction(scmutil, 'marktouched')
+@eh.wrapfunction(scmutil, b'marktouched')
 def scmutilmarktouched(orig, repo, files, *args, **kwargs):
     result = orig(repo, files, *args, **kwargs)
 
@@ -1792,23 +1801,23 @@
     return result
 
 
-@eh.wrapfunction(upgrade, 'preservedrequirements')
-@eh.wrapfunction(upgrade, 'supporteddestrequirements')
+@eh.wrapfunction(upgrade, b'preservedrequirements')
+@eh.wrapfunction(upgrade, b'supporteddestrequirements')
 def upgraderequirements(orig, repo):
     reqs = orig(repo)
-    if 'largefiles' in repo.requirements:
-        reqs.add('largefiles')
+    if b'largefiles' in repo.requirements:
+        reqs.add(b'largefiles')
     return reqs
 
 
-_lfscheme = 'largefile://'
+_lfscheme = b'largefile://'
 
 
-@eh.wrapfunction(urlmod, 'open')
+@eh.wrapfunction(urlmod, b'open')
 def openlargefile(orig, ui, url_, data=None):
     if url_.startswith(_lfscheme):
         if data:
-            msg = "cannot use data on a 'largefile://' url"
+            msg = b"cannot use data on a 'largefile://' url"
             raise error.ProgrammingError(msg)
         lfid = url_[len(_lfscheme) :]
         return storefactory.getlfile(ui, lfid)
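
The wrapper above gives urlmod.open a private 'largefile://' scheme: everything
after the prefix is taken as the blob hash and resolved through storefactory. A
minimal usage sketch (the all-zero sha is a placeholder, assuming the usual
40-hex sha1 largefile hashes):

    # illustrative only: fetch a largefile blob through the wrapped url.open
    sha = b'0' * 40                          # hypothetical largefile sha1
    fileobj = urlmod.open(ui, b'largefile://' + sha)
    data = fileobj.read()                    # served by storefactory.getlfile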
--- a/hgext/largefiles/proto.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/largefiles/proto.py	Sun Oct 06 09:48:39 2019 -0400
@@ -25,9 +25,9 @@
 urlreq = util.urlreq
 
 LARGEFILES_REQUIRED_MSG = (
-    '\nThis repository uses the largefiles extension.'
-    '\n\nPlease enable it in your Mercurial config '
-    'file.\n'
+    b'\nThis repository uses the largefiles extension.'
+    b'\n\nPlease enable it in your Mercurial config '
+    b'file.\n'
 )
 
 eh = exthelper.exthelper()
@@ -50,21 +50,21 @@
                 tmpfp.write(p)
             tmpfp._fp.seek(0)
             if sha != lfutil.hexsha1(tmpfp._fp):
-                raise IOError(0, _('largefile contents do not match hash'))
+                raise IOError(0, _(b'largefile contents do not match hash'))
             tmpfp.close()
             lfutil.linktousercache(repo, sha)
         except IOError as e:
             repo.ui.warn(
-                _('largefiles: failed to put %s into store: %s\n')
+                _(b'largefiles: failed to put %s into store: %s\n')
                 % (sha, e.strerror)
             )
             return wireprototypes.pushres(
-                1, output.getvalue() if output else ''
+                1, output.getvalue() if output else b''
             )
         finally:
             tmpfp.discard()
 
-    return wireprototypes.pushres(0, output.getvalue() if output else '')
+    return wireprototypes.pushres(0, output.getvalue() if output else b'')
 
 
 def getlfile(repo, proto, sha):
@@ -73,9 +73,9 @@
     filename = lfutil.findfile(repo, sha)
     if not filename:
         raise error.Abort(
-            _('requested largefile %s not present in cache') % sha
+            _(b'requested largefile %s not present in cache') % sha
         )
-    f = open(filename, 'rb')
+    f = open(filename, b'rb')
     length = os.fstat(f.fileno())[6]
 
     # Since we can't set an HTTP content-length header here, and
@@ -84,7 +84,7 @@
     # just send the length on the first line of the response, like the
     # ssh proto does for string responses.
     def generator():
-        yield '%d\n' % length
+        yield b'%d\n' % length
         for chunk in util.filechunkiter(f):
             yield chunk
 
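As the comment above explains, the reply is framed by putting the decimal
length on the first line, followed by the raw blob. A minimal client-side
sketch of consuming such a stream, mirroring the peer code later in this file
(py2-era semantics; `stream` is any file-like object):

    def readlfileresponse(stream):
        # first line carries the decimal byte count, then raw data follows
        length = int(stream.readline())
        remaining = length
        while remaining > 0:
            chunk = stream.read(min(remaining, 4096))
            if not chunk:
                raise IOError(0, 'premature end of largefile stream')
            remaining -= len(chunk)
            yield chunk  # never read past `length`: an SSH stream would block
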
@@ -100,8 +100,8 @@
     server side.'''
     filename = lfutil.findfile(repo, sha)
     if not filename:
-        return wireprototypes.bytesresponse('2\n')
-    return wireprototypes.bytesresponse('0\n')
+        return wireprototypes.bytesresponse(b'2\n')
+    return wireprototypes.bytesresponse(b'0\n')
 
 
 def wirereposetup(ui, repo):
@@ -112,44 +112,46 @@
             # it ...
             if issubclass(self.__class__, httppeer.httppeer):
                 res = self._call(
-                    'putlfile',
+                    b'putlfile',
                     data=fd,
                     sha=sha,
                     headers={r'content-type': r'application/mercurial-0.1'},
                 )
                 try:
-                    d, output = res.split('\n', 1)
+                    d, output = res.split(b'\n', 1)
                     for l in output.splitlines(True):
-                        self.ui.warn(_('remote: '), l)  # assume l ends with \n
+                        self.ui.warn(_(b'remote: '), l)  # assume l ends with \n
                     return int(d)
                 except ValueError:
-                    self.ui.warn(_('unexpected putlfile response: %r\n') % res)
+                    self.ui.warn(_(b'unexpected putlfile response: %r\n') % res)
                     return 1
             # ... but we can't use sshrepository._call because the data=
             # argument won't get sent, and _callpush does exactly what we want
             # in this case: send the data straight through
             else:
                 try:
-                    ret, output = self._callpush("putlfile", fd, sha=sha)
-                    if ret == "":
-                        raise error.ResponseError(_('putlfile failed:'), output)
+                    ret, output = self._callpush(b"putlfile", fd, sha=sha)
+                    if ret == b"":
+                        raise error.ResponseError(
+                            _(b'putlfile failed:'), output
+                        )
                     return int(ret)
                 except IOError:
                     return 1
                 except ValueError:
                     raise error.ResponseError(
-                        _('putlfile failed (unexpected response):'), ret
+                        _(b'putlfile failed (unexpected response):'), ret
                     )
 
         def getlfile(self, sha):
             """returns an iterable with the chunks of the file with sha sha"""
-            stream = self._callstream("getlfile", sha=sha)
+            stream = self._callstream(b"getlfile", sha=sha)
             length = stream.readline()
             try:
                 length = int(length)
             except ValueError:
                 self._abort(
-                    error.ResponseError(_("unexpected response:"), length)
+                    error.ResponseError(_(b"unexpected response:"), length)
                 )
 
             # SSH streams will block if reading more than length
@@ -161,13 +163,13 @@
                 chunk = stream.read(1)
                 if chunk:
                     self._abort(
-                        error.ResponseError(_("unexpected response:"), chunk)
+                        error.ResponseError(_(b"unexpected response:"), chunk)
                     )
 
         @wireprotov1peer.batchable
         def statlfile(self, sha):
             f = wireprotov1peer.future()
-            result = {'sha': sha}
+            result = {b'sha': sha}
             yield result, f
             try:
                 yield int(f.value)
@@ -182,11 +184,11 @@
 
 
 # advertise the largefiles=serve capability
-@eh.wrapfunction(wireprotov1server, '_capabilities')
+@eh.wrapfunction(wireprotov1server, b'_capabilities')
 def _capabilities(orig, repo, proto):
     '''announce largefile server capability'''
     caps = orig(repo, proto)
-    caps.append('largefiles=serve')
+    caps.append(b'largefiles=serve')
     return caps
 
 
@@ -200,10 +202,10 @@
 
 
 def sshrepocallstream(self, cmd, **args):
-    if cmd == 'heads' and self.capable('largefiles'):
-        cmd = 'lheads'
-    if cmd == 'batch' and self.capable('largefiles'):
-        args[r'cmds'] = args[r'cmds'].replace('heads ', 'lheads ')
+    if cmd == b'heads' and self.capable(b'largefiles'):
+        cmd = b'lheads'
+    if cmd == b'batch' and self.capable(b'largefiles'):
+        args[r'cmds'] = args[r'cmds'].replace(b'heads ', b'lheads ')
     return ssholdcallstream(self, cmd, **args)
 
 
@@ -211,8 +213,8 @@
 
 
 def httprepocallstream(self, cmd, **args):
-    if cmd == 'heads' and self.capable('largefiles'):
-        cmd = 'lheads'
-    if cmd == 'batch' and self.capable('largefiles'):
-        args[r'cmds'] = headsre.sub('lheads', args[r'cmds'])
+    if cmd == b'heads' and self.capable(b'largefiles'):
+        cmd = b'lheads'
+    if cmd == b'batch' and self.capable(b'largefiles'):
+        args[r'cmds'] = headsre.sub(b'lheads', args[r'cmds'])
     return httpoldcallstream(self, cmd, **args)
--- a/hgext/largefiles/remotestore.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/largefiles/remotestore.py	Sun Oct 06 09:48:39 2019 -0400
@@ -38,11 +38,11 @@
     def put(self, source, hash):
         if self.sendfile(source, hash):
             raise error.Abort(
-                _('remotestore: could not put %s to remote store %s')
+                _(b'remotestore: could not put %s to remote store %s')
                 % (source, util.hidepassword(self.url))
             )
         self.ui.debug(
-            _('remotestore: put %s to remote store %s\n')
+            _(b'remotestore: put %s to remote store %s\n')
             % (source, util.hidepassword(self.url))
         )
 
@@ -53,13 +53,13 @@
         )
 
     def sendfile(self, filename, hash):
-        self.ui.debug('remotestore: sendfile(%s, %s)\n' % (filename, hash))
+        self.ui.debug(b'remotestore: sendfile(%s, %s)\n' % (filename, hash))
         try:
             with lfutil.httpsendfile(self.ui, filename) as fd:
                 return self._put(hash, fd)
         except IOError as e:
             raise error.Abort(
-                _('remotestore: could not open file %s: %s')
+                _(b'remotestore: could not open file %s: %s')
                 % (filename, stringutil.forcebytestr(e))
             )
 
@@ -77,7 +77,7 @@
             # keep trying with the other files... they will probably
             # all fail too.
             raise error.Abort(
-                '%s: %s' % (util.hidepassword(self.url), e.reason)
+                b'%s: %s' % (util.hidepassword(self.url), e.reason)
             )
         except IOError as e:
             raise basestore.StoreError(
@@ -118,33 +118,33 @@
                 if stat:
                     if stat == 1:
                         self.ui.warn(
-                            _('changeset %s: %s: contents differ\n')
+                            _(b'changeset %s: %s: contents differ\n')
                             % (cset, filename)
                         )
                         failed = True
                     elif stat == 2:
                         self.ui.warn(
-                            _('changeset %s: %s missing\n') % (cset, filename)
+                            _(b'changeset %s: %s missing\n') % (cset, filename)
                         )
                         failed = True
                     else:
                         raise RuntimeError(
-                            'verify failed: unexpected response '
-                            'from statlfile (%r)' % stat
+                            b'verify failed: unexpected response '
+                            b'from statlfile (%r)' % stat
                         )
         return failed
 
     def _put(self, hash, fd):
         '''Put file with the given hash in the remote store.'''
-        raise NotImplementedError('abstract method')
+        raise NotImplementedError(b'abstract method')
 
     def _get(self, hash):
         '''Get an iterator for content with the given hash.'''
-        raise NotImplementedError('abstract method')
+        raise NotImplementedError(b'abstract method')
 
     def _stat(self, hashes):
         '''Get information about availability of files specified by
         hashes in the remote store. Return a dictionary mapping hashes
         to a return code, where 0 means the file is available and other
         values mean it is not.'''
-        raise NotImplementedError('abstract method')
+        raise NotImplementedError(b'abstract method')
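
The three stubs above define the store contract: _put(hash, fd) uploads and
returns 0 on success, _get(hash) yields the content in chunks, and
_stat(hashes) maps each hash to 0 (available), 1 (contents differ) or 2
(missing), matching the checks in _verifyfiles above. A minimal in-memory
subclass, purely illustrative:

    class memorystore(remotestore.remotestore):
        '''hedged sketch: a dict-backed store honoring the contract'''

        def __init__(self, ui, repo, url):
            super(memorystore, self).__init__(ui, repo, url)
            self._blobs = {}

        def _put(self, hash, fd):
            self._blobs[hash] = fd.read()
            return 0  # zero means success, as with putlfile

        def _get(self, hash):
            return iter([self._blobs[hash]])

        def _stat(self, hashes):
            # 0 = available, 2 = missing; 1 would mean contents differ
            return {h: 0 if h in self._blobs else 2 for h in hashes}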
--- a/hgext/largefiles/reposetup.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/largefiles/reposetup.py	Sun Oct 06 09:48:39 2019 -0400
@@ -84,7 +84,7 @@
                             else:
                                 result = orig(lfutil.standin(path), fileid)
                             olddata = result.data
-                            result.data = lambda: olddata() + '\0'
+                            result.data = lambda: olddata() + b'\0'
                         return result
 
                 ctx.__class__ = lfilesctx
@@ -99,7 +99,7 @@
         @localrepo.unfilteredmethod
         def status(
             self,
-            node1='.',
+            node1=b'.',
             node2=None,
             match=None,
             ignored=False,
@@ -125,7 +125,7 @@
             ctx1 = self[node1]
             ctx2 = self[node2]
             working = ctx2.rev() is None
-            parentworking = working and ctx1 == self['.']
+            parentworking = working and ctx1 == self[b'.']
 
             if match is None:
                 match = matchmod.always()
@@ -236,7 +236,7 @@
                                     != lfutil.hashfile(abslfile)
                                 ) or (
                                     checkexec
-                                    and ('x' in ctx1.flags(standin))
+                                    and (b'x' in ctx1.flags(standin))
                                     != bool(lfutil.getexecutable(abslfile))
                                 ):
                                     modified.append(lfile)
@@ -334,7 +334,7 @@
         # Do that here.
         def commit(
             self,
-            text="",
+            text=b"",
             user=None,
             date=None,
             match=None,
@@ -365,10 +365,10 @@
                 missing = set(self.requirements) - remote.local().supported
                 if missing:
                     msg = _(
-                        "required features are not"
-                        " supported in the destination:"
-                        " %s"
-                    ) % (', '.join(sorted(missing)))
+                        b"required features are not"
+                        b" supported in the destination:"
+                        b" %s"
+                    ) % (b', '.join(sorted(missing)))
                     raise error.Abort(msg)
             return super(lfilesrepo, self).push(
                 remote, force=force, revs=revs, newbranch=newbranch
@@ -393,10 +393,10 @@
             regulars = []
 
             for f in files:
-                if lfutil.isstandin(f + '/'):
+                if lfutil.isstandin(f + b'/'):
                     raise error.Abort(
-                        _('file "%s" is a largefile standin') % f,
-                        hint='commit the largefile itself instead',
+                        _(b'file "%s" is a largefile standin') % f,
+                        hint=b'commit the largefile itself instead',
                     )
                 # Scan directories
                 if self.wvfs.isdir(f):
@@ -406,7 +406,7 @@
 
             for f in dirs:
                 matcheddir = False
-                d = self.dirstate.normalize(f) + '/'
+                d = self.dirstate.normalize(f) + b'/'
                 # Check for matched normal files
                 for mf in regulars:
                     if self.dirstate.normalize(mf).startswith(d):
@@ -425,7 +425,7 @@
                                 # forces status/dirstate to walk all files and
                                 # call the match function on the matcher, even
                                 # on case sensitive filesystems.
-                                actualfiles.append('.')
+                                actualfiles.append(b'.')
                                 matcheddir = True
                 # Nothing in dir, so readd it
                 # and let commit reject it
@@ -458,16 +458,16 @@
             lfutil.getlfilestoupload(pushop.repo, lfrevs, addfunc)
             lfcommands.uploadlfiles(ui, pushop.repo, pushop.remote, toupload)
 
-    repo.prepushoutgoinghooks.add("largefiles", prepushoutgoinghook)
+    repo.prepushoutgoinghooks.add(b"largefiles", prepushoutgoinghook)
 
     def checkrequireslfiles(ui, repo, **kwargs):
-        if 'largefiles' not in repo.requirements and any(
-            lfutil.shortname + '/' in f[0] for f in repo.store.datafiles()
+        if b'largefiles' not in repo.requirements and any(
+            lfutil.shortname + b'/' in f[0] for f in repo.store.datafiles()
         ):
-            repo.requirements.add('largefiles')
+            repo.requirements.add(b'largefiles')
             repo._writerequirements()
 
     ui.setconfig(
-        'hooks', 'changegroup.lfiles', checkrequireslfiles, 'largefiles'
+        b'hooks', b'changegroup.lfiles', checkrequireslfiles, b'largefiles'
     )
-    ui.setconfig('hooks', 'commit.lfiles', checkrequireslfiles, 'largefiles')
+    ui.setconfig(b'hooks', b'commit.lfiles', checkrequireslfiles, b'largefiles')
--- a/hgext/largefiles/storefactory.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/largefiles/storefactory.py	Sun Oct 06 09:48:39 2019 -0400
@@ -31,18 +31,18 @@
         if lfpullsource:
             path = ui.expandpath(lfpullsource)
         elif put:
-            path = ui.expandpath('default-push', 'default')
+            path = ui.expandpath(b'default-push', b'default')
         else:
-            path = ui.expandpath('default')
+            path = ui.expandpath(b'default')
 
         # ui.expandpath() leaves 'default-push' and 'default' alone if
         # they cannot be expanded: fall back to the empty string,
         # meaning the current directory.
         if repo is None:
-            path = ui.expandpath('default')
+            path = ui.expandpath(b'default')
             path, _branches = hg.parseurl(path)
             remote = hg.peer(repo or ui, {}, path)
-        elif path == 'default-push' or path == 'default':
+        elif path == b'default-push' or path == b'default':
             remote = repo
         else:
             path, _branches = hg.parseurl(path)
@@ -50,18 +50,18 @@
 
     # The path could be a scheme so use Mercurial's normal functionality
     # to resolve the scheme to a repository and use its path
-    path = util.safehasattr(remote, 'url') and remote.url() or remote.path
+    path = util.safehasattr(remote, b'url') and remote.url() or remote.path
 
     match = _scheme_re.match(path)
     if not match:  # regular filesystem path
-        scheme = 'file'
+        scheme = b'file'
     else:
         scheme = match.group(1)
 
     try:
         storeproviders = _storeprovider[scheme]
     except KeyError:
-        raise error.Abort(_('unsupported URL scheme %r') % scheme)
+        raise error.Abort(_(b'unsupported URL scheme %r') % scheme)
 
     for classobj in storeproviders:
         try:
@@ -70,16 +70,16 @@
             pass
 
     raise error.Abort(
-        _('%s does not appear to be a largefile store')
+        _(b'%s does not appear to be a largefile store')
         % util.hidepassword(path)
     )
 
 
 _storeprovider = {
-    'file': [localstore.localstore],
-    'http': [wirestore.wirestore],
-    'https': [wirestore.wirestore],
-    'ssh': [wirestore.wirestore],
+    b'file': [localstore.localstore],
+    b'http': [wirestore.wirestore],
+    b'https': [wirestore.wirestore],
+    b'ssh': [wirestore.wirestore],
 }
 
 _scheme_re = re.compile(br'^([a-zA-Z0-9+-.]+)://')
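
_storeprovider maps a URL scheme to candidate store classes; each candidate is
tried in turn until one accepts the path. A third-party extension could in
principle register an extra scheme the same way (sftpstore is invented for
illustration):

    # hypothetical: teach storefactory about an additional scheme
    from hgext.largefiles import storefactory
    storefactory._storeprovider[b'sftp'] = [sftpstore]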
--- a/hgext/largefiles/wirestore.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/largefiles/wirestore.py	Sun Oct 06 09:48:39 2019 -0400
@@ -14,11 +14,11 @@
 
 class wirestore(remotestore.remotestore):
     def __init__(self, ui, repo, remote):
-        cap = remote.capable('largefiles')
+        cap = remote.capable(b'largefiles')
         if not cap:
             raise lfutil.storeprotonotcapable([])
-        storetypes = cap.split(',')
-        if 'serve' not in storetypes:
+        storetypes = cap.split(b',')
+        if b'serve' not in storetypes:
             raise lfutil.storeprotonotcapable(storetypes)
         self.remote = remote
         super(wirestore, self).__init__(ui, repo, remote.url())
@@ -37,6 +37,6 @@
         with self.remote.commandexecutor() as e:
             fs = []
             for hash in hashes:
-                fs.append((hash, e.callcommand('statlfile', {'sha': hash,})))
+                fs.append((hash, e.callcommand(b'statlfile', {b'sha': hash,})))
 
             return {hash: f.result() for hash, f in fs}
--- a/hgext/lfs/__init__.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/lfs/__init__.py	Sun Oct 06 09:48:39 2019 -0400
@@ -159,7 +159,7 @@
 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
 # be specifying the version(s) of Mercurial they are tested with, or
 # leave the attribute unspecified.
-testedwith = 'ships-with-hg-core'
+testedwith = b'ships-with-hg-core'
 
 eh = exthelper.exthelper()
 eh.merge(wrapper.eh)
@@ -174,33 +174,33 @@
 templatekeyword = eh.templatekeyword
 
 eh.configitem(
-    'experimental', 'lfs.serve', default=True,
+    b'experimental', b'lfs.serve', default=True,
 )
 eh.configitem(
-    'experimental', 'lfs.user-agent', default=None,
+    b'experimental', b'lfs.user-agent', default=None,
 )
 eh.configitem(
-    'experimental', 'lfs.disableusercache', default=False,
+    b'experimental', b'lfs.disableusercache', default=False,
 )
 eh.configitem(
-    'experimental', 'lfs.worker-enable', default=False,
+    b'experimental', b'lfs.worker-enable', default=False,
 )
 
 eh.configitem(
-    'lfs', 'url', default=None,
+    b'lfs', b'url', default=None,
 )
 eh.configitem(
-    'lfs', 'usercache', default=None,
+    b'lfs', b'usercache', default=None,
 )
 # Deprecated
 eh.configitem(
-    'lfs', 'threshold', default=None,
+    b'lfs', b'threshold', default=None,
 )
 eh.configitem(
-    'lfs', 'track', default='none()',
+    b'lfs', b'track', default=b'none()',
 )
 eh.configitem(
-    'lfs', 'retry', default=5,
+    b'lfs', b'retry', default=5,
 )
 
 lfsprocessor = (
@@ -212,7 +212,7 @@
 
 def featuresetup(ui, supported):
     # don't die on seeing a repo with the lfs requirement
-    supported |= {'lfs'}
+    supported |= {b'lfs'}
 
 
 @eh.uisetup
@@ -232,76 +232,78 @@
     class lfsrepo(repo.__class__):
         @localrepo.unfilteredmethod
         def commitctx(self, ctx, error=False, origctx=None):
-            repo.svfs.options['lfstrack'] = _trackedmatcher(self)
+            repo.svfs.options[b'lfstrack'] = _trackedmatcher(self)
             return super(lfsrepo, self).commitctx(ctx, error, origctx=origctx)
 
     repo.__class__ = lfsrepo
 
-    if 'lfs' not in repo.requirements:
+    if b'lfs' not in repo.requirements:
 
         def checkrequireslfs(ui, repo, **kwargs):
-            if 'lfs' in repo.requirements:
+            if b'lfs' in repo.requirements:
                 return 0
 
             last = kwargs.get(r'node_last')
             _bin = node.bin
             if last:
-                s = repo.set('%n:%n', _bin(kwargs[r'node']), _bin(last))
+                s = repo.set(b'%n:%n', _bin(kwargs[r'node']), _bin(last))
             else:
-                s = repo.set('%n', _bin(kwargs[r'node']))
+                s = repo.set(b'%n', _bin(kwargs[r'node']))
             match = repo._storenarrowmatch
             for ctx in s:
                 # TODO: is there a way to just walk the files in the commit?
                 if any(
                     ctx[f].islfs() for f in ctx.files() if f in ctx and match(f)
                 ):
-                    repo.requirements.add('lfs')
+                    repo.requirements.add(b'lfs')
                     repo.features.add(repository.REPO_FEATURE_LFS)
                     repo._writerequirements()
-                    repo.prepushoutgoinghooks.add('lfs', wrapper.prepush)
+                    repo.prepushoutgoinghooks.add(b'lfs', wrapper.prepush)
                     break
 
-        ui.setconfig('hooks', 'commit.lfs', checkrequireslfs, 'lfs')
-        ui.setconfig('hooks', 'pretxnchangegroup.lfs', checkrequireslfs, 'lfs')
+        ui.setconfig(b'hooks', b'commit.lfs', checkrequireslfs, b'lfs')
+        ui.setconfig(
+            b'hooks', b'pretxnchangegroup.lfs', checkrequireslfs, b'lfs'
+        )
     else:
-        repo.prepushoutgoinghooks.add('lfs', wrapper.prepush)
+        repo.prepushoutgoinghooks.add(b'lfs', wrapper.prepush)
 
 
 def _trackedmatcher(repo):
     """Return a function (path, size) -> bool indicating whether or not to
     track a given file with lfs."""
-    if not repo.wvfs.exists('.hglfs'):
+    if not repo.wvfs.exists(b'.hglfs'):
         # No '.hglfs' in wdir.  Fallback to config for now.
-        trackspec = repo.ui.config('lfs', 'track')
+        trackspec = repo.ui.config(b'lfs', b'track')
 
         # deprecated config: lfs.threshold
-        threshold = repo.ui.configbytes('lfs', 'threshold')
+        threshold = repo.ui.configbytes(b'lfs', b'threshold')
         if threshold:
             filesetlang.parse(trackspec)  # make sure syntax errors are confined
-            trackspec = "(%s) | size('>%d')" % (trackspec, threshold)
+            trackspec = b"(%s) | size('>%d')" % (trackspec, threshold)
 
         return minifileset.compile(trackspec)
 
-    data = repo.wvfs.tryread('.hglfs')
+    data = repo.wvfs.tryread(b'.hglfs')
     if not data:
         return lambda p, s: False
 
     # Parse errors here will abort with a message that points to the .hglfs file
     # and line number.
     cfg = config.config()
-    cfg.parse('.hglfs', data)
+    cfg.parse(b'.hglfs', data)
 
     try:
         rules = [
             (minifileset.compile(pattern), minifileset.compile(rule))
-            for pattern, rule in cfg.items('track')
+            for pattern, rule in cfg.items(b'track')
         ]
     except error.ParseError as e:
         # The original exception gives no indicator that the error is in the
         # .hglfs file, so add that.
 
         # TODO: See if the line number of the file can be made available.
-        raise error.Abort(_('parse error in .hglfs: %s') % e)
+        raise error.Abort(_(b'parse error in .hglfs: %s') % e)
 
     def _match(path, size):
         for pat, rule in rules:
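
_trackedmatcher prefers an .hglfs file at the working-directory root and only
falls back to the lfs.track fileset (OR-ed with the deprecated lfs.threshold as
a size() clause). An illustrative .hglfs; each key in [track] is a pattern and
each value a minifileset predicate (patterns here are examples, not from this
patch):

    [track]
    **.zip = all()                 # always store zip files in lfs
    **.txt = size(">1MB")          # text files only beyond 1MB
    ** = none()                    # everything else stays in normal storage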
@@ -322,7 +324,7 @@
     wrapfunction(filelog, 'size', wrapper.filelogsize)
 
 
-@eh.wrapfunction(localrepo, 'resolverevlogstorevfsoptions')
+@eh.wrapfunction(localrepo, b'resolverevlogstorevfsoptions')
 def _resolverevlogstorevfsoptions(orig, ui, requirements, features):
     opts = orig(ui, requirements, features)
     for name, module in extensions.extensions(ui):
@@ -346,56 +348,56 @@
 
     context.basefilectx.islfs = wrapper.filectxislfs
 
-    scmutil.fileprefetchhooks.add('lfs', wrapper._prefetchfiles)
+    scmutil.fileprefetchhooks.add(b'lfs', wrapper._prefetchfiles)
 
     # Make bundle choose changegroup3 instead of changegroup2. This affects
     # "hg bundle" command. Note: it does not cover all bundle formats like
     # "packed1". Using "packed1" with lfs will likely cause trouble.
-    exchange._bundlespeccontentopts["v2"]["cg.version"] = "03"
+    exchange._bundlespeccontentopts[b"v2"][b"cg.version"] = b"03"
 
 
-@eh.filesetpredicate('lfs()')
+@eh.filesetpredicate(b'lfs()')
 def lfsfileset(mctx, x):
     """File that uses LFS storage."""
     # i18n: "lfs" is a keyword
-    filesetlang.getargs(x, 0, 0, _("lfs takes no arguments"))
+    filesetlang.getargs(x, 0, 0, _(b"lfs takes no arguments"))
     ctx = mctx.ctx
 
     def lfsfilep(f):
         return wrapper.pointerfromctx(ctx, f, removed=True) is not None
 
-    return mctx.predicate(lfsfilep, predrepr='<lfs>')
+    return mctx.predicate(lfsfilep, predrepr=b'<lfs>')
 
 
-@eh.templatekeyword('lfs_files', requires={'ctx'})
+@eh.templatekeyword(b'lfs_files', requires={b'ctx'})
 def lfsfiles(context, mapping):
     """List of strings. All files modified, added, or removed by this
     changeset."""
-    ctx = context.resource(mapping, 'ctx')
+    ctx = context.resource(mapping, b'ctx')
 
     pointers = wrapper.pointersfromctx(ctx, removed=True)  # {path: pointer}
     files = sorted(pointers.keys())
 
     def pointer(v):
         # In the file spec, version is first and the other keys are sorted.
-        sortkeyfunc = lambda x: (x[0] != 'version', x)
+        sortkeyfunc = lambda x: (x[0] != b'version', x)
         items = sorted(pointers[v].iteritems(), key=sortkeyfunc)
         return util.sortdict(items)
 
     makemap = lambda v: {
-        'file': v,
-        'lfsoid': pointers[v].oid() if pointers[v] else None,
-        'lfspointer': templateutil.hybriddict(pointer(v)),
+        b'file': v,
+        b'lfsoid': pointers[v].oid() if pointers[v] else None,
+        b'lfspointer': templateutil.hybriddict(pointer(v)),
     }
 
     # TODO: make the separator ', '?
-    f = templateutil._showcompatlist(context, mapping, 'lfs_file', files)
+    f = templateutil._showcompatlist(context, mapping, b'lfs_file', files)
     return templateutil.hybrid(f, files, makemap, pycompat.identity)
 
 
 @eh.command(
-    'debuglfsupload',
-    [('r', 'rev', [], _('upload large files introduced by REV'))],
+    b'debuglfsupload',
+    [(b'r', b'rev', [], _(b'upload large files introduced by REV'))],
 )
 def debuglfsupload(ui, repo, **opts):
     """upload lfs blobs added by the working copy parent or given revisions"""
--- a/hgext/lfs/blobstore.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/lfs/blobstore.py	Sun Oct 06 09:48:39 2019 -0400
@@ -66,7 +66,7 @@
                     [dirpath + f for f in files if _lfsre.match(dirpath + f)]
                 )
 
-        yield ('', [], oids)
+        yield (b'', [], oids)
 
 
 class nullvfs(lfsvfs):
@@ -256,7 +256,7 @@
     if isinstance(urlerror.reason, Exception):
         inst = urlerror.reason
 
-    if util.safehasattr(inst, 'reason'):
+    if util.safehasattr(inst, b'reason'):
         try:  # usually it is in the form (errno, strerror)
             reason = inst.reason.args[1]
         except (AttributeError, IndexError):
@@ -703,11 +703,11 @@
     https://github.com/git-lfs/git-lfs/blob/master/docs/api/server-discovery.md
     """
     lfsurl = repo.ui.config(b'lfs', b'url')
-    url = util.url(lfsurl or '')
+    url = util.url(lfsurl or b'')
     if lfsurl is None:
         if remote:
             path = remote
-        elif util.safehasattr(repo, '_subtoppath'):
+        elif util.safehasattr(repo, b'_subtoppath'):
             # The pull command sets this during the optional update phase, which
             # tells exactly where the pull originated, whether 'paths.default'
             # or explicit.
--- a/hgext/lfs/pointer.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/lfs/pointer.py	Sun Oct 06 09:48:39 2019 -0400
@@ -23,41 +23,41 @@
 
 
 class gitlfspointer(dict):
-    VERSION = 'https://git-lfs.github.com/spec/v1'
+    VERSION = b'https://git-lfs.github.com/spec/v1'
 
     def __init__(self, *args, **kwargs):
-        self['version'] = self.VERSION
+        self[b'version'] = self.VERSION
         super(gitlfspointer, self).__init__(*args)
         self.update(pycompat.byteskwargs(kwargs))
 
     @classmethod
     def deserialize(cls, text):
         try:
-            return cls(l.split(' ', 1) for l in text.splitlines()).validate()
+            return cls(l.split(b' ', 1) for l in text.splitlines()).validate()
         except ValueError:  # l.split returns 1 item instead of 2
             raise InvalidPointer(
-                _('cannot parse git-lfs text: %s') % stringutil.pprint(text)
+                _(b'cannot parse git-lfs text: %s') % stringutil.pprint(text)
             )
 
     def serialize(self):
-        sortkeyfunc = lambda x: (x[0] != 'version', x)
+        sortkeyfunc = lambda x: (x[0] != b'version', x)
         items = sorted(self.validate().iteritems(), key=sortkeyfunc)
-        return ''.join('%s %s\n' % (k, v) for k, v in items)
+        return b''.join(b'%s %s\n' % (k, v) for k, v in items)
 
     def oid(self):
-        return self['oid'].split(':')[-1]
+        return self[b'oid'].split(b':')[-1]
 
     def size(self):
-        return int(self['size'])
+        return int(self[b'size'])
 
     # regular expressions used by _validate
     # see https://github.com/git-lfs/git-lfs/blob/master/docs/spec.md
     _keyre = re.compile(br'\A[a-z0-9.-]+\Z')
     _valuere = re.compile(br'\A[^\n]*\Z')
     _requiredre = {
-        'size': re.compile(br'\A[0-9]+\Z'),
-        'oid': re.compile(br'\Asha256:[0-9a-f]{64}\Z'),
-        'version': re.compile(br'\A%s\Z' % stringutil.reescape(VERSION)),
+        b'size': re.compile(br'\A[0-9]+\Z'),
+        b'oid': re.compile(br'\Asha256:[0-9a-f]{64}\Z'),
+        b'version': re.compile(br'\A%s\Z' % stringutil.reescape(VERSION)),
     }
 
     def validate(self):
@@ -67,21 +67,21 @@
             if k in self._requiredre:
                 if not self._requiredre[k].match(v):
                     raise InvalidPointer(
-                        _('unexpected lfs pointer value: %s=%s')
+                        _(b'unexpected lfs pointer value: %s=%s')
                         % (k, stringutil.pprint(v))
                     )
                 requiredcount += 1
             elif not self._keyre.match(k):
-                raise InvalidPointer(_('unexpected lfs pointer key: %s') % k)
+                raise InvalidPointer(_(b'unexpected lfs pointer key: %s') % k)
             if not self._valuere.match(v):
                 raise InvalidPointer(
-                    _('unexpected lfs pointer value: %s=%s')
+                    _(b'unexpected lfs pointer value: %s=%s')
                     % (k, stringutil.pprint(v))
                 )
         if len(self._requiredre) != requiredcount:
             miss = sorted(set(self._requiredre.keys()).difference(self.keys()))
             raise InvalidPointer(
-                _('missing lfs pointer keys: %s') % ', '.join(miss)
+                _(b'missing lfs pointer keys: %s') % b', '.join(miss)
             )
         return self
 
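serialize() always emits the version line first and the remaining keys sorted,
so pointer files are byte-for-byte stable; deserialize() reverses this and
validate() enforces the required keys. A round-trip sketch under the py2-era
semantics this file assumes (the all-zero oid is a placeholder):

    text = (b'version https://git-lfs.github.com/spec/v1\n'
            b'oid sha256:' + b'0' * 64 + b'\n'
            b'size 12\n')
    p = gitlfspointer.deserialize(text)
    assert p.oid() == b'0' * 64 and p.size() == 12
    assert p.serialize() == text  # version first, then sorted keys
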
--- a/hgext/lfs/wireprotolfsserver.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/lfs/wireprotolfsserver.py	Sun Oct 06 09:48:39 2019 -0400
@@ -34,7 +34,7 @@
 eh = exthelper.exthelper()
 
 
-@eh.wrapfunction(wireprotoserver, 'handlewsgirequest')
+@eh.wrapfunction(wireprotoserver, b'handlewsgirequest')
 def handlewsgirequest(orig, rctx, req, res, checkperm):
     """Wrap wireprotoserver.handlewsgirequest() to possibly process an LFS
     request if it is left unprocessed by the wrapped method.
@@ -45,7 +45,7 @@
     if not rctx.repo.ui.configbool(b'experimental', b'lfs.serve'):
         return False
 
-    if not util.safehasattr(rctx.repo.svfs, 'lfslocalblobstore'):
+    if not util.safehasattr(rctx.repo.svfs, b'lfslocalblobstore'):
         return False
 
     if not req.dispatchpath:
--- a/hgext/lfs/wrapper.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/lfs/wrapper.py	Sun Oct 06 09:48:39 2019 -0400
@@ -46,7 +46,7 @@
 eh = exthelper.exthelper()
 
 
-@eh.wrapfunction(localrepo, 'makefilestorage')
+@eh.wrapfunction(localrepo, b'makefilestorage')
 def localrepomakefilestorage(orig, requirements, features, **kwargs):
     if b'lfs' in requirements:
         features.add(repository.REPO_FEATURE_LFS)
@@ -54,26 +54,26 @@
     return orig(requirements=requirements, features=features, **kwargs)
 
 
-@eh.wrapfunction(changegroup, 'allsupportedversions')
+@eh.wrapfunction(changegroup, b'allsupportedversions')
 def allsupportedversions(orig, ui):
     versions = orig(ui)
-    versions.add('03')
+    versions.add(b'03')
     return versions
 
 
-@eh.wrapfunction(wireprotov1server, '_capabilities')
+@eh.wrapfunction(wireprotov1server, b'_capabilities')
 def _capabilities(orig, repo, proto):
     '''Wrap server command to announce lfs server capability'''
     caps = orig(repo, proto)
-    if util.safehasattr(repo.svfs, 'lfslocalblobstore'):
+    if util.safehasattr(repo.svfs, b'lfslocalblobstore'):
         # Advertise a slightly different capability when lfs is *required*, so
         # that the client knows it MUST load the extension.  If lfs is not
         # required on the server, there's no reason to autoload the extension
         # on the client.
         if b'lfs' in repo.requirements:
-            caps.append('lfs-serve')
+            caps.append(b'lfs-serve')
 
-        caps.append('lfs')
+        caps.append(b'lfs')
     return caps
 
 
@@ -101,10 +101,10 @@
     # pack hg filelog metadata
     hgmeta = {}
     for k in p.keys():
-        if k.startswith('x-hg-'):
-            name = k[len('x-hg-') :]
+        if k.startswith(b'x-hg-'):
+            name = k[len(b'x-hg-') :]
             hgmeta[name] = p[k]
-    if hgmeta or text.startswith('\1\n'):
+    if hgmeta or text.startswith(b'\1\n'):
         text = storageutil.packmeta(hgmeta, text)
 
     return (text, True, {})
@@ -122,20 +122,20 @@
     self.opener.lfslocalblobstore.write(oid, text)
 
     # replace contents with metadata
-    longoid = 'sha256:%s' % oid
-    metadata = pointer.gitlfspointer(oid=longoid, size='%d' % len(text))
+    longoid = b'sha256:%s' % oid
+    metadata = pointer.gitlfspointer(oid=longoid, size=b'%d' % len(text))
 
     # by default, we expect the content to be binary. however, LFS could also
     # be used for non-binary content. add a special entry for non-binary data.
     # this will be used by filectx.isbinary().
     if not stringutil.binary(text):
         # not hg filelog metadata (affecting commit hash), no "x-hg-" prefix
-        metadata['x-is-binary'] = '0'
+        metadata[b'x-is-binary'] = b'0'
 
     # translate hg filelog metadata to lfs metadata with "x-hg-" prefix
     if hgmeta is not None:
         for k, v in hgmeta.iteritems():
-            metadata['x-hg-%s' % k] = v
+            metadata[b'x-hg-%s' % k] = v
 
     rawtext = metadata.serialize()
     return (rawtext, False)
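
writetostore leaves only a pointer in the filelog: hg metadata travels as
x-hg-* keys and the non-binary hint as x-is-binary. For a small text file
recorded as a copy of foo, the stored rawtext would look roughly like this
(oid and copyrev values are placeholders):

    version https://git-lfs.github.com/spec/v1
    oid sha256:<64 hex digits>
    size 7
    x-hg-copy foo
    x-hg-copyrev <40 hex digits>
    x-is-binary 0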
@@ -170,7 +170,7 @@
     **kwds
 ):
     # The matcher isn't available if reposetup() wasn't called.
-    lfstrack = self._revlog.opener.options.get('lfstrack')
+    lfstrack = self._revlog.opener.options.get(b'lfstrack')
 
     if lfstrack:
         textlen = len(text)
@@ -203,8 +203,8 @@
         if not rawtext:
             return False
         metadata = pointer.deserialize(rawtext)
-        if 'x-hg-copy' in metadata and 'x-hg-copyrev' in metadata:
-            return metadata['x-hg-copy'], bin(metadata['x-hg-copyrev'])
+        if b'x-hg-copy' in metadata and b'x-hg-copyrev' in metadata:
+            return metadata[b'x-hg-copy'], bin(metadata[b'x-hg-copyrev'])
         else:
             return False
     return orig(self, node)
@@ -216,11 +216,11 @@
         # fast path: use lfs metadata to answer size
         rawtext = self._revlog.rawdata(rev)
         metadata = pointer.deserialize(rawtext)
-        return int(metadata['size'])
+        return int(metadata[b'size'])
     return orig(self, rev)
 
 
-@eh.wrapfunction(context.basefilectx, 'cmp')
+@eh.wrapfunction(context.basefilectx, b'cmp')
 def filectxcmp(orig, self, fctx):
     """returns True if text is different than fctx"""
     # some fctx (e.g. hg-git) are not based on basefilectx and do not have islfs
@@ -232,13 +232,13 @@
     return orig(self, fctx)
 
 
-@eh.wrapfunction(context.basefilectx, 'isbinary')
+@eh.wrapfunction(context.basefilectx, b'isbinary')
 def filectxisbinary(orig, self):
     if self.islfs():
         # fast path: use lfs metadata to answer isbinary
         metadata = pointer.deserialize(self.rawdata())
         # if lfs metadata says nothing, assume it's binary by default
-        return bool(int(metadata.get('x-is-binary', 1)))
+        return bool(int(metadata.get(b'x-is-binary', 1)))
     return orig(self)
 
 
@@ -246,16 +246,16 @@
     return _islfs(self.filelog(), self.filenode())
 
 
-@eh.wrapfunction(cmdutil, '_updatecatformatter')
+@eh.wrapfunction(cmdutil, b'_updatecatformatter')
 def _updatecatformatter(orig, fm, ctx, matcher, path, decode):
     orig(fm, ctx, matcher, path, decode)
     fm.data(rawdata=ctx[path].rawdata())
 
 
-@eh.wrapfunction(scmutil, 'wrapconvertsink')
+@eh.wrapfunction(scmutil, b'wrapconvertsink')
 def convertsink(orig, sink):
     sink = orig(sink)
-    if sink.repotype == 'hg':
+    if sink.repotype == b'hg':
 
         class lfssink(sink.__class__):
             def putcommit(
@@ -281,13 +281,13 @@
                     cleanp2,
                 )
 
-                if 'lfs' not in self.repo.requirements:
+                if b'lfs' not in self.repo.requirements:
                     ctx = self.repo[node]
 
                     # The file list may contain removed files, so check for
                     # membership before assuming it is in the context.
                     if any(f in ctx and ctx[f].islfs() for f, n in files):
-                        self.repo.requirements.add('lfs')
+                        self.repo.requirements.add(b'lfs')
                         self.repo._writerequirements()
 
                 return node
@@ -299,16 +299,16 @@
 
 # bundlerepo uses "vfsmod.readonlyvfs(othervfs)", we need to make sure lfs
 # options and blob stores are passed from othervfs to the new readonlyvfs.
-@eh.wrapfunction(vfsmod.readonlyvfs, '__init__')
+@eh.wrapfunction(vfsmod.readonlyvfs, b'__init__')
 def vfsinit(orig, self, othervfs):
     orig(self, othervfs)
     # copy lfs related options
     for k, v in othervfs.options.items():
-        if k.startswith('lfs'):
+        if k.startswith(b'lfs'):
             self.options[k] = v
     # also copy lfs blobstores. note: this can run before reposetup, so lfs
     # blobstore attributes are not always ready at this time.
-    for name in ['lfslocalblobstore', 'lfsremoteblobstore']:
+    for name in [b'lfslocalblobstore', b'lfsremoteblobstore']:
         if util.safehasattr(othervfs, name):
             setattr(self, name, getattr(othervfs, name))
 
@@ -316,7 +316,7 @@
 def _prefetchfiles(repo, revs, match):
     """Ensure that required LFS blobs are present, fetching them as a group if
     needed."""
-    if not util.safehasattr(repo.svfs, 'lfslocalblobstore'):
+    if not util.safehasattr(repo.svfs, b'lfslocalblobstore'):
         return
 
     pointers = []
@@ -340,7 +340,7 @@
 
 def _canskipupload(repo):
     # Skip if this hasn't been passed to reposetup()
-    if not util.safehasattr(repo.svfs, 'lfsremoteblobstore'):
+    if not util.safehasattr(repo.svfs, b'lfsremoteblobstore'):
         return True
 
     # if remotestore is a null store, upload is a no-op and can be skipped
@@ -349,7 +349,7 @@
 
 def candownload(repo):
     # Skip if this hasn't been passed to reposetup()
-    if not util.safehasattr(repo.svfs, 'lfsremoteblobstore'):
+    if not util.safehasattr(repo.svfs, b'lfsremoteblobstore'):
         return False
 
     # if remotestore is a null store, downloads will lead to nothing
@@ -377,20 +377,20 @@
     return uploadblobsfromrevs(pushop.repo, pushop.outgoing.missing)
 
 
-@eh.wrapfunction(exchange, 'push')
+@eh.wrapfunction(exchange, b'push')
 def push(orig, repo, remote, *args, **kwargs):
     """bail on push if the extension isn't enabled on remote when needed, and
     update the remote store based on the destination path."""
-    if 'lfs' in repo.requirements:
+    if b'lfs' in repo.requirements:
         # If the remote peer is for a local repo, the requirement tests in the
         # base class method enforce lfs support.  Otherwise, some revisions in
         # this repo use lfs, and the remote repo needs the extension loaded.
-        if not remote.local() and not remote.capable('lfs'):
+        if not remote.local() and not remote.capable(b'lfs'):
             # This is a copy of the message in exchange.push() when requirements
             # are missing between local repos.
-            m = _("required features are not supported in the destination: %s")
+            m = _(b"required features are not supported in the destination: %s")
             raise error.Abort(
-                m % 'lfs', hint=_('enable the lfs extension on the server')
+                m % b'lfs', hint=_(b'enable the lfs extension on the server')
             )
 
         # Repositories where this extension is disabled won't have the field.
@@ -407,7 +407,7 @@
 
 
 # when writing a bundle via "hg bundle" command, upload related LFS blobs
-@eh.wrapfunction(bundle2, 'writenewbundle')
+@eh.wrapfunction(bundle2, b'writenewbundle')
 def writenewbundle(
     orig, ui, repo, source, filename, bundletype, outgoing, *args, **kwargs
 ):
@@ -420,11 +420,13 @@
 
 def extractpointers(repo, revs):
     """return a list of lfs pointers added by given revs"""
-    repo.ui.debug('lfs: computing set of blobs to upload\n')
+    repo.ui.debug(b'lfs: computing set of blobs to upload\n')
     pointers = {}
 
     makeprogress = repo.ui.makeprogress
-    with makeprogress(_('lfs search'), _('changesets'), len(revs)) as progress:
+    with makeprogress(
+        _(b'lfs search'), _(b'changesets'), len(revs)
+    ) as progress:
         for r in revs:
             ctx = repo[r]
             for p in pointersfromctx(ctx).values():
@@ -461,7 +463,7 @@
         return {}
     except pointer.InvalidPointer as ex:
         raise error.Abort(
-            _('lfs: corrupted pointer (%s@%s): %s\n')
+            _(b'lfs: corrupted pointer (%s@%s): %s\n')
             % (f, short(_ctx.node()), ex)
         )
 
@@ -494,27 +496,27 @@
     remoteblob.writebatch(pointers, repo.svfs.lfslocalblobstore)
 
 
-@eh.wrapfunction(upgrade, '_finishdatamigration')
+@eh.wrapfunction(upgrade, b'_finishdatamigration')
 def upgradefinishdatamigration(orig, ui, srcrepo, dstrepo, requirements):
     orig(ui, srcrepo, dstrepo, requirements)
 
     # Skip if this hasn't been passed to reposetup()
-    if util.safehasattr(srcrepo.svfs, 'lfslocalblobstore') and util.safehasattr(
-        dstrepo.svfs, 'lfslocalblobstore'
-    ):
+    if util.safehasattr(
+        srcrepo.svfs, b'lfslocalblobstore'
+    ) and util.safehasattr(dstrepo.svfs, b'lfslocalblobstore'):
         srclfsvfs = srcrepo.svfs.lfslocalblobstore.vfs
         dstlfsvfs = dstrepo.svfs.lfslocalblobstore.vfs
 
         for dirpath, dirs, files in srclfsvfs.walk():
             for oid in files:
-                ui.write(_('copying lfs blob %s\n') % oid)
+                ui.write(_(b'copying lfs blob %s\n') % oid)
                 lfutil.link(srclfsvfs.join(oid), dstlfsvfs.join(oid))
 
 
-@eh.wrapfunction(upgrade, 'preservedrequirements')
-@eh.wrapfunction(upgrade, 'supporteddestrequirements')
+@eh.wrapfunction(upgrade, b'preservedrequirements')
+@eh.wrapfunction(upgrade, b'supporteddestrequirements')
 def upgraderequirements(orig, repo):
     reqs = orig(repo)
-    if 'lfs' in repo.requirements:
-        reqs.add('lfs')
+    if b'lfs' in repo.requirements:
+        reqs.add(b'lfs')
     return reqs
--- a/hgext/logtoprocess.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/logtoprocess.py	Sun Oct 06 09:48:39 2019 -0400
@@ -42,7 +42,7 @@
 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
 # be specifying the version(s) of Mercurial they are tested with, or
 # leave the attribute unspecified.
-testedwith = 'ships-with-hg-core'
+testedwith = b'ships-with-hg-core'
 
 
 class processlogger(object):
--- a/hgext/mq.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/mq.py	Sun Oct 06 09:48:39 2019 -0400
@@ -104,7 +104,7 @@
 )
 
 release = lockmod.release
-seriesopts = [('s', 'summary', None, _('print first line of patch header'))]
+seriesopts = [(b's', b'summary', None, _(b'print first line of patch header'))]
 
 cmdtable = {}
 command = registrar.command(cmdtable)
@@ -112,27 +112,27 @@
 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
 # be specifying the version(s) of Mercurial they are tested with, or
 # leave the attribute unspecified.
-testedwith = 'ships-with-hg-core'
+testedwith = b'ships-with-hg-core'
 
 configtable = {}
 configitem = registrar.configitem(configtable)
 
 configitem(
-    'mq', 'git', default='auto',
+    b'mq', b'git', default=b'auto',
 )
 configitem(
-    'mq', 'keepchanges', default=False,
+    b'mq', b'keepchanges', default=False,
 )
 configitem(
-    'mq', 'plain', default=False,
+    b'mq', b'plain', default=False,
 )
 configitem(
-    'mq', 'secret', default=False,
+    b'mq', b'secret', default=False,
 )
 
 # force load strip extension formerly included in mq and import some utility
 try:
-    stripext = extensions.find('strip')
+    stripext = extensions.find(b'strip')
 except KeyError:
     # note: load is lazy so we could avoid the try-except,
     # but I (marmoute) prefer this explicit code.
@@ -143,7 +143,7 @@
         def log(self, event, msgfmt, *msgargs, **opts):
             pass
 
-    stripext = extensions.load(dummyui(), 'strip', '')
+    stripext = extensions.load(dummyui(), b'strip', b'')
 
 strip = stripext.strip
 
@@ -174,7 +174,7 @@
         self.node, self.name = node, name
 
     def __bytes__(self):
-        return hex(self.node) + ':' + self.name
+        return hex(self.node) + b':' + self.name
 
     __str__ = encoding.strmethod(__bytes__)
     __repr__ = encoding.strmethod(__bytes__)
@@ -183,18 +183,18 @@
 # The order of the headers in 'hg export' HG patches:
 HGHEADERS = [
     #   '# HG changeset patch',
-    '# User ',
-    '# Date ',
-    '#      ',
-    '# Branch ',
-    '# Node ID ',
-    '# Parent  ',  # can occur twice for merges - but that is not relevant for mq
+    b'# User ',
+    b'# Date ',
+    b'#      ',
+    b'# Branch ',
+    b'# Node ID ',
+    b'# Parent  ',  # can occur twice for merges - but that is not relevant for mq
 ]
 # The order of headers in plain 'mail style' patches:
 PLAINHEADERS = {
-    'from': 0,
-    'date': 1,
-    'subject': 2,
+    b'from': 0,
+    b'date': 1,
+    b'subject': 2,
 }
 
 
@@ -221,12 +221,12 @@
     ...                b'# Date ', b'z')
     ['# HG changeset patch', '# Date z', '# Parent  y']
     """
-    start = lines.index('# HG changeset patch') + 1
+    start = lines.index(b'# HG changeset patch') + 1
     newindex = HGHEADERS.index(header)
     bestpos = len(lines)
     for i in range(start, len(lines)):
         line = lines[i]
-        if not line.startswith('# '):
+        if not line.startswith(b'# '):
             bestpos = min(bestpos, i)
             break
         for lineindex, h in enumerate(HGHEADERS):
@@ -261,21 +261,21 @@
     newprio = PLAINHEADERS[header.lower()]
     bestpos = len(lines)
     for i, line in enumerate(lines):
-        if ':' in line:
-            lheader = line.split(':', 1)[0].strip().lower()
+        if b':' in line:
+            lheader = line.split(b':', 1)[0].strip().lower()
             lprio = PLAINHEADERS.get(lheader, newprio + 1)
             if lprio == newprio:
-                lines[i] = '%s: %s' % (header, value)
+                lines[i] = b'%s: %s' % (header, value)
                 return lines
             if lprio > newprio and i < bestpos:
                 bestpos = i
         else:
             if line:
-                lines.insert(i, '')
+                lines.insert(i, b'')
             if i < bestpos:
                 bestpos = i
             break
-    lines.insert(bestpos, '%s: %s' % (header, value))
+    lines.insert(bestpos, b'%s: %s' % (header, value))
     return lines
 
 
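insertplainheader keeps mail-style headers in From/Date/Subject priority order,
overwrites an existing header of the same priority in place, and keeps a blank
line between headers and body. Behavior traced from the code above:

    insertplainheader([b'Date: x'], b'From', b'y')
    #   -> [b'From: y', b'Date: x']      (From sorts before Date)
    insertplainheader([b'From: y'], b'Date', b'z')
    #   -> [b'From: y', b'Date: z']
    insertplainheader([b'body'], b'Date', b'z')
    #   -> [b'Date: z', b'', b'body']    (blank line inserted before the body)
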
@@ -285,9 +285,9 @@
             while lines:
                 l = lines[-1]
                 if (
-                    l.startswith("diff -")
-                    or l.startswith("Index:")
-                    or l.startswith("===========")
+                    l.startswith(b"diff -")
+                    or l.startswith(b"Index:")
+                    or l.startswith(b"===========")
                 ):
                     del lines[-1]
                 else:
@@ -311,54 +311,54 @@
         nodeid = None
         diffstart = 0
 
-        for line in open(pf, 'rb'):
+        for line in open(pf, b'rb'):
             line = line.rstrip()
-            if line.startswith('diff --git') or (
-                diffstart and line.startswith('+++ ')
+            if line.startswith(b'diff --git') or (
+                diffstart and line.startswith(b'+++ ')
             ):
                 diffstart = 2
                 break
             diffstart = 0  # reset
-            if line.startswith("--- "):
+            if line.startswith(b"--- "):
                 diffstart = 1
                 continue
-            elif format == "hgpatch":
+            elif format == b"hgpatch":
                 # parse values when importing the result of an hg export
-                if line.startswith("# User "):
+                if line.startswith(b"# User "):
                     user = line[7:]
-                elif line.startswith("# Date "):
+                elif line.startswith(b"# Date "):
                     date = line[7:]
-                elif line.startswith("# Parent "):
+                elif line.startswith(b"# Parent "):
                     parent = line[9:].lstrip()  # handle double trailing space
-                elif line.startswith("# Branch "):
+                elif line.startswith(b"# Branch "):
                     branch = line[9:]
-                elif line.startswith("# Node ID "):
+                elif line.startswith(b"# Node ID "):
                     nodeid = line[10:]
-                elif not line.startswith("# ") and line:
+                elif not line.startswith(b"# ") and line:
                     message.append(line)
                     format = None
-            elif line == '# HG changeset patch':
+            elif line == b'# HG changeset patch':
                 message = []
-                format = "hgpatch"
-            elif format != "tagdone" and (
-                line.startswith("Subject: ") or line.startswith("subject: ")
+                format = b"hgpatch"
+            elif format != b"tagdone" and (
+                line.startswith(b"Subject: ") or line.startswith(b"subject: ")
             ):
                 subject = line[9:]
-                format = "tag"
-            elif format != "tagdone" and (
-                line.startswith("From: ") or line.startswith("from: ")
+                format = b"tag"
+            elif format != b"tagdone" and (
+                line.startswith(b"From: ") or line.startswith(b"from: ")
             ):
                 user = line[6:]
-                format = "tag"
-            elif format != "tagdone" and (
-                line.startswith("Date: ") or line.startswith("date: ")
+                format = b"tag"
+            elif format != b"tagdone" and (
+                line.startswith(b"Date: ") or line.startswith(b"date: ")
             ):
                 date = line[6:]
-                format = "tag"
-            elif format == "tag" and line == "":
+                format = b"tag"
+            elif format == b"tag" and line == b"":
                 # when looking for tags (subject: from: etc) they
                 # end once you find a blank line in the source
-                format = "tagdone"
+                format = b"tagdone"
             elif message or line:
                 message.append(line)
             comments.append(line)
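[Illustrative sketch, not part of this change: the branch chain above is a
small state machine over `format` -- mail-style headers flip it to b'tag',
the first blank line ends header parsing via b'tagdone', and the HG marker
restarts collection as b'hgpatch'. A compressed rendition of the b'tag'
path on an invented input:]

    lines = [b'From: alice', b'Date: 2019-10-06', b'', b'patch body']
    fmt, user, date, message = None, None, None, []
    for line in lines:
        if fmt != b'tagdone' and line.startswith(b'From: '):
            user, fmt = line[6:], b'tag'
        elif fmt != b'tagdone' and line.startswith(b'Date: '):
            date, fmt = line[6:], b'tag'
        elif fmt == b'tag' and line == b'':
            fmt = b'tagdone'              # headers end at the first blank line
        elif message or line:
            message.append(line)
    assert (user, date, message) == (b'alice', b'2019-10-06', [b'patch body'])
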
@@ -372,7 +372,7 @@
         eatempty(comments)
 
         # make sure message isn't empty
-        if format and format.startswith("tag") and subject:
+        if format and format.startswith(b"tag") and subject:
             message.insert(0, subject)
 
         self.message = message
@@ -386,41 +386,41 @@
         self.haspatch = diffstart > 1
         self.plainmode = (
             plainmode
-            or '# HG changeset patch' not in self.comments
+            or b'# HG changeset patch' not in self.comments
             and any(
-                c.startswith('Date: ') or c.startswith('From: ')
+                c.startswith(b'Date: ') or c.startswith(b'From: ')
                 for c in self.comments
             )
         )
 
     def setuser(self, user):
         try:
-            inserthgheader(self.comments, '# User ', user)
+            inserthgheader(self.comments, b'# User ', user)
         except ValueError:
             if self.plainmode:
-                insertplainheader(self.comments, 'From', user)
+                insertplainheader(self.comments, b'From', user)
             else:
-                tmp = ['# HG changeset patch', '# User ' + user]
+                tmp = [b'# HG changeset patch', b'# User ' + user]
                 self.comments = tmp + self.comments
         self.user = user
 
     def setdate(self, date):
         try:
-            inserthgheader(self.comments, '# Date ', date)
+            inserthgheader(self.comments, b'# Date ', date)
         except ValueError:
             if self.plainmode:
-                insertplainheader(self.comments, 'Date', date)
+                insertplainheader(self.comments, b'Date', date)
             else:
-                tmp = ['# HG changeset patch', '# Date ' + date]
+                tmp = [b'# HG changeset patch', b'# Date ' + date]
                 self.comments = tmp + self.comments
         self.date = date
 
     def setparent(self, parent):
         try:
-            inserthgheader(self.comments, '# Parent  ', parent)
+            inserthgheader(self.comments, b'# Parent  ', parent)
         except ValueError:
             if not self.plainmode:
-                tmp = ['# HG changeset patch', '# Parent  ' + parent]
+                tmp = [b'# HG changeset patch', b'# Parent  ' + parent]
                 self.comments = tmp + self.comments
         self.parent = parent
 
@@ -430,14 +430,14 @@
         self.message = [message]
         if message:
             if self.plainmode and self.comments and self.comments[-1]:
-                self.comments.append('')
+                self.comments.append(b'')
             self.comments.append(message)
 
     def __bytes__(self):
-        s = '\n'.join(self.comments).rstrip()
+        s = b'\n'.join(self.comments).rstrip()
         if not s:
-            return ''
-        return s + '\n\n'
+            return b''
+        return s + b'\n\n'
 
     __str__ = encoding.strmethod(__bytes__)
 
@@ -446,7 +446,7 @@
         If comments contains 'subject: ', message will prepend
         the field and a blank line.'''
         if self.message:
-            subj = 'subject: ' + self.message[0].lower()
+            subj = b'subject: ' + self.message[0].lower()
             for i in pycompat.xrange(len(self.comments)):
                 if subj == self.comments[i].lower():
                     del self.comments[i]
@@ -467,13 +467,13 @@
     """
     repo = repo.unfiltered()
     if phase is None:
-        if repo.ui.configbool('mq', 'secret'):
+        if repo.ui.configbool(b'mq', b'secret'):
             phase = phases.secret
-    overrides = {('ui', 'allowemptycommit'): True}
+    overrides = {(b'ui', b'allowemptycommit'): True}
     if phase is not None:
-        overrides[('phases', 'new-commit')] = phase
-    with repo.ui.configoverride(overrides, 'mq'):
-        repo.ui.setconfig('ui', 'allowemptycommit', True)
+        overrides[(b'phases', b'new-commit')] = phase
+    with repo.ui.configoverride(overrides, b'mq'):
+        repo.ui.setconfig(b'ui', b'allowemptycommit', True)
         return repo.commit(*args, **kwargs)
 
 
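[Illustrative sketch, not part of this change: newcommit() scopes its
tweaks with ui.configoverride, whose keys are (section, item) tuples that
must now be all-bytes. Assumes a Mercurial installation is importable; the
override value is invented.]

    from mercurial import ui as uimod

    u = uimod.ui.load()
    with u.configoverride({(b'phases', b'new-commit'): b'secret'}, b'mq'):
        assert u.config(b'phases', b'new-commit') == b'secret'
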
@@ -485,15 +485,15 @@
     def __init__(self, ui, baseui, path, patchdir=None):
         self.basepath = path
         try:
-            with open(os.path.join(path, 'patches.queue'), r'rb') as fh:
+            with open(os.path.join(path, b'patches.queue'), r'rb') as fh:
                 cur = fh.read().rstrip()
 
             if not cur:
-                curpath = os.path.join(path, 'patches')
+                curpath = os.path.join(path, b'patches')
             else:
-                curpath = os.path.join(path, 'patches-' + cur)
+                curpath = os.path.join(path, b'patches-' + cur)
         except IOError:
-            curpath = os.path.join(path, 'patches')
+            curpath = os.path.join(path, b'patches')
         self.path = patchdir or curpath
         self.opener = vfsmod.vfs(self.path)
         self.ui = ui
@@ -501,35 +501,35 @@
         self.applieddirty = False
         self.seriesdirty = False
         self.added = []
-        self.seriespath = "series"
-        self.statuspath = "status"
-        self.guardspath = "guards"
+        self.seriespath = b"series"
+        self.statuspath = b"status"
+        self.guardspath = b"guards"
         self.activeguards = None
         self.guardsdirty = False
         # Handle mq.git as a bool with extended values
-        gitmode = ui.config('mq', 'git').lower()
+        gitmode = ui.config(b'mq', b'git').lower()
         boolmode = stringutil.parsebool(gitmode)
         if boolmode is not None:
             if boolmode:
-                gitmode = 'yes'
+                gitmode = b'yes'
             else:
-                gitmode = 'no'
+                gitmode = b'no'
         self.gitmode = gitmode
         # deprecated config: mq.plain
-        self.plainmode = ui.configbool('mq', 'plain')
+        self.plainmode = ui.configbool(b'mq', b'plain')
         self.checkapplied = True
 
     @util.propertycache
     def applied(self):
         def parselines(lines):
             for l in lines:
-                entry = l.split(':', 1)
+                entry = l.split(b':', 1)
                 if len(entry) > 1:
                     n, name = entry
                     yield statusentry(bin(n), name)
                 elif l.strip():
                     self.ui.warn(
-                        _('malformated mq status line: %s\n')
+                        _(b'malformed mq status line: %s\n')
                         % stringutil.pprint(entry)
                     )
                 # else we ignore empty lines
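[Illustrative sketch, not part of this change: parselines() above splits
each status line on the first b':' into a binary node and a patch name,
and statusentry.__bytes__ (earlier in this file) re-joins them the same
way. Round trip with a dummy 40-digit hex node:]

    from mercurial.node import bin, hex

    line = b'aa' * 20 + b':fix-bug.patch'    # dummy '<hex-node>:<name>' entry
    n, name = line.split(b':', 1)
    assert hex(bin(n)) + b':' + name == line  # mirrors statusentry.__bytes__
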
@@ -562,7 +562,7 @@
         return self.seriesguards
 
     def invalidate(self):
-        for a in 'applied fullseries series seriesguards'.split():
+        for a in b'applied fullseries series seriesguards'.split():
             if a in self.__dict__:
                 delattr(self, a)
         self.applieddirty = False
@@ -580,15 +580,15 @@
             whitespace=not plain,
             formatchanging=not plain,
         )
-        if self.gitmode == 'auto':
+        if self.gitmode == b'auto':
             diffopts.upgrade = True
-        elif self.gitmode == 'keep':
+        elif self.gitmode == b'keep':
             pass
-        elif self.gitmode in ('yes', 'no'):
-            diffopts.git = self.gitmode == 'yes'
+        elif self.gitmode in (b'yes', b'no'):
+            diffopts.git = self.gitmode == b'yes'
         else:
             raise error.Abort(
-                _('mq.git option can be auto/keep/yes/no' ' got %s')
+                _(b'mq.git option can be auto/keep/yes/no, got %s')
                 % self.gitmode
             )
         if patchfn:
@@ -600,12 +600,12 @@
         referenced patch is a git patch and should be preserved as such.
         """
         diffopts = diffopts.copy()
-        if not diffopts.git and self.gitmode == 'keep':
+        if not diffopts.git and self.gitmode == b'keep':
             for patchfn in patches:
-                patchf = self.opener(patchfn, 'r')
+                patchf = self.opener(patchfn, b'r')
                 # if the patch was a git patch, refresh it as a git patch
                 diffopts.git = any(
-                    line.startswith('diff --git') for line in patchf
+                    line.startswith(b'diff --git') for line in patchf
                 )
                 patchf.close()
         return diffopts
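[Illustrative sketch, not part of this change: patchopts() decides whether
to refresh in git format by scanning the existing patch for a
b'diff --git' line. The same test, reduced to a standalone check over an
invented patch body:]

    patchbody = b'# HG changeset patch\ndiff --git a/f b/f\n--- a/f\n'
    assert any(line.startswith(b'diff --git')
               for line in patchbody.splitlines())
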
@@ -615,7 +615,7 @@
 
     def findseries(self, patch):
         def matchpatch(l):
-            l = l.split('#', 1)[0]
+            l = l.split(b'#', 1)[0]
             return l.strip() == patch
 
         for index, l in enumerate(self.fullseries):
@@ -629,10 +629,10 @@
         self.series = []
         self.seriesguards = []
         for l in self.fullseries:
-            h = l.find('#')
+            h = l.find(b'#')
             if h == -1:
                 patch = l
-                comment = ''
+                comment = b''
             elif h == 0:
                 continue
             else:
@@ -642,7 +642,7 @@
             if patch:
                 if patch in self.series:
                     raise error.Abort(
-                        _('%s appears more than once in %s')
+                        _(b'%s appears more than once in %s')
                         % (patch, self.join(self.seriespath))
                     )
                 self.series.append(patch)
@@ -650,17 +650,17 @@
 
     def checkguard(self, guard):
         if not guard:
-            return _('guard cannot be an empty string')
-        bad_chars = '# \t\r\n\f'
+            return _(b'guard cannot be an empty string')
+        bad_chars = b'# \t\r\n\f'
         first = guard[0]
-        if first in '-+':
-            return _('guard %r starts with invalid character: %r') % (
+        if first in b'-+':
+            return _(b'guard %r starts with invalid character: %r') % (
                 guard,
                 first,
             )
         for c in bad_chars:
             if c in guard:
-                return _('invalid character in guard %r: %r') % (guard, c)
+                return _(b'invalid character in guard %r: %r') % (guard, c)
 
     def setactive(self, guards):
         for guard in guards:
@@ -668,7 +668,7 @@
             if bad:
                 raise error.Abort(bad)
         guards = sorted(set(guards))
-        self.ui.debug('active guards: %s\n' % ' '.join(guards))
+        self.ui.debug(b'active guards: %s\n' % b' '.join(guards))
         self.activeguards = guards
         self.guardsdirty = True
 
@@ -685,7 +685,8 @@
                 bad = self.checkguard(guard)
                 if bad:
                     self.ui.warn(
-                        '%s:%d: %s\n' % (self.join(self.guardspath), i + 1, bad)
+                        b'%s:%d: %s\n'
+                        % (self.join(self.guardspath), i + 1, bad)
                     )
                 else:
                     self.activeguards.append(guard)
@@ -694,14 +695,14 @@
     def setguards(self, idx, guards):
         for g in guards:
             if len(g) < 2:
-                raise error.Abort(_('guard %r too short') % g)
-            if g[0] not in '-+':
-                raise error.Abort(_('guard %r starts with invalid char') % g)
+                raise error.Abort(_(b'guard %r too short') % g)
+            if g[0] not in b'-+':
+                raise error.Abort(_(b'guard %r starts with invalid char') % g)
             bad = self.checkguard(g[1:])
             if bad:
                 raise error.Abort(bad)
-        drop = self.guard_re.sub('', self.fullseries[idx])
-        self.fullseries[idx] = drop + ''.join([' #' + g for g in guards])
+        drop = self.guard_re.sub(b'', self.fullseries[idx])
+        self.fullseries[idx] = drop + b''.join([b' #' + g for g in guards])
         self.parseseries()
         self.seriesdirty = True
 
@@ -713,17 +714,17 @@
             return True, None
         guards = self.active()
         exactneg = [
-            g for g in patchguards if g.startswith('-') and g[1:] in guards
+            g for g in patchguards if g.startswith(b'-') and g[1:] in guards
         ]
         if exactneg:
             return False, stringutil.pprint(exactneg[0])
-        pos = [g for g in patchguards if g.startswith('+')]
+        pos = [g for g in patchguards if g.startswith(b'+')]
         exactpos = [g for g in pos if g[1:] in guards]
         if pos:
             if exactpos:
                 return True, stringutil.pprint(exactpos[0])
-            return False, ' '.join([stringutil.pprint(p) for p in pos])
-        return True, ''
+            return False, b' '.join([stringutil.pprint(p) for p in pos])
+        return True, b''
 
     def explainpushable(self, idx, all_patches=False):
         if all_patches:
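[Illustrative sketch, not part of this change: pushable() above implements
mq's guard algebra -- a b'-' guard vetoes a patch when that guard is
active, and any b'+' guards require at least one active match. Guard names
here are invented.]

    active = {b'stable'}
    patchguards = [b'+stable', b'-experimental']
    exactneg = [g for g in patchguards
                if g.startswith(b'-') and g[1:] in active]
    pos = [g for g in patchguards if g.startswith(b'+')]
    exactpos = [g for g in pos if g[1:] in active]
    assert not exactneg and (not pos or bool(exactpos))   # patch is pushable
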
@@ -738,37 +739,37 @@
             if all_patches and pushable:
                 if why is None:
                     write(
-                        _('allowing %s - no guards in effect\n')
+                        _(b'allowing %s - no guards in effect\n')
                         % self.series[idx]
                     )
                 else:
                     if not why:
                         write(
-                            _('allowing %s - no matching negative guards\n')
+                            _(b'allowing %s - no matching negative guards\n')
                             % self.series[idx]
                         )
                     else:
                         write(
-                            _('allowing %s - guarded by %s\n')
+                            _(b'allowing %s - guarded by %s\n')
                             % (self.series[idx], why)
                         )
             if not pushable:
                 if why:
                     write(
-                        _('skipping %s - guarded by %s\n')
+                        _(b'skipping %s - guarded by %s\n')
                         % (self.series[idx], why)
                     )
                 else:
                     write(
-                        _('skipping %s - no matching guards\n')
+                        _(b'skipping %s - no matching guards\n')
                         % self.series[idx]
                     )
 
     def savedirty(self):
         def writelist(items, path):
-            fp = self.opener(path, 'wb')
+            fp = self.opener(path, b'wb')
             for i in items:
-                fp.write("%s\n" % i)
+                fp.write(b"%s\n" % i)
             fp.close()
 
         if self.applieddirty:
@@ -787,14 +788,14 @@
             self.added = []
 
     def removeundo(self, repo):
-        undo = repo.sjoin('undo')
+        undo = repo.sjoin(b'undo')
         if not os.path.exists(undo):
             return
         try:
             os.unlink(undo)
         except OSError as inst:
             self.ui.warn(
-                _('error removing undo: %s\n') % stringutil.forcebytestr(inst)
+                _(b'error removing undo: %s\n') % stringutil.forcebytestr(inst)
             )
 
     def backup(self, repo, files, copy=False):
@@ -804,7 +805,7 @@
             if os.path.lexists(absf):
                 absorig = scmutil.backuppath(self.ui, repo, f)
                 self.ui.note(
-                    _('saving current version of %s as %s\n')
+                    _(b'saving current version of %s as %s\n')
                     % (f, os.path.relpath(absorig))
                 )
 
@@ -826,7 +827,7 @@
     ):
         if opts is None:
             opts = {}
-        stat = opts.get('stat')
+        stat = opts.get(b'stat')
         m = scmutil.match(repo[node1], files, opts)
         logcmdutil.diffordiffstat(
             self.ui, repo, diffopts, node1, node2, m, changes, stat, fp
@@ -842,9 +843,9 @@
             return (err, n)
 
         if n is None:
-            raise error.Abort(_("apply failed for patch %s") % patch)
-
-        self.ui.warn(_("patch didn't work out, merging %s\n") % patch)
+            raise error.Abort(_(b"apply failed for patch %s") % patch)
+
+        self.ui.warn(_(b"patch didn't work out, merging %s\n") % patch)
 
         # apply failed, strip away that rev and merge.
         hg.clean(repo, head)
@@ -853,17 +854,17 @@
         ctx = repo[rev]
         ret = hg.merge(repo, rev)
         if ret:
-            raise error.Abort(_("update returned %d") % ret)
+            raise error.Abort(_(b"update returned %d") % ret)
         n = newcommit(repo, None, ctx.description(), ctx.user(), force=True)
         if n is None:
-            raise error.Abort(_("repo commit failed"))
+            raise error.Abort(_(b"repo commit failed"))
         try:
             ph = patchheader(mergeq.join(patch), self.plainmode)
         except Exception:
-            raise error.Abort(_("unable to read %s") % patch)
+            raise error.Abort(_(b"unable to read %s") % patch)
 
         diffopts = self.patchopts(diffopts, patch)
-        patchf = self.opener(patch, "w")
+        patchf = self.opener(patch, b"w")
         comments = bytes(ph)
         if comments:
             patchf.write(comments)
@@ -901,8 +902,8 @@
             # so, we insert a merge marker with only one parent.  This way
             # the first patch in the queue is never a merge patch
             #
-            pname = ".hg.patches.merge.marker"
-            n = newcommit(repo, None, '[mq]: merge marker', force=True)
+            pname = b".hg.patches.merge.marker"
+            n = newcommit(repo, None, b'[mq]: merge marker', force=True)
             self.removeundo(repo)
             self.applied.append(statusentry(n, pname))
             self.applieddirty = True
@@ -912,7 +913,7 @@
         for patch in series:
             patch = mergeq.lookup(patch, strict=True)
             if not patch:
-                self.ui.warn(_("patch %s does not exist\n") % patch)
+                self.ui.warn(_(b"patch %s does not exist\n") % patch)
                 return (1, None)
             pushable, reason = self.pushable(patch)
             if not pushable:
@@ -920,7 +921,7 @@
                 continue
             info = mergeq.isapplied(patch)
             if not info:
-                self.ui.warn(_("patch %s is not applied\n") % patch)
+                self.ui.warn(_(b"patch %s is not applied\n") % patch)
                 return (1, None)
             rev = info[1]
             err, head = self.mergeone(repo, mergeq, head, patch, rev, diffopts)
@@ -942,9 +943,9 @@
             )
             return (True, list(files), fuzz)
         except Exception as inst:
-            self.ui.note(stringutil.forcebytestr(inst) + '\n')
+            self.ui.note(stringutil.forcebytestr(inst) + b'\n')
             if not self.ui.verbose:
-                self.ui.warn(_("patch failed, unable to continue (try -v)\n"))
+                self.ui.warn(_(b"patch failed, unable to continue (try -v)\n"))
             self.ui.traceback()
             return (False, list(files), False)
 
@@ -965,7 +966,7 @@
         try:
             wlock = repo.wlock()
             lock = repo.lock()
-            tr = repo.transaction("qpush")
+            tr = repo.transaction(b"qpush")
             try:
                 ret = self._apply(
                     repo,
@@ -1025,25 +1026,25 @@
             if not pushable:
                 self.explainpushable(patchname, all_patches=True)
                 continue
-            self.ui.status(_("applying %s\n") % patchname)
+            self.ui.status(_(b"applying %s\n") % patchname)
             pf = os.path.join(patchdir, patchname)
 
             try:
                 ph = patchheader(self.join(patchname), self.plainmode)
             except IOError:
-                self.ui.warn(_("unable to read %s\n") % patchname)
+                self.ui.warn(_(b"unable to read %s\n") % patchname)
                 err = 1
                 break
 
             message = ph.message
             if not message:
                 # The commit message should not be translated
-                message = "imported patch %s\n" % patchname
+                message = b"imported patch %s\n" % patchname
             else:
                 if list:
                     # The commit message should not be translated
-                    message.append("\nimported patch %s" % patchname)
-                message = '\n'.join(message)
+                    message.append(b"\nimported patch %s" % patchname)
+                message = b'\n'.join(message)
 
             if ph.haspatch:
                 if tobackup:
@@ -1051,8 +1052,8 @@
                     touched = set(touched) & tobackup
                     if touched and keepchanges:
                         raise AbortNoCleanup(
-                            _("conflicting local changes found"),
-                            hint=_("did you forget to qrefresh?"),
+                            _(b"conflicting local changes found"),
+                            hint=_(b"did you forget to qrefresh?"),
                         )
                     self.backup(repo, touched, copy=True)
                     tobackup = tobackup - touched
@@ -1061,7 +1062,7 @@
                     all_files.update(files)
                 patcherr = not patcherr
             else:
-                self.ui.warn(_("patch %s is empty\n") % patchname)
+                self.ui.warn(_(b"patch %s is empty\n") % patchname)
                 patcherr, files, fuzz = 0, [], 0
 
             if merge and files:
@@ -1081,9 +1082,9 @@
                     p1 = repo.dirstate.p1()
                     repo.setparents(p1, merge)
 
-            if all_files and '.hgsubstate' in all_files:
+            if all_files and b'.hgsubstate' in all_files:
                 wctx = repo[None]
-                pctx = repo['.']
+                pctx = repo[b'.']
                 overwrite = False
                 mergedsubstate = subrepoutil.submerge(
                     repo, pctx, wctx, wctx, overwrite
@@ -1096,22 +1097,24 @@
                 repo, None, message, ph.user, ph.date, match=match, force=True
             )
             if repo.changelog.tip() == oldtip:
-                raise error.Abort(_("qpush exactly duplicates child changeset"))
+                raise error.Abort(
+                    _(b"qpush exactly duplicates child changeset")
+                )
             if n is None:
-                raise error.Abort(_("repository commit failed"))
+                raise error.Abort(_(b"repository commit failed"))
 
             if update_status:
                 self.applied.append(statusentry(n, patchname))
 
             if patcherr:
                 self.ui.warn(
-                    _("patch failed, rejects left in working " "directory\n")
+                    _(b"patch failed, rejects left in working " b"directory\n")
                 )
                 err = 2
                 break
 
             if fuzz and strict:
-                self.ui.warn(_("fuzz found when applying patch, stopping\n"))
+                self.ui.warn(_(b"fuzz found when applying patch, stopping\n"))
                 err = 3
                 break
         return (err, n)
@@ -1155,11 +1158,11 @@
             if numrevs:
                 rev = dict((entry.name, entry.node) for entry in qfinished)
                 for p in unknown:
-                    msg = _('revision %s refers to unknown patches: %s\n')
+                    msg = _(b'revision %s refers to unknown patches: %s\n')
                     self.ui.warn(msg % (short(rev[p]), p))
             else:
-                msg = _('unknown patches: %s\n')
-                raise error.Abort(''.join(msg % p for p in unknown))
+                msg = _(b'unknown patches: %s\n')
+                raise error.Abort(b''.join(msg % p for p in unknown))
 
         self.parseseries()
         self.seriesdirty = True
@@ -1171,18 +1174,18 @@
         for i, rev in enumerate(revs):
 
             if rev < firstrev:
-                raise error.Abort(_('revision %d is not managed') % rev)
+                raise error.Abort(_(b'revision %d is not managed') % rev)
 
             ctx = repo[rev]
             base = self.applied[i].node
             if ctx.node() != base:
-                msg = _('cannot delete revision %d above applied patches')
+                msg = _(b'cannot delete revision %d above applied patches')
                 raise error.Abort(msg % rev)
 
             patch = self.applied[i].name
-            for fmt in ('[mq]: %s', 'imported patch %s'):
+            for fmt in (b'[mq]: %s', b'imported patch %s'):
                 if ctx.description() == fmt % patch:
-                    msg = _('patch %s finalized without changeset message\n')
+                    msg = _(b'patch %s finalized without changeset message\n')
                     repo.ui.status(msg % patch)
                     break
 
@@ -1195,18 +1198,18 @@
         repo._phasecache
         patches = self._revpatches(repo, sorted(revs))
         qfinished = self._cleanup(patches, len(patches))
-        if qfinished and repo.ui.configbool('mq', 'secret'):
+        if qfinished and repo.ui.configbool(b'mq', b'secret'):
             # only use this logic when the secret option is added
             oldqbase = repo[qfinished[0]]
             tphase = phases.newcommitphase(repo.ui)
             if oldqbase.phase() > tphase and oldqbase.p1().phase() <= tphase:
-                with repo.transaction('qfinish') as tr:
+                with repo.transaction(b'qfinish') as tr:
                     phases.advanceboundary(repo, tr, tphase, qfinished)
 
     def delete(self, repo, patches, opts):
-        if not patches and not opts.get('rev'):
+        if not patches and not opts.get(b'rev'):
             raise error.Abort(
-                _('qdelete requires at least one revision or ' 'patch name')
+                _(b'qdelete requires at least one revision or ' b'patch name')
             )
 
         realpatches = []
@@ -1214,23 +1217,23 @@
             patch = self.lookup(patch, strict=True)
             info = self.isapplied(patch)
             if info:
-                raise error.Abort(_("cannot delete applied patch %s") % patch)
+                raise error.Abort(_(b"cannot delete applied patch %s") % patch)
             if patch not in self.series:
-                raise error.Abort(_("patch %s not in series file") % patch)
+                raise error.Abort(_(b"patch %s not in series file") % patch)
             if patch not in realpatches:
                 realpatches.append(patch)
 
         numrevs = 0
-        if opts.get('rev'):
+        if opts.get(b'rev'):
             if not self.applied:
-                raise error.Abort(_('no patches applied'))
-            revs = scmutil.revrange(repo, opts.get('rev'))
+                raise error.Abort(_(b'no patches applied'))
+            revs = scmutil.revrange(repo, opts.get(b'rev'))
             revs.sort()
             revpatches = self._revpatches(repo, revs)
             realpatches += revpatches
             numrevs = len(revpatches)
 
-        self._cleanup(realpatches, numrevs, opts.get('keep'))
+        self._cleanup(realpatches, numrevs, opts.get(b'keep'))
 
     def checktoppatch(self, repo):
         '''check that working directory is at qtip'''
@@ -1238,64 +1241,66 @@
             top = self.applied[-1].node
             patch = self.applied[-1].name
             if repo.dirstate.p1() != top:
-                raise error.Abort(_("working directory revision is not qtip"))
+                raise error.Abort(_(b"working directory revision is not qtip"))
             return top, patch
         return None, None
 
     def putsubstate2changes(self, substatestate, changes):
         for files in changes[:3]:
-            if '.hgsubstate' in files:
+            if b'.hgsubstate' in files:
                 return  # already listed up
         # not yet listed up
-        if substatestate in 'a?':
-            changes[1].append('.hgsubstate')
-        elif substatestate in 'r':
-            changes[2].append('.hgsubstate')
+        if substatestate in b'a?':
+            changes[1].append(b'.hgsubstate')
+        elif substatestate in b'r':
+            changes[2].append(b'.hgsubstate')
         else:  # modified
-            changes[0].append('.hgsubstate')
+            changes[0].append(b'.hgsubstate')
 
     def checklocalchanges(self, repo, force=False, refresh=True):
-        excsuffix = ''
+        excsuffix = b''
         if refresh:
-            excsuffix = ', qrefresh first'
+            excsuffix = b', qrefresh first'
             # plain versions for i18n tool to detect them
-            _("local changes found, qrefresh first")
-            _("local changed subrepos found, qrefresh first")
+            _(b"local changes found, qrefresh first")
+            _(b"local changed subrepos found, qrefresh first")
 
         s = repo.status()
         if not force:
             cmdutil.checkunfinished(repo)
             if s.modified or s.added or s.removed or s.deleted:
-                _("local changes found")  # i18n tool detection
-                raise error.Abort(_("local changes found" + excsuffix))
+                _(b"local changes found")  # i18n tool detection
+                raise error.Abort(_(b"local changes found" + excsuffix))
             if checksubstate(repo):
-                _("local changed subrepos found")  # i18n tool detection
-                raise error.Abort(_("local changed subrepos found" + excsuffix))
+                _(b"local changed subrepos found")  # i18n tool detection
+                raise error.Abort(
+                    _(b"local changed subrepos found" + excsuffix)
+                )
         else:
             cmdutil.checkunfinished(repo, skipmerge=True)
         return s
 
-    _reserved = ('series', 'status', 'guards', '.', '..')
+    _reserved = (b'series', b'status', b'guards', b'.', b'..')
 
     def checkreservedname(self, name):
         if name in self._reserved:
             raise error.Abort(
-                _('"%s" cannot be used as the name of a patch') % name
+                _(b'"%s" cannot be used as the name of a patch') % name
             )
         if name != name.strip():
             # whitespace is stripped by parseseries()
             raise error.Abort(
-                _('patch name cannot begin or end with ' 'whitespace')
+                _(b'patch name cannot begin or end with ' b'whitespace')
             )
-        for prefix in ('.hg', '.mq'):
+        for prefix in (b'.hg', b'.mq'):
             if name.startswith(prefix):
                 raise error.Abort(
-                    _('patch name cannot begin with "%s"') % prefix
+                    _(b'patch name cannot begin with "%s"') % prefix
                 )
-        for c in ('#', ':', '\r', '\n'):
+        for c in (b'#', b':', b'\r', b'\n'):
             if c in name:
                 raise error.Abort(
-                    _('%r cannot be used in the name of a patch')
+                    _(b'%r cannot be used in the name of a patch')
                     % pycompat.bytestr(c)
                 )
 
@@ -1304,10 +1309,10 @@
         if not force and os.path.exists(self.join(name)):
             if os.path.isdir(self.join(name)):
                 raise error.Abort(
-                    _('"%s" already exists as a directory') % name
+                    _(b'"%s" already exists as a directory') % name
                 )
             else:
-                raise error.Abort(_('patch "%s" already exists') % name)
+                raise error.Abort(_(b'patch "%s" already exists') % name)
 
     def makepatchname(self, title, fallbackname):
         """Return a suitable filename for title, adding a suffix to make
@@ -1331,36 +1336,36 @@
                 except error.Abort:
                     pass
             i += 1
-            name = '%s__%d' % (namebase, i)
+            name = b'%s__%d' % (namebase, i)
         return name
 
     def checkkeepchanges(self, keepchanges, force):
         if force and keepchanges:
-            raise error.Abort(_('cannot use both --force and --keep-changes'))
+            raise error.Abort(_(b'cannot use both --force and --keep-changes'))
 
     def new(self, repo, patchfn, *pats, **opts):
         """options:
            msg: a string or a no-argument function returning a string
         """
         opts = pycompat.byteskwargs(opts)
-        msg = opts.get('msg')
-        edit = opts.get('edit')
-        editform = opts.get('editform', 'mq.qnew')
-        user = opts.get('user')
-        date = opts.get('date')
+        msg = opts.get(b'msg')
+        edit = opts.get(b'edit')
+        editform = opts.get(b'editform', b'mq.qnew')
+        user = opts.get(b'user')
+        date = opts.get(b'date')
         if date:
             date = dateutil.parsedate(date)
-        diffopts = self.diffopts({'git': opts.get('git')}, plain=True)
-        if opts.get('checkname', True):
+        diffopts = self.diffopts({b'git': opts.get(b'git')}, plain=True)
+        if opts.get(b'checkname', True):
             self.checkpatchname(patchfn)
         inclsubs = checksubstate(repo)
         if inclsubs:
-            substatestate = repo.dirstate['.hgsubstate']
-        if opts.get('include') or opts.get('exclude') or pats:
+            substatestate = repo.dirstate[b'.hgsubstate']
+        if opts.get(b'include') or opts.get(b'exclude') or pats:
             # detect missing files in pats
             def badfn(f, msg):
-                if f != '.hgsubstate':  # .hgsubstate is auto-created
-                    raise error.Abort('%s: %s' % (f, msg))
+                if f != b'.hgsubstate':  # .hgsubstate is auto-created
+                    raise error.Abort(b'%s: %s' % (f, msg))
 
             match = scmutil.match(repo[None], pats, opts, badfn=badfn)
             changes = repo.status(match=match)
@@ -1371,20 +1376,20 @@
             commitfiles.extend(files)
         match = scmutil.matchfiles(repo, commitfiles)
         if len(repo[None].parents()) > 1:
-            raise error.Abort(_('cannot manage merge changesets'))
+            raise error.Abort(_(b'cannot manage merge changesets'))
         self.checktoppatch(repo)
         insert = self.fullseriesend()
         with repo.wlock():
             try:
                 # if patch file write fails, abort early
-                p = self.opener(patchfn, "w")
+                p = self.opener(patchfn, b"w")
             except IOError as e:
                 raise error.Abort(
-                    _('cannot write patch "%s": %s')
+                    _(b'cannot write patch "%s": %s')
                     % (patchfn, encoding.strtolocal(e.strerror))
                 )
             try:
-                defaultmsg = "[mq]: %s" % patchfn
+                defaultmsg = b"[mq]: %s" % patchfn
                 editor = cmdutil.getcommiteditor(editform=editform)
                 if edit:
 
@@ -1395,7 +1400,7 @@
                             return defaultmsg
 
                     # i18n: this message is shown in editor with "HG: " prefix
-                    extramsg = _('Leave message empty to use default message.')
+                    extramsg = _(b'Leave message empty to use default message.')
                     editor = cmdutil.getcommiteditor(
                         finishdesc=finishdesc,
                         extramsg=extramsg,
@@ -1416,7 +1421,7 @@
                     editor=editor,
                 )
                 if n is None:
-                    raise error.Abort(_("repo commit failed"))
+                    raise error.Abort(_(b"repo commit failed"))
                 try:
                     self.fullseries[insert:insert] = [patchfn]
                     self.applied.append(statusentry(n, patchfn))
@@ -1428,11 +1433,11 @@
                     if user:
                         ph.setuser(user)
                     if date:
-                        ph.setdate('%d %d' % date)
+                        ph.setdate(b'%d %d' % date)
                     ph.setparent(hex(nctx.p1().node()))
                     msg = nctx.description().strip()
                     if msg == defaultmsg.strip():
-                        msg = ''
+                        msg = b''
                     ph.setmessage(msg)
                     p.write(bytes(ph))
                     if commitfiles:
@@ -1460,7 +1465,7 @@
                 try:
                     os.unlink(patchpath)
                 except OSError:
-                    self.ui.warn(_('error unlinking %s\n') % patchpath)
+                    self.ui.warn(_(b'error unlinking %s\n') % patchpath)
                 raise
             self.removeundo(repo)
 
@@ -1483,16 +1488,16 @@
                 return s
             matches = [x for x in self.series if s in x]
             if len(matches) > 1:
-                self.ui.warn(_('patch name "%s" is ambiguous:\n') % s)
+                self.ui.warn(_(b'patch name "%s" is ambiguous:\n') % s)
                 for m in matches:
-                    self.ui.warn('  %s\n' % m)
+                    self.ui.warn(b'  %s\n' % m)
                 return None
             if matches:
                 return matches[0]
             if self.series and self.applied:
-                if s == 'qtip':
+                if s == b'qtip':
                     return self.series[self.seriesend(True) - 1]
-                if s == 'qbase':
+                if s == b'qbase':
                     return self.series[0]
             return None
 
@@ -1512,7 +1517,7 @@
                 res = partialname(patch)
                 if res:
                     return res
-                minus = patch.rfind('-')
+                minus = patch.rfind(b'-')
                 if minus >= 0:
                     res = partialname(patch[:minus])
                     if res:
@@ -1524,7 +1529,7 @@
                         else:
                             if i - off >= 0:
                                 return self.series[i - off]
-                plus = patch.rfind('+')
+                plus = patch.rfind(b'+')
                 if plus >= 0:
                     res = partialname(patch[:plus])
                     if res:
@@ -1536,7 +1541,7 @@
                         else:
                             if i + off < len(self.series):
                                 return self.series[i + off]
-        raise error.Abort(_("patch %s not in series") % patch)
+        raise error.Abort(_(b"patch %s not in series") % patch)
 
     def push(
         self,
@@ -1560,10 +1565,10 @@
             if not heads:
                 heads = [nullid]
             if repo.dirstate.p1() not in heads and not exact:
-                self.ui.status(_("(working directory not at a head)\n"))
+                self.ui.status(_(b"(working directory not at a head)\n"))
 
             if not self.series:
-                self.ui.warn(_('no patches in series\n'))
+                self.ui.warn(_(b'no patches in series\n'))
                 return 0
 
             # Suppose our series file is: A B C and the current 'top'
@@ -1574,26 +1579,30 @@
                 patch = self.lookup(patch)
                 info = self.isapplied(patch)
                 if info and info[0] >= len(self.applied) - 1:
-                    self.ui.warn(_('qpush: %s is already at the top\n') % patch)
+                    self.ui.warn(
+                        _(b'qpush: %s is already at the top\n') % patch
+                    )
                     return 0
 
                 pushable, reason = self.pushable(patch)
                 if pushable:
                     if self.series.index(patch) < self.seriesend():
                         raise error.Abort(
-                            _("cannot push to a previous patch: %s") % patch
+                            _(b"cannot push to a previous patch: %s") % patch
                         )
                 else:
                     if reason:
-                        reason = _('guarded by %s') % reason
+                        reason = _(b'guarded by %s') % reason
                     else:
-                        reason = _('no matching guards')
-                    self.ui.warn(_("cannot push '%s' - %s\n") % (patch, reason))
+                        reason = _(b'no matching guards')
+                    self.ui.warn(
+                        _(b"cannot push '%s' - %s\n") % (patch, reason)
+                    )
                     return 1
             elif all:
                 patch = self.series[-1]
                 if self.isapplied(patch):
-                    self.ui.warn(_('all patches are currently applied\n'))
+                    self.ui.warn(_(b'all patches are currently applied\n'))
                     return 0
 
             # Following the above example, starting at 'top' of B:
@@ -1603,7 +1612,7 @@
             # work as it detects an error when done
             start = self.seriesend()
             if start == len(self.series):
-                self.ui.warn(_('patch series already fully applied\n'))
+                self.ui.warn(_(b'patch series already fully applied\n'))
                 return 1
             if not force and not keepchanges:
                 self.checklocalchanges(repo, refresh=self.applied)
@@ -1611,28 +1620,28 @@
             if exact:
                 if keepchanges:
                     raise error.Abort(
-                        _("cannot use --exact and --keep-changes together")
+                        _(b"cannot use --exact and --keep-changes together")
                     )
                 if move:
                     raise error.Abort(
-                        _('cannot use --exact and --move ' 'together')
+                        _(b'cannot use --exact and --move ' b'together')
                     )
                 if self.applied:
                     raise error.Abort(
-                        _('cannot push --exact with applied ' 'patches')
+                        _(b'cannot push --exact with applied ' b'patches')
                     )
                 root = self.series[start]
                 target = patchheader(self.join(root), self.plainmode).parent
                 if not target:
                     raise error.Abort(
-                        _("%s does not have a parent recorded") % root
+                        _(b"%s does not have a parent recorded") % root
                     )
-                if not repo[target] == repo['.']:
+                if not repo[target] == repo[b'.']:
                     hg.update(repo, target)
 
             if move:
                 if not patch:
-                    raise error.Abort(_("please specify the patch to move"))
+                    raise error.Abort(_(b"please specify the patch to move"))
                 for fullstart, rpn in enumerate(self.fullseries):
                     # strip markers for patch guards
                     if self.guard_re.split(rpn, 1)[0] == self.series[start]:
@@ -1688,11 +1697,11 @@
             except AbortNoCleanup:
                 raise
             except:  # re-raises
-                self.ui.warn(_('cleaning up working directory...\n'))
+                self.ui.warn(_(b'cleaning up working directory...\n'))
                 cmdutil.revert(
                     self.ui,
                     repo,
-                    repo['.'],
+                    repo[b'.'],
                     repo.dirstate.parents(),
                     no_backup=True,
                 )
@@ -1701,17 +1710,17 @@
                 for f in all_files:
                     if f not in repo.dirstate:
                         repo.wvfs.unlinkpath(f, ignoremissing=True)
-                self.ui.warn(_('done\n'))
+                self.ui.warn(_(b'done\n'))
                 raise
 
             if not self.applied:
                 return ret[0]
             top = self.applied[-1].name
             if ret[0] and ret[0] > 1:
-                msg = _("errors during apply, please fix and qrefresh %s\n")
+                msg = _(b"errors during apply, please fix and qrefresh %s\n")
                 self.ui.write(msg % top)
             else:
-                self.ui.write(_("now at: %s\n") % top)
+                self.ui.write(_(b"now at: %s\n") % top)
             return ret[0]
 
     def pop(
@@ -1733,12 +1742,12 @@
                     patch = self.lookup(patch)
                 info = self.isapplied(patch)
                 if not info:
-                    raise error.Abort(_("patch %s is not applied") % patch)
+                    raise error.Abort(_(b"patch %s is not applied") % patch)
 
             if not self.applied:
                 # Allow qpop -a to work repeatedly,
                 # but not qpop without an argument
-                self.ui.warn(_("no patches applied\n"))
+                self.ui.warn(_(b"no patches applied\n"))
                 return not all
 
             if all:
@@ -1749,7 +1758,7 @@
                 start = len(self.applied) - 1
 
             if start >= len(self.applied):
-                self.ui.warn(_("qpop: %s is already at the top\n") % patch)
+                self.ui.warn(_(b"qpop: %s is already at the top\n") % patch)
                 return
 
             if not update:
@@ -1757,7 +1766,7 @@
                 rr = [x.node for x in self.applied]
                 for p in parents:
                     if p in rr:
-                        self.ui.warn(_("qpop: forcing dirstate update\n"))
+                        self.ui.warn(_(b"qpop: forcing dirstate update\n"))
                         update = True
             else:
                 parents = [p.node() for p in repo[None].parents()]
@@ -1784,19 +1793,19 @@
                 heads = repo.changelog.heads(rev)
             except error.LookupError:
                 node = short(rev)
-                raise error.Abort(_('trying to pop unknown node %s') % node)
+                raise error.Abort(_(b'trying to pop unknown node %s') % node)
 
             if heads != [self.applied[-1].node]:
                 raise error.Abort(
                     _(
-                        "popping would remove a revision not "
-                        "managed by this patch queue"
+                        b"popping would remove a revision not "
+                        b"managed by this patch queue"
                     )
                 )
             if not repo[self.applied[-1].node].mutable():
                 raise error.Abort(
-                    _("popping would remove a public revision"),
-                    hint=_("see 'hg help phases' for details"),
+                    _(b"popping would remove a public revision"),
+                    hint=_(b"see 'hg help phases' for details"),
                 )
 
             # we know there are no local changes, so we can make a simplified
@@ -1804,13 +1813,13 @@
             if update:
                 qp = self.qparents(repo, rev)
                 ctx = repo[qp]
-                m, a, r, d = repo.status(qp, '.')[:4]
+                m, a, r, d = repo.status(qp, b'.')[:4]
                 if d:
-                    raise error.Abort(_("deletions found between repo revs"))
+                    raise error.Abort(_(b"deletions found between repo revs"))
 
                 tobackup = set(a + m + r) & tobackup
                 if keepchanges and tobackup:
-                    raise error.Abort(_("local changes found, qrefresh first"))
+                    raise error.Abort(_(b"local changes found, qrefresh first"))
                 self.backup(repo, tobackup)
                 with repo.dirstate.parentchange():
                     for f in a:
@@ -1822,23 +1831,23 @@
                         repo.dirstate.normal(f)
                     repo.setparents(qp, nullid)
             for patch in reversed(self.applied[start:end]):
-                self.ui.status(_("popping %s\n") % patch.name)
+                self.ui.status(_(b"popping %s\n") % patch.name)
             del self.applied[start:end]
             strip(self.ui, repo, [rev], update=False, backup=False)
-            for s, state in repo['.'].substate.items():
-                repo['.'].sub(s).get(state)
+            for s, state in repo[b'.'].substate.items():
+                repo[b'.'].sub(s).get(state)
             if self.applied:
-                self.ui.write(_("now at: %s\n") % self.applied[-1].name)
+                self.ui.write(_(b"now at: %s\n") % self.applied[-1].name)
             else:
-                self.ui.write(_("patch queue now empty\n"))
+                self.ui.write(_(b"patch queue now empty\n"))
 
     def diff(self, repo, pats, opts):
         top, patch = self.checktoppatch(repo)
         if not top:
-            self.ui.write(_("no patches applied\n"))
+            self.ui.write(_(b"no patches applied\n"))
             return
         qp = self.qparents(repo, top)
-        if opts.get('reverse'):
+        if opts.get(b'reverse'):
             node1, node2 = None, qp
         else:
             node1, node2 = qp, None
@@ -1848,26 +1857,28 @@
     def refresh(self, repo, pats=None, **opts):
         opts = pycompat.byteskwargs(opts)
         if not self.applied:
-            self.ui.write(_("no patches applied\n"))
+            self.ui.write(_(b"no patches applied\n"))
             return 1
-        msg = opts.get('msg', '').rstrip()
-        edit = opts.get('edit')
-        editform = opts.get('editform', 'mq.qrefresh')
-        newuser = opts.get('user')
-        newdate = opts.get('date')
+        msg = opts.get(b'msg', b'').rstrip()
+        edit = opts.get(b'edit')
+        editform = opts.get(b'editform', b'mq.qrefresh')
+        newuser = opts.get(b'user')
+        newdate = opts.get(b'date')
         if newdate:
-            newdate = '%d %d' % dateutil.parsedate(newdate)
+            newdate = b'%d %d' % dateutil.parsedate(newdate)
         wlock = repo.wlock()
 
         try:
             self.checktoppatch(repo)
             (top, patchfn) = (self.applied[-1].node, self.applied[-1].name)
             if repo.changelog.heads(top) != [top]:
-                raise error.Abort(_("cannot qrefresh a revision with children"))
+                raise error.Abort(
+                    _(b"cannot qrefresh a revision with children")
+                )
             if not repo[top].mutable():
                 raise error.Abort(
-                    _("cannot qrefresh public revision"),
-                    hint=_("see 'hg help phases' for details"),
+                    _(b"cannot qrefresh public revision"),
+                    hint=_(b"see 'hg help phases' for details"),
                 )
 
             cparents = repo.changelog.parents(top)
@@ -1875,11 +1886,11 @@
 
             inclsubs = checksubstate(repo, patchparent)
             if inclsubs:
-                substatestate = repo.dirstate['.hgsubstate']
+                substatestate = repo.dirstate[b'.hgsubstate']
 
             ph = patchheader(self.join(patchfn), self.plainmode)
             diffopts = self.diffopts(
-                {'git': opts.get('git')}, patchfn, plain=True
+                {b'git': opts.get(b'git')}, patchfn, plain=True
             )
             if newuser:
                 ph.setuser(newuser)
@@ -1888,7 +1899,7 @@
             ph.setparent(hex(patchparent))
 
             # only commit new patch when write is complete
-            patchf = self.opener(patchfn, 'w', atomictemp=True)
+            patchf = self.opener(patchfn, b'w', atomictemp=True)
 
             # update the dirstate in place, strip off the qtip commit
             # and then commit.
@@ -1903,7 +1914,7 @@
             match1 = scmutil.match(repo[None], pats, opts)
             # in short mode, we only diff the files included in the
             # patch already plus specified files
-            if opts.get('short'):
+            if opts.get(b'short'):
                 # if amending a patch, we start with existing
                 # files plus specified files - unfiltered
                 match = scmutil.matchfiles(repo, mm + aa + dd + match1.files())
@@ -1963,7 +1974,7 @@
 
             dsguard = None
             try:
-                dsguard = dirstateguard.dirstateguard(repo, 'mq.refresh')
+                dsguard = dirstateguard.dirstateguard(repo, b'mq.refresh')
                 if diffopts.git or diffopts.upgrade:
                     copies = {}
                     for dst in a:
@@ -2026,7 +2037,7 @@
             try:
                 # might be nice to attempt to roll back strip after this
 
-                defaultmsg = "[mq]: %s" % patchfn
+                defaultmsg = b"[mq]: %s" % patchfn
                 editor = cmdutil.getcommiteditor(editform=editform)
                 if edit:
 
@@ -2037,18 +2048,18 @@
                         return defaultmsg
 
                     # i18n: this message is shown in editor with "HG: " prefix
-                    extramsg = _('Leave message empty to use default message.')
+                    extramsg = _(b'Leave message empty to use default message.')
                     editor = cmdutil.getcommiteditor(
                         finishdesc=finishdesc,
                         extramsg=extramsg,
                         editform=editform,
                     )
-                    message = msg or "\n".join(ph.message)
+                    message = msg or b"\n".join(ph.message)
                 elif not msg:
                     if not ph.message:
                         message = defaultmsg
                     else:
-                        message = "\n".join(ph.message)
+                        message = b"\n".join(ph.message)
                 else:
                     message = msg
                     ph.setmessage(msg)
@@ -2058,7 +2069,7 @@
                 lock = tr = None
                 try:
                     lock = repo.lock()
-                    tr = repo.transaction('mq')
+                    tr = repo.transaction(b'mq')
                     n = newcommit(
                         repo,
                         oldphase,
@@ -2096,8 +2107,8 @@
                 self.savedirty()
                 self.ui.warn(
                     _(
-                        'qrefresh interrupted while patch was popped! '
-                        '(revert --all, qpush to recover)\n'
+                        b'qrefresh interrupted while patch was popped! '
+                        b'(revert --all, qpush to recover)\n'
                     )
                 )
                 raise
@@ -2107,7 +2118,7 @@
 
     def init(self, repo, create=False):
         if not create and os.path.isdir(self.path):
-            raise error.Abort(_("patch queue directory already exists"))
+            raise error.Abort(_(b"patch queue directory already exists"))
         try:
             os.mkdir(self.path)
         except OSError as inst:
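The messages above are now wrapped as _(b'...'): Mercurial's gettext wrapper in mercurial.i18n takes and returns bytes, so byteifying the literal keeps the translation lookup consistent. A reduced sketch of the assumed fallback path (the real function consults the installed catalog first):

    def _(message):
        # bytes in, bytes out; untranslated messages pass through as-is
        assert isinstance(message, bytes)
        return message

    _(b"patch queue directory already exists")  # -> the same bytes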
@@ -2118,7 +2129,7 @@
 
     def unapplied(self, repo, patch=None):
         if patch and patch not in self.series:
-            raise error.Abort(_("patch %s is not in series file") % patch)
+            raise error.Abort(_(b"patch %s is not in series file") % patch)
         if not patch:
             start = self.seriesend()
         else:
@@ -2148,38 +2159,38 @@
                 if ph.message:
                     msg = ph.message[0]
                 else:
-                    msg = ''
+                    msg = b''
 
                 if self.ui.formatted():
                     width = self.ui.termwidth() - len(pfx) - len(patchname) - 2
                     if width > 0:
                         msg = stringutil.ellipsis(msg, width)
                     else:
-                        msg = ''
-                self.ui.write(patchname, label='qseries.' + state)
-                self.ui.write(': ')
-                self.ui.write(msg, label='qseries.message.' + state)
+                        msg = b''
+                self.ui.write(patchname, label=b'qseries.' + state)
+                self.ui.write(b': ')
+                self.ui.write(msg, label=b'qseries.message.' + state)
             else:
-                self.ui.write(patchname, label='qseries.' + state)
-            self.ui.write('\n')
+                self.ui.write(patchname, label=b'qseries.' + state)
+            self.ui.write(b'\n')
 
         applied = {p.name for p in self.applied}
         if length is None:
             length = len(self.series) - start
         if not missing:
             if self.ui.verbose:
-                idxwidth = len("%d" % (start + length - 1))
+                idxwidth = len(b"%d" % (start + length - 1))
             for i in pycompat.xrange(start, start + length):
                 patch = self.series[i]
                 if patch in applied:
-                    char, state = 'A', 'applied'
+                    char, state = b'A', b'applied'
                 elif self.pushable(i)[0]:
-                    char, state = 'U', 'unapplied'
+                    char, state = b'U', b'unapplied'
                 else:
-                    char, state = 'G', 'guarded'
-                pfx = ''
+                    char, state = b'G', b'guarded'
+                pfx = b''
                 if self.ui.verbose:
-                    pfx = '%*d %s ' % (idxwidth, i, char)
+                    pfx = b'%*d %s ' % (idxwidth, i, char)
                 elif status and status != char:
                     continue
                 displayname(pfx, patch, state)
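The b"%d" and b'%*d %s ' lines above lean on %-interpolation for bytes, added in Python 3.5 by PEP 461 with the same numeric flags and width specifiers as str, so the format strings survive byteification unchanged:

    # mirrors the qseries prefix computation above
    idxwidth = len(b"%d" % (0 + 12 - 1))    # -> 2
    pfx = b'%*d %s ' % (idxwidth, 7, b'A')  # -> b' 7 A '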
@@ -2197,15 +2208,15 @@
                             self.seriespath,
                             self.guardspath,
                         )
-                        and not fl.startswith('.')
+                        and not fl.startswith(b'.')
                     ):
                         msng_list.append(fl)
             for x in sorted(msng_list):
-                pfx = self.ui.verbose and 'D ' or ''
-                displayname(pfx, x, 'missing')
+                pfx = self.ui.verbose and b'D ' or b''
+                displayname(pfx, x, b'missing')
 
     def issaveline(self, l):
-        if l.name == '.hg.patches.save.line':
+        if l.name == b'.hg.patches.save.line':
             return True
 
     def qrepo(self, create=False):
@@ -2215,15 +2226,18 @@
             ui.pageractive = self.ui.pageractive
             # internal config: ui.formatted
             ui.setconfig(
-                'ui', 'formatted', self.ui.config('ui', 'formatted'), 'mqpager'
+                b'ui',
+                b'formatted',
+                self.ui.config(b'ui', b'formatted'),
+                b'mqpager',
             )
             ui.setconfig(
-                'ui',
-                'interactive',
-                self.ui.config('ui', 'interactive'),
-                'mqpager',
+                b'ui',
+                b'interactive',
+                self.ui.config(b'ui', b'interactive'),
+                b'mqpager',
             )
-        if create or os.path.isdir(self.join(".hg")):
+        if create or os.path.isdir(self.join(b".hg")):
             return hg.repository(ui, path=self.path, create=create)
 
     def restore(self, repo, rev, delete=None, qupdate=None):
@@ -2235,23 +2249,23 @@
         applied = []
         qpp = None
         for i, line in enumerate(lines):
-            if line == 'Patch Data:':
+            if line == b'Patch Data:':
                 datastart = i + 1
-            elif line.startswith('Dirstate:'):
+            elif line.startswith(b'Dirstate:'):
                 l = line.rstrip()
-                l = l[10:].split(' ')
+                l = l[10:].split(b' ')
                 qpp = [bin(x) for x in l]
             elif datastart is not None:
                 l = line.rstrip()
-                n, name = l.split(':', 1)
+                n, name = l.split(b':', 1)
                 if n:
                     applied.append(statusentry(bin(n), name))
                 else:
                     series.append(l)
         if datastart is None:
-            self.ui.warn(_("no saved patch data found\n"))
+            self.ui.warn(_(b"no saved patch data found\n"))
             return 1
-        self.ui.warn(_("restoring status: %s\n") % lines[0])
+        self.ui.warn(_(b"restoring status: %s\n") % lines[0])
         self.fullseries = series
         self.applied = applied
         self.parseseries()
@@ -2260,9 +2274,9 @@
         heads = repo.changelog.heads()
         if delete:
             if rev not in heads:
-                self.ui.warn(_("save entry has children, leaving it alone\n"))
+                self.ui.warn(_(b"save entry has children, leaving it alone\n"))
             else:
-                self.ui.warn(_("removing save entry %s\n") % short(rev))
+                self.ui.warn(_(b"removing save entry %s\n") % short(rev))
                 pp = repo.dirstate.parents()
                 if rev in pp:
                     update = True
@@ -2271,41 +2285,41 @@
                 strip(self.ui, repo, [rev], update=update, backup=False)
         if qpp:
             self.ui.warn(
-                _("saved queue repository parents: %s %s\n")
+                _(b"saved queue repository parents: %s %s\n")
                 % (short(qpp[0]), short(qpp[1]))
             )
             if qupdate:
-                self.ui.status(_("updating queue directory\n"))
+                self.ui.status(_(b"updating queue directory\n"))
                 r = self.qrepo()
                 if not r:
-                    self.ui.warn(_("unable to load queue repository\n"))
+                    self.ui.warn(_(b"unable to load queue repository\n"))
                     return 1
                 hg.clean(r, qpp[0])
 
     def save(self, repo, msg=None):
         if not self.applied:
-            self.ui.warn(_("save: no patches applied, exiting\n"))
+            self.ui.warn(_(b"save: no patches applied, exiting\n"))
             return 1
         if self.issaveline(self.applied[-1]):
-            self.ui.warn(_("status is already saved\n"))
+            self.ui.warn(_(b"status is already saved\n"))
             return 1
 
         if not msg:
-            msg = _("hg patches saved state")
+            msg = _(b"hg patches saved state")
         else:
-            msg = "hg patches: " + msg.rstrip('\r\n')
+            msg = b"hg patches: " + msg.rstrip(b'\r\n')
         r = self.qrepo()
         if r:
             pp = r.dirstate.parents()
-            msg += "\nDirstate: %s %s" % (hex(pp[0]), hex(pp[1]))
-        msg += "\n\nPatch Data:\n"
-        msg += ''.join('%s\n' % x for x in self.applied)
-        msg += ''.join(':%s\n' % x for x in self.fullseries)
+            msg += b"\nDirstate: %s %s" % (hex(pp[0]), hex(pp[1]))
+        msg += b"\n\nPatch Data:\n"
+        msg += b''.join(b'%s\n' % x for x in self.applied)
+        msg += b''.join(b':%s\n' % x for x in self.fullseries)
         n = repo.commit(msg, force=True)
         if not n:
-            self.ui.warn(_("repo commit failed\n"))
+            self.ui.warn(_(b"repo commit failed\n"))
             return 1
-        self.applied.append(statusentry(n, '.hg.patches.save.line'))
+        self.applied.append(statusentry(n, b'.hg.patches.save.line'))
         self.applieddirty = True
         self.removeundo(repo)
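save() and restore() above are two halves of one format: save() serializes the queue state into a commit message, assembled here entirely from bytes, and restore() splits it back apart on b':'. An illustrative message, with hex node hashes abbreviated to placeholders:

    hg patches saved state
    Dirstate: <p1-hex> <p2-hex>

    Patch Data:
    <node-hex>:first.patch
    :second.patch

After the 'Patch Data:' marker, a line with text before the colon rebuilds an applied statusentry, while a line with a bare leading colon rebuilds a fullseries entry.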
 
@@ -2349,7 +2363,7 @@
         if not self.ui.verbose:
             p = pname
         else:
-            p = ("%d" % self.series.index(pname)) + " " + pname
+            p = (b"%d" % self.series.index(pname)) + b" " + pname
         return p
 
     def qimport(
@@ -2365,21 +2379,21 @@
         def checkseries(patchname):
             if patchname in self.series:
                 raise error.Abort(
-                    _('patch %s is already in the series file') % patchname
+                    _(b'patch %s is already in the series file') % patchname
                 )
 
         if rev:
             if files:
                 raise error.Abort(
-                    _('option "-r" not valid when importing ' 'files')
+                    _(b'option "-r" not valid when importing ' b'files')
                 )
             rev = scmutil.revrange(repo, rev)
             rev.sort(reverse=True)
         elif not files:
-            raise error.Abort(_('no files or revisions specified'))
+            raise error.Abort(_(b'no files or revisions specified'))
         if (len(files) > 1 or len(rev) > 1) and patchname:
             raise error.Abort(
-                _('option "-n" not valid when importing multiple ' 'patches')
+                _(b'option "-n" not valid when importing multiple ' b'patches')
             )
         imported = []
         if rev:
@@ -2389,18 +2403,18 @@
             heads = repo.changelog.heads(repo.changelog.node(rev.first()))
             if len(heads) > 1:
                 raise error.Abort(
-                    _('revision %d is the root of more than one ' 'branch')
+                    _(b'revision %d is the root of more than one ' b'branch')
                     % rev.last()
                 )
             if self.applied:
                 base = repo.changelog.node(rev.first())
                 if base in [n.node for n in self.applied]:
                     raise error.Abort(
-                        _('revision %d is already managed') % rev.first()
+                        _(b'revision %d is already managed') % rev.first()
                     )
                 if heads != [self.applied[-1].node]:
                     raise error.Abort(
-                        _('revision %d is not the parent of ' 'the queue')
+                        _(b'revision %d is not the parent of ' b'the queue')
                         % rev.first()
                     )
                 base = repo.changelog.rev(self.applied[0].node)
@@ -2408,41 +2422,41 @@
             else:
                 if heads != [repo.changelog.node(rev.first())]:
                     raise error.Abort(
-                        _('revision %d has unmanaged children') % rev.first()
+                        _(b'revision %d has unmanaged children') % rev.first()
                     )
                 lastparent = None
 
-            diffopts = self.diffopts({'git': git})
-            with repo.transaction('qimport') as tr:
+            diffopts = self.diffopts({b'git': git})
+            with repo.transaction(b'qimport') as tr:
                 for r in rev:
                     if not repo[r].mutable():
                         raise error.Abort(
-                            _('revision %d is not mutable') % r,
-                            hint=_("see 'hg help phases' " 'for details'),
+                            _(b'revision %d is not mutable') % r,
+                            hint=_(b"see 'hg help phases' " b'for details'),
                         )
                     p1, p2 = repo.changelog.parentrevs(r)
                     n = repo.changelog.node(r)
                     if p2 != nullrev:
                         raise error.Abort(
-                            _('cannot import merge revision %d') % r
+                            _(b'cannot import merge revision %d') % r
                         )
                     if lastparent and lastparent != r:
                         raise error.Abort(
-                            _('revision %d is not the parent of ' '%d')
+                            _(b'revision %d is not the parent of ' b'%d')
                             % (r, lastparent)
                         )
                     lastparent = p1
 
                     if not patchname:
                         patchname = self.makepatchname(
-                            repo[r].description().split('\n', 1)[0],
-                            '%d.diff' % r,
+                            repo[r].description().split(b'\n', 1)[0],
+                            b'%d.diff' % r,
                         )
                     checkseries(patchname)
                     self.checkpatchname(patchname, force)
                     self.fullseries.insert(0, patchname)
 
-                    with self.opener(patchname, "w") as fp:
+                    with self.opener(patchname, b"w") as fp:
                         cmdutil.exportfile(repo, [n], fp, opts=diffopts)
 
                     se = statusentry(n, patchname)
@@ -2451,7 +2465,7 @@
                     self.added.append(patchname)
                     imported.append(patchname)
                     patchname = None
-                    if rev and repo.ui.configbool('mq', 'secret'):
+                    if rev and repo.ui.configbool(b'mq', b'secret'):
                         # if we added anything with --rev, move the secret root
                         phases.retractboundary(repo, tr, phases.secret, [n])
                     self.parseseries()
@@ -2460,9 +2474,9 @@
 
         for i, filename in enumerate(files):
             if existing:
-                if filename == '-':
+                if filename == b'-':
                     raise error.Abort(
-                        _('-e is incompatible with import from -')
+                        _(b'-e is incompatible with import from -')
                     )
                 filename = normname(filename)
                 self.checkreservedname(filename)
@@ -2470,35 +2484,39 @@
                     originpath = self.join(filename)
                     if not os.path.isfile(originpath):
                         raise error.Abort(
-                            _("patch %s does not exist") % filename
+                            _(b"patch %s does not exist") % filename
                         )
 
                 if patchname:
                     self.checkpatchname(patchname, force)
 
                     self.ui.write(
-                        _('renaming %s to %s\n') % (filename, patchname)
+                        _(b'renaming %s to %s\n') % (filename, patchname)
                     )
                     util.rename(originpath, self.join(patchname))
                 else:
                     patchname = filename
 
             else:
-                if filename == '-' and not patchname:
-                    raise error.Abort(_('need --name to import a patch from -'))
+                if filename == b'-' and not patchname:
+                    raise error.Abort(
+                        _(b'need --name to import a patch from -')
+                    )
                 elif not patchname:
-                    patchname = normname(os.path.basename(filename.rstrip('/')))
+                    patchname = normname(
+                        os.path.basename(filename.rstrip(b'/'))
+                    )
                 self.checkpatchname(patchname, force)
                 try:
-                    if filename == '-':
+                    if filename == b'-':
                         text = self.ui.fin.read()
                     else:
                         fp = hg.openpath(self.ui, filename)
                         text = fp.read()
                         fp.close()
                 except (OSError, IOError):
-                    raise error.Abort(_("unable to read file %s") % filename)
-                patchf = self.opener(patchname, "w")
+                    raise error.Abort(_(b"unable to read file %s") % filename)
+                patchf = self.opener(patchname, b"w")
                 patchf.write(text)
                 patchf.close()
             if not force:
@@ -2508,7 +2526,7 @@
                 self.fullseries[index:index] = [patchname]
             self.parseseries()
             self.seriesdirty = True
-            self.ui.warn(_("adding %s to series file\n") % patchname)
+            self.ui.warn(_(b"adding %s to series file\n") % patchname)
             self.added.append(patchname)
             imported.append(patchname)
             patchname = None
@@ -2519,23 +2537,29 @@
 
 def fixkeepchangesopts(ui, opts):
     if (
-        not ui.configbool('mq', 'keepchanges')
-        or opts.get('force')
-        or opts.get('exact')
+        not ui.configbool(b'mq', b'keepchanges')
+        or opts.get(b'force')
+        or opts.get(b'exact')
     ):
         return opts
     opts = dict(opts)
-    opts['keep_changes'] = True
+    opts[b'keep_changes'] = True
     return opts
 
 
 @command(
-    "qdelete|qremove|qrm",
+    b"qdelete|qremove|qrm",
     [
-        ('k', 'keep', None, _('keep patch file')),
-        ('r', 'rev', [], _('stop managing a revision (DEPRECATED)'), _('REV')),
+        (b'k', b'keep', None, _(b'keep patch file')),
+        (
+            b'r',
+            b'rev',
+            [],
+            _(b'stop managing a revision (DEPRECATED)'),
+            _(b'REV'),
+        ),
     ],
-    _('hg qdelete [-k] [PATCH]...'),
+    _(b'hg qdelete [-k] [PATCH]...'),
     helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
 )
 def delete(ui, repo, *patches, **opts):
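The option tables byteified in these @command blocks follow fancyopts' tuple layout, so every string field becomes bytes in one sweep. Schematically, with descriptive field labels that are not source names:

    # (short opt, long opt, default, help text, value placeholder)
    (b'r', b'rev', [], _(b'stop managing a revision (DEPRECATED)'), _(b'REV'))
    # the placeholder field is omitted when the option takes no value:
    (b'k', b'keep', None, _(b'keep patch file'))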
@@ -2554,10 +2578,10 @@
 
 
 @command(
-    "qapplied",
-    [('1', 'last', None, _('show only the preceding applied patch'))]
+    b"qapplied",
+    [(b'1', b'last', None, _(b'show only the preceding applied patch'))]
     + seriesopts,
-    _('hg qapplied [-1] [-s] [PATCH]'),
+    _(b'hg qapplied [-1] [-s] [PATCH]'),
     helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
 )
 def applied(ui, repo, patch=None, **opts):
@@ -2570,32 +2594,32 @@
 
     if patch:
         if patch not in q.series:
-            raise error.Abort(_("patch %s is not in series file") % patch)
+            raise error.Abort(_(b"patch %s is not in series file") % patch)
         end = q.series.index(patch) + 1
     else:
         end = q.seriesend(True)
 
-    if opts.get('last') and not end:
-        ui.write(_("no patches applied\n"))
+    if opts.get(b'last') and not end:
+        ui.write(_(b"no patches applied\n"))
         return 1
-    elif opts.get('last') and end == 1:
-        ui.write(_("only one patch applied\n"))
+    elif opts.get(b'last') and end == 1:
+        ui.write(_(b"only one patch applied\n"))
         return 1
-    elif opts.get('last'):
+    elif opts.get(b'last'):
         start = end - 2
         end = 1
     else:
         start = 0
 
     q.qseries(
-        repo, length=end, start=start, status='A', summary=opts.get('summary')
+        repo, length=end, start=start, status=b'A', summary=opts.get(b'summary')
     )
 
 
 @command(
-    "qunapplied",
-    [('1', 'first', None, _('show only the first patch'))] + seriesopts,
-    _('hg qunapplied [-1] [-s] [PATCH]'),
+    b"qunapplied",
+    [(b'1', b'first', None, _(b'show only the first patch'))] + seriesopts,
+    _(b'hg qunapplied [-1] [-s] [PATCH]'),
     helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
 )
 def unapplied(ui, repo, patch=None, **opts):
@@ -2607,16 +2631,16 @@
     opts = pycompat.byteskwargs(opts)
     if patch:
         if patch not in q.series:
-            raise error.Abort(_("patch %s is not in series file") % patch)
+            raise error.Abort(_(b"patch %s is not in series file") % patch)
         start = q.series.index(patch) + 1
     else:
         start = q.seriesend(True)
 
-    if start == len(q.series) and opts.get('first'):
-        ui.write(_("all patches applied\n"))
+    if start == len(q.series) and opts.get(b'first'):
+        ui.write(_(b"all patches applied\n"))
         return 1
 
-    if opts.get('first'):
+    if opts.get(b'first'):
         length = 1
     else:
         length = None
@@ -2624,28 +2648,28 @@
         repo,
         start=start,
         length=length,
-        status='U',
-        summary=opts.get('summary'),
+        status=b'U',
+        summary=opts.get(b'summary'),
     )
 
 
 @command(
-    "qimport",
+    b"qimport",
     [
-        ('e', 'existing', None, _('import file in patch directory')),
-        ('n', 'name', '', _('name of patch file'), _('NAME')),
-        ('f', 'force', None, _('overwrite existing files')),
+        (b'e', b'existing', None, _(b'import file in patch directory')),
+        (b'n', b'name', b'', _(b'name of patch file'), _(b'NAME')),
+        (b'f', b'force', None, _(b'overwrite existing files')),
         (
-            'r',
-            'rev',
+            b'r',
+            b'rev',
             [],
-            _('place existing revisions under mq control'),
-            _('REV'),
+            _(b'place existing revisions under mq control'),
+            _(b'REV'),
         ),
-        ('g', 'git', None, _('use git extended diff format')),
-        ('P', 'push', None, _('qpush after importing')),
+        (b'g', b'git', None, _(b'use git extended diff format')),
+        (b'P', b'push', None, _(b'qpush after importing')),
     ],
-    _('hg qimport [-e] [-n NAME] [-f] [-g] [-P] [-r REV]... [FILE]...'),
+    _(b'hg qimport [-e] [-n NAME] [-f] [-g] [-P] [-r REV]... [FILE]...'),
     helpcategory=command.CATEGORY_IMPORT_EXPORT,
 )
 def qimport(ui, repo, *filename, **opts):
@@ -2689,16 +2713,16 @@
             imported = q.qimport(
                 repo,
                 filename,
-                patchname=opts.get('name'),
-                existing=opts.get('existing'),
-                force=opts.get('force'),
-                rev=opts.get('rev'),
-                git=opts.get('git'),
+                patchname=opts.get(b'name'),
+                existing=opts.get(b'existing'),
+                force=opts.get(b'force'),
+                rev=opts.get(b'rev'),
+                git=opts.get(b'git'),
             )
         finally:
             q.savedirty()
 
-    if imported and opts.get('push') and not opts.get('rev'):
+    if imported and opts.get(b'push') and not opts.get(b'rev'):
         return q.push(repo, imported[-1])
     return 0
 
@@ -2715,25 +2739,25 @@
     r = q.init(repo, create)
     q.savedirty()
     if r:
-        if not os.path.exists(r.wjoin('.hgignore')):
-            fp = r.wvfs('.hgignore', 'w')
-            fp.write('^\\.hg\n')
-            fp.write('^\\.mq\n')
-            fp.write('syntax: glob\n')
-            fp.write('status\n')
-            fp.write('guards\n')
+        if not os.path.exists(r.wjoin(b'.hgignore')):
+            fp = r.wvfs(b'.hgignore', b'w')
+            fp.write(b'^\\.hg\n')
+            fp.write(b'^\\.mq\n')
+            fp.write(b'syntax: glob\n')
+            fp.write(b'status\n')
+            fp.write(b'guards\n')
             fp.close()
-        if not os.path.exists(r.wjoin('series')):
-            r.wvfs('series', 'w').close()
-        r[None].add(['.hgignore', 'series'])
+        if not os.path.exists(r.wjoin(b'series')):
+            r.wvfs(b'series', b'w').close()
+        r[None].add([b'.hgignore', b'series'])
         commands.add(ui, r)
     return 0
 
 
 @command(
-    "qinit",
-    [('c', 'create-repo', None, _('create queue repository'))],
-    _('hg qinit [-c]'),
+    b"qinit",
+    [(b'c', b'create-repo', None, _(b'create queue repository'))],
+    _(b'hg qinit [-c]'),
     helpcategory=command.CATEGORY_REPO_CREATION,
     helpbasic=True,
 )
@@ -2752,26 +2776,31 @@
 
 
 @command(
-    "qclone",
+    b"qclone",
     [
-        ('', 'pull', None, _('use pull protocol to copy metadata')),
-        ('U', 'noupdate', None, _('do not update the new working directories')),
+        (b'', b'pull', None, _(b'use pull protocol to copy metadata')),
         (
-            '',
-            'uncompressed',
+            b'U',
+            b'noupdate',
             None,
-            _('use uncompressed transfer (fast over LAN)'),
+            _(b'do not update the new working directories'),
         ),
         (
-            'p',
-            'patches',
-            '',
-            _('location of source patch repository'),
-            _('REPO'),
+            b'',
+            b'uncompressed',
+            None,
+            _(b'use uncompressed transfer (fast over LAN)'),
+        ),
+        (
+            b'p',
+            b'patches',
+            b'',
+            _(b'location of source patch repository'),
+            _(b'REPO'),
         ),
     ]
     + cmdutil.remoteopts,
-    _('hg qclone [OPTION]... SOURCE [DEST]'),
+    _(b'hg qclone [OPTION]... SOURCE [DEST]'),
     helpcategory=command.CATEGORY_REPO_CREATION,
     norepo=True,
 )
@@ -2797,9 +2826,9 @@
     def patchdir(repo):
         """compute a patch repo url from a repo object"""
         url = repo.url()
-        if url.endswith('/'):
+        if url.endswith(b'/'):
             url = url[:-1]
-        return url + '/.hg/patches'
+        return url + b'/.hg/patches'
 
     # main repo (destination and sources)
     if dest is None:
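patchdir() above derives the queue repository location from the main repository URL, since mq conventionally versions its patches under .hg/patches. Worked through with a hypothetical URL:

    url = b'https://example.com/repo/'   # hypothetical source URL
    if url.endswith(b'/'):
        url = url[:-1]
    assert url + b'/.hg/patches' == b'https://example.com/repo/.hg/patches'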
@@ -2807,15 +2836,15 @@
     sr = hg.peer(ui, opts, ui.expandpath(source))
 
     # patches repo (source only)
-    if opts.get('patches'):
-        patchespath = ui.expandpath(opts.get('patches'))
+    if opts.get(b'patches'):
+        patchespath = ui.expandpath(opts.get(b'patches'))
     else:
         patchespath = patchdir(sr)
     try:
         hg.peer(ui, opts, patchespath)
     except error.RepoError:
         raise error.Abort(
-            _('versioned patch repository not found' ' (see init --mq)')
+            _(b'versioned patch repository not found' b' (see init --mq)')
         )
     qbase, destrev = None, None
     if sr.local():
@@ -2826,51 +2855,54 @@
                 heads = set(repo.heads())
                 destrev = list(heads.difference(repo.heads(qbase)))
                 destrev.append(repo.changelog.parents(qbase)[0])
-    elif sr.capable('lookup'):
+    elif sr.capable(b'lookup'):
         try:
-            qbase = sr.lookup('qbase')
+            qbase = sr.lookup(b'qbase')
         except error.RepoError:
             pass
 
-    ui.note(_('cloning main repository\n'))
+    ui.note(_(b'cloning main repository\n'))
     sr, dr = hg.clone(
         ui,
         opts,
         sr.url(),
         dest,
-        pull=opts.get('pull'),
+        pull=opts.get(b'pull'),
         revs=destrev,
         update=False,
-        stream=opts.get('uncompressed'),
+        stream=opts.get(b'uncompressed'),
     )
 
-    ui.note(_('cloning patch repository\n'))
+    ui.note(_(b'cloning patch repository\n'))
     hg.clone(
         ui,
         opts,
-        opts.get('patches') or patchdir(sr),
+        opts.get(b'patches') or patchdir(sr),
         patchdir(dr),
-        pull=opts.get('pull'),
-        update=not opts.get('noupdate'),
-        stream=opts.get('uncompressed'),
+        pull=opts.get(b'pull'),
+        update=not opts.get(b'noupdate'),
+        stream=opts.get(b'uncompressed'),
     )
 
     if dr.local():
         repo = dr.local()
         if qbase:
             ui.note(
-                _('stripping applied patches from destination ' 'repository\n')
+                _(
+                    b'stripping applied patches from destination '
+                    b'repository\n'
+                )
             )
             strip(ui, repo, [qbase], update=False, backup=None)
-        if not opts.get('noupdate'):
-            ui.note(_('updating destination repository\n'))
+        if not opts.get(b'noupdate'):
+            ui.note(_(b'updating destination repository\n'))
             hg.update(repo, repo.changelog.tip())
 
 
 @command(
-    "qcommit|qci",
-    commands.table["commit|ci"][1],
-    _('hg qcommit [OPTION]... [FILE]...'),
+    b"qcommit|qci",
+    commands.table[b"commit|ci"][1],
+    _(b'hg qcommit [OPTION]... [FILE]...'),
     helpcategory=command.CATEGORY_COMMITTING,
     inferrepo=True,
 )
@@ -2881,14 +2913,14 @@
     q = repo.mq
     r = q.qrepo()
     if not r:
-        raise error.Abort('no queue repository')
+        raise error.Abort(b'no queue repository')
     commands.commit(r.ui, r, *pats, **opts)
 
 
 @command(
-    "qseries",
-    [('m', 'missing', None, _('print patches not in series')),] + seriesopts,
-    _('hg qseries [-ms]'),
+    b"qseries",
+    [(b'm', b'missing', None, _(b'print patches not in series')),] + seriesopts,
+    _(b'hg qseries [-ms]'),
     helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
 )
 def series(ui, repo, **opts):
@@ -2902,9 +2934,9 @@
 
 
 @command(
-    "qtop",
+    b"qtop",
     seriesopts,
-    _('hg qtop [-s]'),
+    _(b'hg qtop [-s]'),
     helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
 )
 def top(ui, repo, **opts):
@@ -2922,18 +2954,18 @@
             repo,
             start=t - 1,
             length=1,
-            status='A',
+            status=b'A',
             summary=opts.get(r'summary'),
         )
     else:
-        ui.write(_("no patches applied\n"))
+        ui.write(_(b"no patches applied\n"))
         return 1
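Note the key that stays r'summary' rather than becoming b'summary': top() never runs its **opts through pycompat.byteskwargs(), so the keys remain native str, and the r'' prefix marks the literal for byteify-strings to leave alone on this and future passes. A reduced sketch of the distinction:

    def top_sketch(**opts):           # hypothetical stand-in for top()
        # **kwargs keys are always native str on Python 3, so the
        # r'' (still-native) key matches while b'summary' would not
        return opts.get(r'summary')

    assert top_sketch(summary=True) is True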
 
 
 @command(
-    "qnext",
+    b"qnext",
     seriesopts,
-    _('hg qnext [-s]'),
+    _(b'hg qnext [-s]'),
     helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
 )
 def next(ui, repo, **opts):
@@ -2943,15 +2975,15 @@
     q = repo.mq
     end = q.seriesend()
     if end == len(q.series):
-        ui.write(_("all patches applied\n"))
+        ui.write(_(b"all patches applied\n"))
         return 1
     q.qseries(repo, start=end, length=1, summary=opts.get(r'summary'))
 
 
 @command(
-    "qprev",
+    b"qprev",
     seriesopts,
-    _('hg qprev [-s]'),
+    _(b'hg qprev [-s]'),
     helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
 )
 def prev(ui, repo, **opts):
@@ -2961,38 +2993,38 @@
     q = repo.mq
     l = len(q.applied)
     if l == 1:
-        ui.write(_("only one patch applied\n"))
+        ui.write(_(b"only one patch applied\n"))
         return 1
     if not l:
-        ui.write(_("no patches applied\n"))
+        ui.write(_(b"no patches applied\n"))
         return 1
     idx = q.series.index(q.applied[-2].name)
     q.qseries(
-        repo, start=idx, length=1, status='A', summary=opts.get(r'summary')
+        repo, start=idx, length=1, status=b'A', summary=opts.get(r'summary')
     )
 
 
 def setupheaderopts(ui, opts):
-    if not opts.get('user') and opts.get('currentuser'):
-        opts['user'] = ui.username()
-    if not opts.get('date') and opts.get('currentdate'):
-        opts['date'] = "%d %d" % dateutil.makedate()
+    if not opts.get(b'user') and opts.get(b'currentuser'):
+        opts[b'user'] = ui.username()
+    if not opts.get(b'date') and opts.get(b'currentdate'):
+        opts[b'date'] = b"%d %d" % dateutil.makedate()
 
 
 @command(
-    "qnew",
+    b"qnew",
     [
-        ('e', 'edit', None, _('invoke editor on commit messages')),
-        ('f', 'force', None, _('import uncommitted changes (DEPRECATED)')),
-        ('g', 'git', None, _('use git extended diff format')),
-        ('U', 'currentuser', None, _('add "From: <current user>" to patch')),
-        ('u', 'user', '', _('add "From: <USER>" to patch'), _('USER')),
-        ('D', 'currentdate', None, _('add "Date: <current date>" to patch')),
-        ('d', 'date', '', _('add "Date: <DATE>" to patch'), _('DATE')),
+        (b'e', b'edit', None, _(b'invoke editor on commit messages')),
+        (b'f', b'force', None, _(b'import uncommitted changes (DEPRECATED)')),
+        (b'g', b'git', None, _(b'use git extended diff format')),
+        (b'U', b'currentuser', None, _(b'add "From: <current user>" to patch')),
+        (b'u', b'user', b'', _(b'add "From: <USER>" to patch'), _(b'USER')),
+        (b'D', b'currentdate', None, _(b'add "Date: <current date>" to patch')),
+        (b'd', b'date', b'', _(b'add "Date: <DATE>" to patch'), _(b'DATE')),
     ]
     + cmdutil.walkopts
     + cmdutil.commitopts,
-    _('hg qnew [-e] [-m TEXT] [-l FILE] PATCH [FILE]...'),
+    _(b'hg qnew [-e] [-m TEXT] [-l FILE] PATCH [FILE]...'),
     helpcategory=command.CATEGORY_COMMITTING,
     helpbasic=True,
     inferrepo=True,
@@ -3025,7 +3057,7 @@
     opts = pycompat.byteskwargs(opts)
     msg = cmdutil.logmessage(ui, opts)
     q = repo.mq
-    opts['msg'] = msg
+    opts[b'msg'] = msg
     setupheaderopts(ui, opts)
     q.new(repo, patch, *args, **pycompat.strkwargs(opts))
     q.savedirty()
@@ -3033,46 +3065,46 @@
 
 
 @command(
-    "qrefresh",
+    b"qrefresh",
     [
-        ('e', 'edit', None, _('invoke editor on commit messages')),
-        ('g', 'git', None, _('use git extended diff format')),
+        (b'e', b'edit', None, _(b'invoke editor on commit messages')),
+        (b'g', b'git', None, _(b'use git extended diff format')),
         (
-            's',
-            'short',
+            b's',
+            b'short',
             None,
-            _('refresh only files already in the patch and specified files'),
+            _(b'refresh only files already in the patch and specified files'),
         ),
         (
-            'U',
-            'currentuser',
+            b'U',
+            b'currentuser',
             None,
-            _('add/update author field in patch with current user'),
+            _(b'add/update author field in patch with current user'),
         ),
         (
-            'u',
-            'user',
-            '',
-            _('add/update author field in patch with given user'),
-            _('USER'),
+            b'u',
+            b'user',
+            b'',
+            _(b'add/update author field in patch with given user'),
+            _(b'USER'),
         ),
         (
-            'D',
-            'currentdate',
+            b'D',
+            b'currentdate',
             None,
-            _('add/update date field in patch with current date'),
+            _(b'add/update date field in patch with current date'),
         ),
         (
-            'd',
-            'date',
-            '',
-            _('add/update date field in patch with given date'),
-            _('DATE'),
+            b'd',
+            b'date',
+            b'',
+            _(b'add/update date field in patch with given date'),
+            _(b'DATE'),
         ),
     ]
     + cmdutil.walkopts
     + cmdutil.commitopts,
-    _('hg qrefresh [-I] [-X] [-e] [-m TEXT] [-l FILE] [-s] [FILE]...'),
+    _(b'hg qrefresh [-I] [-X] [-e] [-m TEXT] [-l FILE] [-s] [FILE]...'),
     helpcategory=command.CATEGORY_COMMITTING,
     helpbasic=True,
     inferrepo=True,
@@ -3109,9 +3141,9 @@
 
 
 @command(
-    "qdiff",
+    b"qdiff",
     cmdutil.diffopts + cmdutil.diffopts2 + cmdutil.walkopts,
-    _('hg qdiff [OPTION]... [FILE]...'),
+    _(b'hg qdiff [OPTION]... [FILE]...'),
     helpcategory=command.CATEGORY_FILE_CONTENTS,
     helpbasic=True,
     inferrepo=True,
@@ -3131,19 +3163,19 @@
 
     Returns 0 on success.
     """
-    ui.pager('qdiff')
+    ui.pager(b'qdiff')
     repo.mq.diff(repo, pats, pycompat.byteskwargs(opts))
     return 0
 
 
 @command(
-    'qfold',
+    b'qfold',
     [
-        ('e', 'edit', None, _('invoke editor on commit messages')),
-        ('k', 'keep', None, _('keep folded patch files')),
+        (b'e', b'edit', None, _(b'invoke editor on commit messages')),
+        (b'k', b'keep', None, _(b'keep folded patch files')),
     ]
     + cmdutil.commitopts,
-    _('hg qfold [-e] [-k] [-m TEXT] [-l FILE] PATCH...'),
+    _(b'hg qfold [-e] [-k] [-m TEXT] [-l FILE] PATCH...'),
     helpcategory=command.CATEGORY_CHANGE_MANAGEMENT,
 )
 def fold(ui, repo, *files, **opts):
@@ -3163,23 +3195,23 @@
     opts = pycompat.byteskwargs(opts)
     q = repo.mq
     if not files:
-        raise error.Abort(_('qfold requires at least one patch name'))
+        raise error.Abort(_(b'qfold requires at least one patch name'))
     if not q.checktoppatch(repo)[0]:
-        raise error.Abort(_('no patches applied'))
+        raise error.Abort(_(b'no patches applied'))
     q.checklocalchanges(repo)
 
     message = cmdutil.logmessage(ui, opts)
 
-    parent = q.lookup('qtip')
+    parent = q.lookup(b'qtip')
     patches = []
     messages = []
     for f in files:
         p = q.lookup(f)
         if p in patches or p == parent:
-            ui.warn(_('skipping already folded patch %s\n') % p)
+            ui.warn(_(b'skipping already folded patch %s\n') % p)
         if q.isapplied(p):
             raise error.Abort(
-                _('qfold cannot fold already applied patch %s') % p
+                _(b'qfold cannot fold already applied patch %s') % p
             )
         patches.append(p)
 
@@ -3191,7 +3223,7 @@
         pf = q.join(p)
         (patchsuccess, files, fuzz) = q.patch(repo, pf)
         if not patchsuccess:
-            raise error.Abort(_('error folding patch %s') % p)
+            raise error.Abort(_(b'error folding patch %s') % p)
 
     if not message:
         ph = patchheader(q.join(parent), q.plainmode)
@@ -3199,9 +3231,9 @@
         for msg in messages:
             if msg:
                 if message:
-                    message.append('* * *')
+                    message.append(b'* * *')
                 message.extend(msg)
-        message = '\n'.join(message)
+        message = b'\n'.join(message)
 
     diffopts = q.patchopts(q.diffopts(), *patches)
     with repo.wlock():
@@ -3209,21 +3241,26 @@
             repo,
             msg=message,
             git=diffopts.git,
-            edit=opts.get('edit'),
-            editform='mq.qfold',
+            edit=opts.get(b'edit'),
+            editform=b'mq.qfold',
         )
         q.delete(repo, patches, opts)
         q.savedirty()
 
 
 @command(
-    "qgoto",
+    b"qgoto",
     [
-        ('', 'keep-changes', None, _('tolerate non-conflicting local changes')),
-        ('f', 'force', None, _('overwrite any local changes')),
-        ('', 'no-backup', None, _('do not save backup copies of files')),
+        (
+            b'',
+            b'keep-changes',
+            None,
+            _(b'tolerate non-conflicting local changes'),
+        ),
+        (b'f', b'force', None, _(b'overwrite any local changes')),
+        (b'', b'no-backup', None, _(b'do not save backup copies of files')),
     ],
-    _('hg qgoto [OPTION]... PATCH'),
+    _(b'hg qgoto [OPTION]... PATCH'),
     helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
 )
 def goto(ui, repo, patch, **opts):
@@ -3234,13 +3271,13 @@
     opts = fixkeepchangesopts(ui, opts)
     q = repo.mq
     patch = q.lookup(patch)
-    nobackup = opts.get('no_backup')
-    keepchanges = opts.get('keep_changes')
+    nobackup = opts.get(b'no_backup')
+    keepchanges = opts.get(b'keep_changes')
     if q.isapplied(patch):
         ret = q.pop(
             repo,
             patch,
-            force=opts.get('force'),
+            force=opts.get(b'force'),
             nobackup=nobackup,
             keepchanges=keepchanges,
         )
@@ -3248,7 +3285,7 @@
         ret = q.push(
             repo,
             patch,
-            force=opts.get('force'),
+            force=opts.get(b'force'),
             nobackup=nobackup,
             keepchanges=keepchanges,
         )
@@ -3257,12 +3294,12 @@
 
 
 @command(
-    "qguard",
+    b"qguard",
     [
-        ('l', 'list', None, _('list all patches and guards')),
-        ('n', 'none', None, _('drop all guards')),
+        (b'l', b'list', None, _(b'list all patches and guards')),
+        (b'n', b'none', None, _(b'drop all guards')),
     ],
-    _('hg qguard [-l] [-n] [PATCH] [-- [+GUARD]... [-GUARD]...]'),
+    _(b'hg qguard [-l] [-n] [PATCH] [-- [+GUARD]... [-GUARD]...]'),
     helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
 )
 def guard(ui, repo, *args, **opts):
@@ -3289,26 +3326,26 @@
     '''
 
     def status(idx):
-        guards = q.seriesguards[idx] or ['unguarded']
+        guards = q.seriesguards[idx] or [b'unguarded']
         if q.series[idx] in applied:
-            state = 'applied'
+            state = b'applied'
         elif q.pushable(idx)[0]:
-            state = 'unapplied'
+            state = b'unapplied'
         else:
-            state = 'guarded'
-        label = 'qguard.patch qguard.%s qseries.%s' % (state, state)
-        ui.write('%s: ' % ui.label(q.series[idx], label))
+            state = b'guarded'
+        label = b'qguard.patch qguard.%s qseries.%s' % (state, state)
+        ui.write(b'%s: ' % ui.label(q.series[idx], label))
 
         for i, guard in enumerate(guards):
-            if guard.startswith('+'):
-                ui.write(guard, label='qguard.positive')
-            elif guard.startswith('-'):
-                ui.write(guard, label='qguard.negative')
+            if guard.startswith(b'+'):
+                ui.write(guard, label=b'qguard.positive')
+            elif guard.startswith(b'-'):
+                ui.write(guard, label=b'qguard.negative')
             else:
-                ui.write(guard, label='qguard.unguarded')
+                ui.write(guard, label=b'qguard.unguarded')
             if i != len(guards) - 1:
-                ui.write(' ')
-        ui.write('\n')
+                ui.write(b' ')
+        ui.write(b'\n')
 
     q = repo.mq
     applied = set(p.name for p in q.applied)
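status() above renders the guard annotations mq keeps in the series file: a positive guard makes a patch pushable only while that guard is selected, and a negative guard blocks it while selected. An illustrative series file (hypothetical patch names), with the effect of each entry noted alongside:

    fix-core.diff                unguarded: always pushable
    debug-logging.diff #+dbg     pushable only while 'dbg' is selected
    skip-on-win32.diff #-win32   blocked while 'win32' is selected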
@@ -3317,23 +3354,23 @@
     if opts.get(r'list'):
         if args or opts.get(r'none'):
             raise error.Abort(
-                _('cannot mix -l/--list with options or ' 'arguments')
+                _(b'cannot mix -l/--list with options or ' b'arguments')
             )
         for i in pycompat.xrange(len(q.series)):
             status(i)
         return
-    if not args or args[0][0:1] in '-+':
+    if not args or args[0][0:1] in b'-+':
         if not q.applied:
-            raise error.Abort(_('no patches applied'))
+            raise error.Abort(_(b'no patches applied'))
         patch = q.applied[-1].name
-    if patch is None and args[0][0:1] not in '-+':
+    if patch is None and args[0][0:1] not in b'-+':
         patch = args.pop(0)
     if patch is None:
-        raise error.Abort(_('no patch to work with'))
+        raise error.Abort(_(b'no patch to work with'))
     if args or opts.get(r'none'):
         idx = q.findseries(patch)
         if idx is None:
-            raise error.Abort(_('no patch named %s') % patch)
+            raise error.Abort(_(b'no patch named %s') % patch)
         q.setguards(idx, args)
         q.savedirty()
     else:
@@ -3341,9 +3378,9 @@
 
 
 @command(
-    "qheader",
+    b"qheader",
     [],
-    _('hg qheader [PATCH]'),
+    _(b'hg qheader [PATCH]'),
     helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
 )
 def header(ui, repo, patch=None):
@@ -3356,18 +3393,18 @@
         patch = q.lookup(patch)
     else:
         if not q.applied:
-            ui.write(_('no patches applied\n'))
+            ui.write(_(b'no patches applied\n'))
             return 1
-        patch = q.lookup('qtip')
+        patch = q.lookup(b'qtip')
     ph = patchheader(q.join(patch), q.plainmode)
 
-    ui.write('\n'.join(ph.message) + '\n')
+    ui.write(b'\n'.join(ph.message) + b'\n')
 
 
 def lastsavename(path):
     (directory, base) = os.path.split(path)
     names = os.listdir(directory)
-    namere = re.compile("%s.([0-9]+)" % base)
+    namere = re.compile(b"%s.([0-9]+)" % base)
     maxindex = None
     maxname = None
     for f in names:
@@ -3386,29 +3423,39 @@
     (last, index) = lastsavename(path)
     if last is None:
         index = 0
-    newpath = path + ".%d" % (index + 1)
+    newpath = path + b".%d" % (index + 1)
     return newpath
 
 
 @command(
-    "qpush",
+    b"qpush",
     [
-        ('', 'keep-changes', None, _('tolerate non-conflicting local changes')),
-        ('f', 'force', None, _('apply on top of local changes')),
         (
-            'e',
-            'exact',
+            b'',
+            b'keep-changes',
+            None,
+            _(b'tolerate non-conflicting local changes'),
+        ),
+        (b'f', b'force', None, _(b'apply on top of local changes')),
+        (
+            b'e',
+            b'exact',
             None,
-            _('apply the target patch to its recorded parent'),
+            _(b'apply the target patch to its recorded parent'),
         ),
-        ('l', 'list', None, _('list patch name in commit text')),
-        ('a', 'all', None, _('apply all patches')),
-        ('m', 'merge', None, _('merge from another queue (DEPRECATED)')),
-        ('n', 'name', '', _('merge queue name (DEPRECATED)'), _('NAME')),
-        ('', 'move', None, _('reorder patch series and apply only the patch')),
-        ('', 'no-backup', None, _('do not save backup copies of files')),
+        (b'l', b'list', None, _(b'list patch name in commit text')),
+        (b'a', b'all', None, _(b'apply all patches')),
+        (b'm', b'merge', None, _(b'merge from another queue (DEPRECATED)')),
+        (b'n', b'name', b'', _(b'merge queue name (DEPRECATED)'), _(b'NAME')),
+        (
+            b'',
+            b'move',
+            None,
+            _(b'reorder patch series and apply only the patch'),
+        ),
+        (b'', b'no-backup', None, _(b'do not save backup copies of files')),
     ],
-    _('hg qpush [-f] [-l] [-a] [--move] [PATCH | INDEX]'),
+    _(b'hg qpush [-f] [-l] [-a] [--move] [PATCH | INDEX]'),
     helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
     helpbasic=True,
 )
@@ -3427,41 +3474,46 @@
 
     opts = pycompat.byteskwargs(opts)
     opts = fixkeepchangesopts(ui, opts)
-    if opts.get('merge'):
-        if opts.get('name'):
-            newpath = repo.vfs.join(opts.get('name'))
+    if opts.get(b'merge'):
+        if opts.get(b'name'):
+            newpath = repo.vfs.join(opts.get(b'name'))
         else:
             newpath, i = lastsavename(q.path)
         if not newpath:
-            ui.warn(_("no saved queues found, please use -n\n"))
+            ui.warn(_(b"no saved queues found, please use -n\n"))
             return 1
         mergeq = queue(ui, repo.baseui, repo.path, newpath)
-        ui.warn(_("merging with queue at: %s\n") % mergeq.path)
+        ui.warn(_(b"merging with queue at: %s\n") % mergeq.path)
     ret = q.push(
         repo,
         patch,
-        force=opts.get('force'),
-        list=opts.get('list'),
+        force=opts.get(b'force'),
+        list=opts.get(b'list'),
         mergeq=mergeq,
-        all=opts.get('all'),
-        move=opts.get('move'),
-        exact=opts.get('exact'),
-        nobackup=opts.get('no_backup'),
-        keepchanges=opts.get('keep_changes'),
+        all=opts.get(b'all'),
+        move=opts.get(b'move'),
+        exact=opts.get(b'exact'),
+        nobackup=opts.get(b'no_backup'),
+        keepchanges=opts.get(b'keep_changes'),
     )
     return ret
 
 
 @command(
-    "qpop",
+    b"qpop",
     [
-        ('a', 'all', None, _('pop all patches')),
-        ('n', 'name', '', _('queue name to pop (DEPRECATED)'), _('NAME')),
-        ('', 'keep-changes', None, _('tolerate non-conflicting local changes')),
-        ('f', 'force', None, _('forget any local changes to patched files')),
-        ('', 'no-backup', None, _('do not save backup copies of files')),
+        (b'a', b'all', None, _(b'pop all patches')),
+        (b'n', b'name', b'', _(b'queue name to pop (DEPRECATED)'), _(b'NAME')),
+        (
+            b'',
+            b'keep-changes',
+            None,
+            _(b'tolerate non-conflicting local changes'),
+        ),
+        (b'f', b'force', None, _(b'forget any local changes to patched files')),
+        (b'', b'no-backup', None, _(b'do not save backup copies of files')),
     ],
-    _('hg qpop [-a] [-f] [PATCH | INDEX]'),
+    _(b'hg qpop [-a] [-f] [PATCH | INDEX]'),
     helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
     helpbasic=True,
 )
@@ -3482,29 +3534,29 @@
     opts = pycompat.byteskwargs(opts)
     opts = fixkeepchangesopts(ui, opts)
     localupdate = True
-    if opts.get('name'):
-        q = queue(ui, repo.baseui, repo.path, repo.vfs.join(opts.get('name')))
-        ui.warn(_('using patch queue: %s\n') % q.path)
+    if opts.get(b'name'):
+        q = queue(ui, repo.baseui, repo.path, repo.vfs.join(opts.get(b'name')))
+        ui.warn(_(b'using patch queue: %s\n') % q.path)
         localupdate = False
     else:
         q = repo.mq
     ret = q.pop(
         repo,
         patch,
-        force=opts.get('force'),
+        force=opts.get(b'force'),
         update=localupdate,
-        all=opts.get('all'),
-        nobackup=opts.get('no_backup'),
-        keepchanges=opts.get('keep_changes'),
+        all=opts.get(b'all'),
+        nobackup=opts.get(b'no_backup'),
+        keepchanges=opts.get(b'keep_changes'),
     )
     q.savedirty()
     return ret
 
 
 @command(
-    "qrename|qmv",
+    b"qrename|qmv",
     [],
-    _('hg qrename PATCH1 [PATCH2]'),
+    _(b'hg qrename PATCH1 [PATCH2]'),
     helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
 )
 def rename(ui, repo, patch, name=None, **opts):
@@ -3523,19 +3575,19 @@
         patch = q.lookup(patch)
     else:
         if not q.applied:
-            ui.write(_('no patches applied\n'))
+            ui.write(_(b'no patches applied\n'))
             return
-        patch = q.lookup('qtip')
+        patch = q.lookup(b'qtip')
     absdest = q.join(name)
     if os.path.isdir(absdest):
         name = normname(os.path.join(name, os.path.basename(patch)))
         absdest = q.join(name)
     q.checkpatchname(name)
 
-    ui.note(_('renaming %s to %s\n') % (patch, name))
+    ui.note(_(b'renaming %s to %s\n') % (patch, name))
     i = q.findseries(patch)
     guards = q.guard_re.findall(q.fullseries[i])
-    q.fullseries[i] = name + ''.join([' #' + g for g in guards])
+    q.fullseries[i] = name + b''.join([b' #' + g for g in guards])
     q.parseseries()
     q.seriesdirty = True
 
@@ -3552,7 +3604,7 @@
     if r and patch in r.dirstate:
         wctx = r[None]
         with r.wlock():
-            if r.dirstate[patch] == 'a':
+            if r.dirstate[patch] == b'a':
                 r.dirstate.drop(patch)
                 r.dirstate.add(name)
             else:
@@ -3563,12 +3615,12 @@
 
 
 @command(
-    "qrestore",
+    b"qrestore",
     [
-        ('d', 'delete', None, _('delete save entry')),
-        ('u', 'update', None, _('update queue working directory')),
+        (b'd', b'delete', None, _(b'delete save entry')),
+        (b'u', b'update', None, _(b'update queue working directory')),
     ],
-    _('hg qrestore [-d] [-u] REV'),
+    _(b'hg qrestore [-d] [-u] REV'),
     helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
 )
 def restore(ui, repo, rev, **opts):
@@ -3585,15 +3637,15 @@
 
 
 @command(
-    "qsave",
+    b"qsave",
     [
-        ('c', 'copy', None, _('copy patch directory')),
-        ('n', 'name', '', _('copy directory name'), _('NAME')),
-        ('e', 'empty', None, _('clear queue status file')),
-        ('f', 'force', None, _('force copy')),
+        (b'c', b'copy', None, _(b'copy patch directory')),
+        (b'n', b'name', b'', _(b'copy directory name'), _(b'NAME')),
+        (b'e', b'empty', None, _(b'clear queue status file')),
+        (b'f', b'force', None, _(b'force copy')),
     ]
     + cmdutil.commitopts,
-    _('hg qsave [-m TEXT] [-l FILE] [-c] [-n NAME] [-e] [-f]'),
+    _(b'hg qsave [-m TEXT] [-l FILE] [-c] [-n NAME] [-e] [-f]'),
     helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
 )
 def save(ui, repo, **opts):
@@ -3607,25 +3659,26 @@
     if ret:
         return ret
     q.savedirty()  # save to .hg/patches before copying
-    if opts.get('copy'):
+    if opts.get(b'copy'):
         path = q.path
-        if opts.get('name'):
-            newpath = os.path.join(q.basepath, opts.get('name'))
+        if opts.get(b'name'):
+            newpath = os.path.join(q.basepath, opts.get(b'name'))
             if os.path.exists(newpath):
                 if not os.path.isdir(newpath):
                     raise error.Abort(
-                        _('destination %s exists and is not ' 'a directory')
+                        _(b'destination %s exists and is not ' b'a directory')
                         % newpath
                     )
-                if not opts.get('force'):
+                if not opts.get(b'force'):
                     raise error.Abort(
-                        _('destination %s exists, ' 'use -f to force') % newpath
+                        _(b'destination %s exists, ' b'use -f to force')
+                        % newpath
                     )
         else:
             newpath = savename(path)
-        ui.warn(_("copy %s to %s\n") % (path, newpath))
+        ui.warn(_(b"copy %s to %s\n") % (path, newpath))
         util.copyfiles(path, newpath)
-    if opts.get('empty'):
+    if opts.get(b'empty'):
         del q.applied[:]
         q.applieddirty = True
         q.savedirty()
@@ -3633,14 +3686,14 @@
 
 
 @command(
-    "qselect",
+    b"qselect",
     [
-        ('n', 'none', None, _('disable all guards')),
-        ('s', 'series', None, _('list all guards in series file')),
-        ('', 'pop', None, _('pop to before first guarded applied patch')),
-        ('', 'reapply', None, _('pop, then reapply patches')),
+        (b'n', b'none', None, _(b'disable all guards')),
+        (b's', b'series', None, _(b'list all guards in series file')),
+        (b'', b'pop', None, _(b'pop to before first guarded applied patch')),
+        (b'', b'reapply', None, _(b'pop, then reapply patches')),
     ],
-    _('hg qselect [OPTION]... [GUARD]...'),
+    _(b'hg qselect [OPTION]... [GUARD]...'),
     helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
 )
 def select(ui, repo, *args, **opts):
@@ -3682,7 +3735,7 @@
     opts = pycompat.byteskwargs(opts)
     guards = q.active()
     pushable = lambda i: q.pushable(q.applied[i].name)[0]
-    if args or opts.get('none'):
+    if args or opts.get(b'none'):
         old_unapplied = q.unapplied(repo)
         old_guarded = [
             i for i in pycompat.xrange(len(q.applied)) if not pushable(i)
@@ -3690,8 +3743,8 @@
         q.setactive(args)
         q.savedirty()
         if not args:
-            ui.status(_('guards deactivated\n'))
-        if not opts.get('pop') and not opts.get('reapply'):
+            ui.status(_(b'guards deactivated\n'))
+        if not opts.get(b'pop') and not opts.get(b'reapply'):
             unapplied = q.unapplied(repo)
             guarded = [
                 i for i in pycompat.xrange(len(q.applied)) if not pushable(i)
@@ -3699,20 +3752,20 @@
             if len(unapplied) != len(old_unapplied):
                 ui.status(
                     _(
-                        'number of unguarded, unapplied patches has '
-                        'changed from %d to %d\n'
+                        b'number of unguarded, unapplied patches has '
+                        b'changed from %d to %d\n'
                     )
                     % (len(old_unapplied), len(unapplied))
                 )
             if len(guarded) != len(old_guarded):
                 ui.status(
                     _(
-                        'number of guarded, applied patches has changed '
-                        'from %d to %d\n'
+                        b'number of guarded, applied patches has changed '
+                        b'from %d to %d\n'
                     )
                     % (len(old_guarded), len(guarded))
                 )
-    elif opts.get('series'):
+    elif opts.get(b'series'):
         guards = {}
         noguards = 0
         for gs in q.seriesguards:
@@ -3722,29 +3775,29 @@
                 guards.setdefault(g, 0)
                 guards[g] += 1
         if ui.verbose:
-            guards['NONE'] = noguards
+            guards[b'NONE'] = noguards
         guards = list(guards.items())
         guards.sort(key=lambda x: x[0][1:])
         if guards:
-            ui.note(_('guards in series file:\n'))
+            ui.note(_(b'guards in series file:\n'))
             for guard, count in guards:
-                ui.note('%2d  ' % count)
-                ui.write(guard, '\n')
+                ui.note(b'%2d  ' % count)
+                ui.write(guard, b'\n')
         else:
-            ui.note(_('no guards in series file\n'))
+            ui.note(_(b'no guards in series file\n'))
     else:
         if guards:
-            ui.note(_('active guards:\n'))
+            ui.note(_(b'active guards:\n'))
             for g in guards:
-                ui.write(g, '\n')
+                ui.write(g, b'\n')
         else:
-            ui.write(_('no active guards\n'))
-    reapply = opts.get('reapply') and q.applied and q.applied[-1].name
+            ui.write(_(b'no active guards\n'))
+    reapply = opts.get(b'reapply') and q.applied and q.applied[-1].name
     popped = False
-    if opts.get('pop') or opts.get('reapply'):
+    if opts.get(b'pop') or opts.get(b'reapply'):
         for i in pycompat.xrange(len(q.applied)):
             if not pushable(i):
-                ui.status(_('popping guarded patches\n'))
+                ui.status(_(b'popping guarded patches\n'))
                 popped = True
                 if i == 0:
                     q.pop(repo, all=True)
@@ -3754,16 +3807,16 @@
     if popped:
         try:
             if reapply:
-                ui.status(_('reapplying unguarded patches\n'))
+                ui.status(_(b'reapplying unguarded patches\n'))
                 q.push(repo, reapply)
         finally:
             q.savedirty()
 
 
 @command(
-    "qfinish",
-    [('a', 'applied', None, _('finish all applied changesets'))],
-    _('hg qfinish [-a] [REV]...'),
+    b"qfinish",
+    [(b'a', b'applied', None, _(b'finish all applied changesets'))],
+    _(b'hg qfinish [-a] [REV]...'),
     helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
 )
 def finish(ui, repo, *revrange, **opts):
@@ -3785,18 +3838,18 @@
     Returns 0 on success.
     """
     if not opts.get(r'applied') and not revrange:
-        raise error.Abort(_('no revisions specified'))
+        raise error.Abort(_(b'no revisions specified'))
     elif opts.get(r'applied'):
-        revrange = ('qbase::qtip',) + revrange
+        revrange = (b'qbase::qtip',) + revrange
 
     q = repo.mq
     if not q.applied:
-        ui.status(_('no patches applied\n'))
+        ui.status(_(b'no patches applied\n'))
         return 0
 
     revs = scmutil.revrange(repo, revrange)
-    if repo['.'].rev() in revs and repo[None].files():
-        ui.warn(_('warning: uncommitted changes in the working directory\n'))
+    if repo[b'.'].rev() in revs and repo[None].files():
+        ui.warn(_(b'warning: uncommitted changes in the working directory\n'))
     # queue.finish may change phases but leaves the responsibility to lock
     # the repo to the caller, to avoid a deadlock with wlock. This command
     # code is responsible for this locking.
@@ -3807,16 +3860,16 @@
 
 
 @command(
-    "qqueue",
+    b"qqueue",
     [
-        ('l', 'list', False, _('list all available queues')),
-        ('', 'active', False, _('print name of active queue')),
-        ('c', 'create', False, _('create new queue')),
-        ('', 'rename', False, _('rename active queue')),
-        ('', 'delete', False, _('delete reference to queue')),
-        ('', 'purge', False, _('delete queue, and remove patch dir')),
+        (b'l', b'list', False, _(b'list all available queues')),
+        (b'', b'active', False, _(b'print name of active queue')),
+        (b'c', b'create', False, _(b'create new queue')),
+        (b'', b'rename', False, _(b'rename active queue')),
+        (b'', b'delete', False, _(b'delete reference to queue')),
+        (b'', b'purge', False, _(b'delete queue, and remove patch dir')),
     ],
-    _('[OPTION] [QUEUE]'),
+    _(b'[OPTION] [QUEUE]'),
     helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
 )
 def qqueue(ui, repo, name=None, **opts):
@@ -3841,19 +3894,19 @@
     Returns 0 on success.
     '''
     q = repo.mq
-    _defaultqueue = 'patches'
-    _allqueues = 'patches.queues'
-    _activequeue = 'patches.queue'
+    _defaultqueue = b'patches'
+    _allqueues = b'patches.queues'
+    _activequeue = b'patches.queue'
 
     def _getcurrent():
         cur = os.path.basename(q.path)
-        if cur.startswith('patches-'):
+        if cur.startswith(b'patches-'):
             cur = cur[8:]
         return cur
 
     def _noqueues():
         try:
-            fh = repo.vfs(_allqueues, 'r')
+            fh = repo.vfs(_allqueues, b'r')
             fh.close()
         except IOError:
             return True
@@ -3864,7 +3917,7 @@
         current = _getcurrent()
 
         try:
-            fh = repo.vfs(_allqueues, 'r')
+            fh = repo.vfs(_allqueues, b'r')
             queues = [queue.strip() for queue in fh if queue.strip()]
             fh.close()
             if current not in queues:
@@ -3878,112 +3931,112 @@
         if q.applied:
             raise error.Abort(
                 _(
-                    'new queue created, but cannot make active '
-                    'as patches are applied'
+                    b'new queue created, but cannot make active '
+                    b'as patches are applied'
                 )
             )
         _setactivenocheck(name)
 
     def _setactivenocheck(name):
-        fh = repo.vfs(_activequeue, 'w')
-        if name != 'patches':
+        fh = repo.vfs(_activequeue, b'w')
+        if name != b'patches':
             fh.write(name)
         fh.close()
 
     def _addqueue(name):
-        fh = repo.vfs(_allqueues, 'a')
-        fh.write('%s\n' % (name,))
+        fh = repo.vfs(_allqueues, b'a')
+        fh.write(b'%s\n' % (name,))
         fh.close()
 
     def _queuedir(name):
-        if name == 'patches':
-            return repo.vfs.join('patches')
+        if name == b'patches':
+            return repo.vfs.join(b'patches')
         else:
-            return repo.vfs.join('patches-' + name)
+            return repo.vfs.join(b'patches-' + name)
 
     def _validname(name):
         for n in name:
-            if n in ':\\/.':
+            if n in b':\\/.':
                 return False
         return True
 
     def _delete(name):
         if name not in existing:
-            raise error.Abort(_('cannot delete queue that does not exist'))
+            raise error.Abort(_(b'cannot delete queue that does not exist'))
 
         current = _getcurrent()
 
         if name == current:
-            raise error.Abort(_('cannot delete currently active queue'))
-
-        fh = repo.vfs('patches.queues.new', 'w')
+            raise error.Abort(_(b'cannot delete currently active queue'))
+
+        fh = repo.vfs(b'patches.queues.new', b'w')
         for queue in existing:
             if queue == name:
                 continue
-            fh.write('%s\n' % (queue,))
+            fh.write(b'%s\n' % (queue,))
         fh.close()
-        repo.vfs.rename('patches.queues.new', _allqueues)
+        repo.vfs.rename(b'patches.queues.new', _allqueues)
 
     opts = pycompat.byteskwargs(opts)
-    if not name or opts.get('list') or opts.get('active'):
+    if not name or opts.get(b'list') or opts.get(b'active'):
         current = _getcurrent()
-        if opts.get('active'):
-            ui.write('%s\n' % (current,))
+        if opts.get(b'active'):
+            ui.write(b'%s\n' % (current,))
             return
         for queue in _getqueues():
-            ui.write('%s' % (queue,))
+            ui.write(b'%s' % (queue,))
             if queue == current and not ui.quiet:
-                ui.write(_(' (active)\n'))
+                ui.write(_(b' (active)\n'))
             else:
-                ui.write('\n')
+                ui.write(b'\n')
         return
 
     if not _validname(name):
         raise error.Abort(
-            _('invalid queue name, may not contain the characters ":\\/."')
+            _(b'invalid queue name, may not contain the characters ":\\/."')
         )
 
     with repo.wlock():
         existing = _getqueues()
 
-        if opts.get('create'):
+        if opts.get(b'create'):
             if name in existing:
-                raise error.Abort(_('queue "%s" already exists') % name)
+                raise error.Abort(_(b'queue "%s" already exists') % name)
             if _noqueues():
                 _addqueue(_defaultqueue)
             _addqueue(name)
             _setactive(name)
-        elif opts.get('rename'):
+        elif opts.get(b'rename'):
             current = _getcurrent()
             if name == current:
                 raise error.Abort(
-                    _('can\'t rename "%s" to its current name') % name
+                    _(b'can\'t rename "%s" to its current name') % name
                 )
             if name in existing:
-                raise error.Abort(_('queue "%s" already exists') % name)
+                raise error.Abort(_(b'queue "%s" already exists') % name)
 
             olddir = _queuedir(current)
             newdir = _queuedir(name)
 
             if os.path.exists(newdir):
                 raise error.Abort(
-                    _('non-queue directory "%s" already exists') % newdir
+                    _(b'non-queue directory "%s" already exists') % newdir
                 )
 
-            fh = repo.vfs('patches.queues.new', 'w')
+            fh = repo.vfs(b'patches.queues.new', b'w')
             for queue in existing:
                 if queue == current:
-                    fh.write('%s\n' % (name,))
+                    fh.write(b'%s\n' % (name,))
                     if os.path.exists(olddir):
                         util.rename(olddir, newdir)
                 else:
-                    fh.write('%s\n' % (queue,))
+                    fh.write(b'%s\n' % (queue,))
             fh.close()
-            repo.vfs.rename('patches.queues.new', _allqueues)
+            repo.vfs.rename(b'patches.queues.new', _allqueues)
             _setactivenocheck(name)
-        elif opts.get('delete'):
+        elif opts.get(b'delete'):
             _delete(name)
-        elif opts.get('purge'):
+        elif opts.get(b'purge'):
             if name in existing:
                 _delete(name)
             qdir = _queuedir(name)
@@ -3991,14 +4044,14 @@
                 shutil.rmtree(qdir)
         else:
             if name not in existing:
-                raise error.Abort(_('use --create to create a new queue'))
+                raise error.Abort(_(b'use --create to create a new queue'))
             _setactive(name)
 
 
 def mqphasedefaults(repo, roots):
     """callback used to set mq changeset as secret when no phase data exists"""
     if repo.mq.applied:
-        if repo.ui.configbool('mq', 'secret'):
+        if repo.ui.configbool(b'mq', b'secret'):
             mqphase = phases.secret
         else:
             mqphase = phases.draft
@@ -4028,7 +4081,7 @@
 
         def commit(
             self,
-            text="",
+            text=b"",
             user=None,
             date=None,
             match=None,
@@ -4039,7 +4092,7 @@
             if extra is None:
                 extra = {}
             self.abortifwdirpatched(
-                _('cannot commit over an applied mq patch'), force
+                _(b'cannot commit over an applied mq patch'), force
             )
 
             return super(mqrepo, self).commit(
@@ -4062,7 +4115,7 @@
                 # looking for pushed and shared changeset
                 for node in outapplied:
                     if self[node].phase() < phases.secret:
-                        raise error.Abort(_('source has mq patches applied'))
+                        raise error.Abort(_(b'source has mq patches applied'))
                 # no non-secret patches pushed
             super(mqrepo, self).checkpush(pushop)
 
@@ -4081,7 +4134,7 @@
                 self.unfiltered().changelog.rev(mqtags[-1][0])
             except error.LookupError:
                 self.ui.warn(
-                    _('mq status file refers to unknown node %s\n')
+                    _(b'mq status file refers to unknown node %s\n')
                     % short(mqtags[-1][0])
                 )
                 return result
@@ -4092,14 +4145,14 @@
             if not mqtags:
                 return result
 
-            mqtags.append((mqtags[-1][0], 'qtip'))
-            mqtags.append((mqtags[0][0], 'qbase'))
-            mqtags.append((self.changelog.parents(mqtags[0][0])[0], 'qparent'))
+            mqtags.append((mqtags[-1][0], b'qtip'))
+            mqtags.append((mqtags[0][0], b'qbase'))
+            mqtags.append((self.changelog.parents(mqtags[0][0])[0], b'qparent'))
             tags = result[0]
             for patch in mqtags:
                 if patch[1] in tags:
                     self.ui.warn(
-                        _('tag %s overrides mq patch of the same ' 'name\n')
+                        _(b'tag %s overrides mq patch of the same ' b'name\n')
                         % patch[1]
                     )
                 else:
@@ -4114,11 +4167,11 @@
 
 
 def mqimport(orig, ui, repo, *args, **kwargs):
-    if util.safehasattr(repo, 'abortifwdirpatched') and not kwargs.get(
+    if util.safehasattr(repo, b'abortifwdirpatched') and not kwargs.get(
         r'no_commit', False
     ):
         repo.abortifwdirpatched(
-            _('cannot import over an applied patch'), kwargs.get(r'force')
+            _(b'cannot import over an applied patch'), kwargs.get(r'force')
         )
     return orig(ui, repo, *args, **kwargs)
 
@@ -4133,13 +4186,13 @@
         repopath = args[0]
         if not hg.islocal(repopath):
             raise error.Abort(
-                _('only a local queue repository ' 'may be initialized')
+                _(b'only a local queue repository ' b'may be initialized')
             )
     else:
         repopath = cmdutil.findrepo(encoding.getcwd())
         if not repopath:
             raise error.Abort(
-                _('there is no Mercurial repository here ' '(.hg not found)')
+                _(b'there is no Mercurial repository here ' b'(.hg not found)')
             )
     repo = hg.repository(ui, repopath)
     return qinit(ui, repo, True)
@@ -4157,7 +4210,7 @@
     q = repo.mq
     r = q.qrepo()
     if not r:
-        raise error.Abort(_('no queue repository'))
+        raise error.Abort(_(b'no queue repository'))
     return orig(r.ui, r, *args, **kwargs)
 
 
@@ -4166,25 +4219,25 @@
     m = []
     a, u = len(q.applied), len(q.unapplied(repo))
     if a:
-        m.append(ui.label(_("%d applied"), 'qseries.applied') % a)
+        m.append(ui.label(_(b"%d applied"), b'qseries.applied') % a)
     if u:
-        m.append(ui.label(_("%d unapplied"), 'qseries.unapplied') % u)
+        m.append(ui.label(_(b"%d unapplied"), b'qseries.unapplied') % u)
     if m:
         # i18n: column positioning for "hg summary"
-        ui.write(_("mq:     %s\n") % ', '.join(m))
+        ui.write(_(b"mq:     %s\n") % b', '.join(m))
     else:
         # i18n: column positioning for "hg summary"
-        ui.note(_("mq:     (empty queue)\n"))
+        ui.note(_(b"mq:     (empty queue)\n"))
 
 
 revsetpredicate = registrar.revsetpredicate()
 
 
-@revsetpredicate('mq()')
+@revsetpredicate(b'mq()')
 def revsetmq(repo, subset, x):
     """Changesets managed by MQ.
     """
-    revsetlang.getargs(x, 0, 0, _("mq takes no arguments"))
+    revsetlang.getargs(x, 0, 0, _(b"mq takes no arguments"))
     applied = {repo[r.node].rev() for r in repo.mq.applied}
     return smartset.baseset([r for r in subset if r in applied])
 
@@ -4196,12 +4249,12 @@
 def extsetup(ui):
     # Ensure mq wrappers are called first, regardless of extension load order by
     # NOT wrapping in uisetup() and instead deferring to init stage two here.
-    mqopt = [('', 'mq', None, _("operate on patch repository"))]
-
-    extensions.wrapcommand(commands.table, 'import', mqimport)
-    cmdutil.summaryhooks.add('mq', summaryhook)
-
-    entry = extensions.wrapcommand(commands.table, 'init', mqinit)
+    mqopt = [(b'', b'mq', None, _(b"operate on patch repository"))]
+
+    extensions.wrapcommand(commands.table, b'import', mqimport)
+    cmdutil.summaryhooks.add(b'mq', summaryhook)
+
+    entry = extensions.wrapcommand(commands.table, b'init', mqinit)
     entry[1].extend(mqopt)
 
     def dotable(cmdtable):
@@ -4221,11 +4274,11 @@
 
 
 colortable = {
-    'qguard.negative': 'red',
-    'qguard.positive': 'yellow',
-    'qguard.unguarded': 'green',
-    'qseries.applied': 'blue bold underline',
-    'qseries.guarded': 'black bold',
-    'qseries.missing': 'red bold',
-    'qseries.unapplied': 'black bold',
+    b'qguard.negative': b'red',
+    b'qguard.positive': b'yellow',
+    b'qguard.unguarded': b'green',
+    b'qseries.applied': b'blue bold underline',
+    b'qseries.guarded': b'black bold',
+    b'qseries.missing': b'red bold',
+    b'qseries.unapplied': b'black bold',
 }
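
The mq hunks above repeat one pattern throughout: a command receives **opts
with native str keys, converts them once with pycompat.byteskwargs(opts), and
from then on looks options up with bytes keys (opts.get(b'force')); the few
r'...' keys that survive the rewrite (e.g. opts.get(r'applied')) are lookups
that happen before that conversion. A minimal runnable sketch of the pattern,
with a stand-in for the real helper in mercurial.pycompat:

def byteskwargs(dic):
    # stand-in for mercurial.pycompat.byteskwargs: hg option names are
    # ASCII, so encoding each str key as latin-1 is safe
    return {k.encode('latin-1'): v for k, v in dic.items()}

def save(**opts):
    if opts.get(r'applied'):       # before conversion: str (r'') keys
        pass
    opts = byteskwargs(opts)       # keys become b'force', b'empty', ...
    return opts.get(b'force')      # after conversion: bytes (b'') keys

assert save(applied=None, force=True) is True
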
--- a/hgext/narrow/__init__.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/narrow/__init__.py	Sun Oct 06 09:48:39 2019 -0400
@@ -12,7 +12,7 @@
 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
 # be specifying the version(s) of Mercurial they are tested with, or
 # leave the attribute unspecified.
-testedwith = 'ships-with-hg-core'
+testedwith = b'ships-with-hg-core'
 
 from mercurial import (
     localrepo,
@@ -41,10 +41,10 @@
 # ellipsis nodes to be a hard requirement also enforce strictly linear
 # history for other scaling reasons.
 configitem(
-    'experimental',
-    'narrowservebrokenellipses',
+    b'experimental',
+    b'narrowservebrokenellipses',
     default=False,
-    alias=[('narrow', 'serveellipses')],
+    alias=[(b'narrow', b'serveellipses')],
 )
 
 # Export the commands table for Mercurial to see.
@@ -68,7 +68,7 @@
     if not repo.local():
         return
 
-    repo.ui.setconfig('experimental', 'narrow', True, 'narrow-ext')
+    repo.ui.setconfig(b'experimental', b'narrow', True, b'narrow-ext')
     if repository.NARROW_REQUIREMENT in repo.requirements:
         narrowrepo.wraprepo(repo)
         narrowwirepeer.reposetup(repo)
--- a/hgext/narrow/narrowbundle2.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/narrow/narrowbundle2.py	Sun Oct 06 09:48:39 2019 -0400
@@ -29,16 +29,16 @@
 from mercurial.interfaces import repository
 from mercurial.utils import stringutil
 
-_NARROWACL_SECTION = 'narrowacl'
-_CHANGESPECPART = 'narrow:changespec'
-_RESSPECS = 'narrow:responsespec'
-_SPECPART = 'narrow:spec'
-_SPECPART_INCLUDE = 'include'
-_SPECPART_EXCLUDE = 'exclude'
-_KILLNODESIGNAL = 'KILL'
-_DONESIGNAL = 'DONE'
-_ELIDEDCSHEADER = '>20s20s20sl'  # cset id, p1, p2, len(text)
-_ELIDEDMFHEADER = '>20s20s20s20sl'  # manifest id, p1, p2, link id, len(text)
+_NARROWACL_SECTION = b'narrowacl'
+_CHANGESPECPART = b'narrow:changespec'
+_RESSPECS = b'narrow:responsespec'
+_SPECPART = b'narrow:spec'
+_SPECPART_INCLUDE = b'include'
+_SPECPART_EXCLUDE = b'exclude'
+_KILLNODESIGNAL = b'KILL'
+_DONESIGNAL = b'DONE'
+_ELIDEDCSHEADER = b'>20s20s20sl'  # cset id, p1, p2, len(text)
+_ELIDEDMFHEADER = b'>20s20s20s20sl'  # manifest id, p1, p2, link id, len(text)
 _CSHEADERSIZE = struct.calcsize(_ELIDEDCSHEADER)
 _MFHEADERSIZE = struct.calcsize(_ELIDEDMFHEADER)
 
@@ -53,16 +53,16 @@
     common=None,
     **kwargs
 ):
-    assert repo.ui.configbool('experimental', 'narrowservebrokenellipses')
+    assert repo.ui.configbool(b'experimental', b'narrowservebrokenellipses')
 
-    cgversions = b2caps.get('changegroup')
+    cgversions = b2caps.get(b'changegroup')
     cgversions = [
         v
         for v in cgversions
         if v in changegroup.supportedoutgoingversions(repo)
     ]
     if not cgversions:
-        raise ValueError(_('no common changegroup version'))
+        raise ValueError(_(b'no common changegroup version'))
     version = max(cgversions)
 
     oldinclude = sorted(filter(bool, kwargs.get(r'oldincludepats', [])))
@@ -104,7 +104,7 @@
     if depth is not None:
         depth = int(depth)
         if depth < 1:
-            raise error.Abort(_('depth must be positive, got %d') % depth)
+            raise error.Abort(_(b'depth must be positive, got %d') % depth)
 
     heads = set(heads or repo.heads())
     common = set(common or [nullid])
@@ -127,7 +127,7 @@
         # until they've built up the full new state.
         # Convert to revnums and intersect with "common". The client should
         # have made it a subset of "common" already, but let's be safe.
-        known = set(repo.revs("%ln & ::%ln", known, common))
+        known = set(repo.revs(b"%ln & ::%ln", known, common))
         # TODO: we could send only roots() of this set, and the
         # list of nodes in common, and the client could work out
         # what to strip, instead of us explicitly sending every
@@ -154,18 +154,18 @@
                 ellipsisroots=newellipsis,
                 fullnodes=newfull,
             )
-            cgdata = packer.generate(common, newvisit, False, 'narrow_widen')
+            cgdata = packer.generate(common, newvisit, False, b'narrow_widen')
 
-            part = bundler.newpart('changegroup', data=cgdata)
-            part.addparam('version', version)
-            if 'treemanifest' in repo.requirements:
-                part.addparam('treemanifest', '1')
+            part = bundler.newpart(b'changegroup', data=cgdata)
+            part.addparam(b'version', version)
+            if b'treemanifest' in repo.requirements:
+                part.addparam(b'treemanifest', b'1')
 
     visitnodes, relevant_nodes, ellipsisroots = exchange._computeellipsis(
         repo, common, heads, set(), newmatch, depth=depth
     )
 
-    repo.ui.debug('Found %d relevant revs\n' % len(relevant_nodes))
+    repo.ui.debug(b'Found %d relevant revs\n' % len(relevant_nodes))
     if visitnodes:
         packer = changegroup.getbundler(
             version,
@@ -176,12 +176,12 @@
             ellipsisroots=ellipsisroots,
             fullnodes=relevant_nodes,
         )
-        cgdata = packer.generate(common, visitnodes, False, 'narrow_widen')
+        cgdata = packer.generate(common, visitnodes, False, b'narrow_widen')
 
-        part = bundler.newpart('changegroup', data=cgdata)
-        part.addparam('version', version)
-        if 'treemanifest' in repo.requirements:
-            part.addparam('treemanifest', '1')
+        part = bundler.newpart(b'changegroup', data=cgdata)
+        part.addparam(b'version', version)
+        if b'treemanifest' in repo.requirements:
+            part.addparam(b'treemanifest', b'1')
 
 
 @bundle2.parthandler(_SPECPART, (_SPECPART_INCLUDE, _SPECPART_EXCLUDE))
@@ -190,8 +190,8 @@
     # released. New servers will send a mandatory bundle2 part named
     # 'Narrowspec' and will send specs as data instead of params.
     # Refer to issue5952 and 6019
-    includepats = set(inpart.params.get(_SPECPART_INCLUDE, '').splitlines())
-    excludepats = set(inpart.params.get(_SPECPART_EXCLUDE, '').splitlines())
+    includepats = set(inpart.params.get(_SPECPART_INCLUDE, b'').splitlines())
+    excludepats = set(inpart.params.get(_SPECPART_EXCLUDE, b'').splitlines())
     narrowspec.validatepatterns(includepats)
     narrowspec.validatepatterns(excludepats)
 
@@ -205,7 +205,7 @@
 @bundle2.parthandler(_RESSPECS)
 def _handlenarrowspecs(op, inpart):
     data = inpart.read()
-    inc, exc = data.split('\0')
+    inc, exc = data.split(b'\0')
     includepats = set(inc.splitlines())
     excludepats = set(exc.splitlines())
     narrowspec.validatepatterns(includepats)
@@ -241,7 +241,7 @@
                 clkills.add(ck)
         else:
             raise error.Abort(
-                _('unexpected changespec node chunk type: %s') % chunksignal
+                _(b'unexpected changespec node chunk type: %s') % chunksignal
             )
         chunksignal = changegroup.readexactly(inpart, 4)
 
@@ -255,7 +255,7 @@
 
         localrepo.localrepository._bookmarks.set(repo, dummybmstore())
         chgrpfile = repair.strip(
-            op.ui, repo, list(clkills), backup=True, topic='widen'
+            op.ui, repo, list(clkills), backup=True, topic=b'widen'
         )
         if chgrpfile:
             op._widen_uninterr = repo.ui.uninterruptible()
@@ -266,7 +266,7 @@
     # will currently always be there when using the core+narrowhg server, but
     # other servers may include a changespec part even when not widening (e.g.
     # because we're deepening a shallow repo).
-    if util.safehasattr(repo, 'setnewnarrowpats'):
+    if util.safehasattr(repo, b'setnewnarrowpats'):
         repo.setnewnarrowpats()
 
 
@@ -281,20 +281,22 @@
     del op._widen_bundle
     vfs = repo.vfs
 
-    ui.note(_("adding branch\n"))
-    f = vfs.open(chgrpfile, "rb")
+    ui.note(_(b"adding branch\n"))
+    f = vfs.open(chgrpfile, b"rb")
     try:
         gen = exchange.readbundle(ui, f, chgrpfile, vfs)
         # silence internal shuffling chatter
-        override = {('ui', 'quiet'): True}
+        override = {(b'ui', b'quiet'): True}
         if ui.verbose:
             override = {}
         with ui.configoverride(override):
             if isinstance(gen, bundle2.unbundle20):
-                with repo.transaction('strip') as tr:
+                with repo.transaction(b'strip') as tr:
                     bundle2.processbundle(repo, gen, lambda: tr)
             else:
-                gen.apply(repo, 'strip', 'bundle:' + vfs.join(chgrpfile), True)
+                gen.apply(
+                    repo, b'strip', b'bundle:' + vfs.join(chgrpfile), True
+                )
     finally:
         f.close()
 
@@ -305,7 +307,7 @@
         except OSError as e:
             if e.errno != errno.ENOENT:
                 ui.warn(
-                    _('error removing %s: %s\n')
+                    _(b'error removing %s: %s\n')
                     % (undovfs.join(undofile), stringutil.forcebytestr(e))
                 )
 
@@ -318,14 +320,14 @@
     """Enable narrow repo support in bundle2-related extension points."""
     getbundleargs = wireprototypes.GETBUNDLE_ARGUMENTS
 
-    getbundleargs['narrow'] = 'boolean'
-    getbundleargs['depth'] = 'plain'
-    getbundleargs['oldincludepats'] = 'csv'
-    getbundleargs['oldexcludepats'] = 'csv'
-    getbundleargs['known'] = 'csv'
+    getbundleargs[b'narrow'] = b'boolean'
+    getbundleargs[b'depth'] = b'plain'
+    getbundleargs[b'oldincludepats'] = b'csv'
+    getbundleargs[b'oldexcludepats'] = b'csv'
+    getbundleargs[b'known'] = b'csv'
 
     # Extend changegroup serving to handle requests from narrow clients.
-    origcgfn = exchange.getbundle2partsmapping['changegroup']
+    origcgfn = exchange.getbundle2partsmapping[b'changegroup']
 
     def wrappedcgfn(*args, **kwargs):
         repo = args[1]
@@ -333,26 +335,26 @@
             kwargs = exchange.applynarrowacl(repo, kwargs)
 
         if kwargs.get(r'narrow', False) and repo.ui.configbool(
-            'experimental', 'narrowservebrokenellipses'
+            b'experimental', b'narrowservebrokenellipses'
         ):
             getbundlechangegrouppart_narrow(*args, **kwargs)
         else:
             origcgfn(*args, **kwargs)
 
-    exchange.getbundle2partsmapping['changegroup'] = wrappedcgfn
+    exchange.getbundle2partsmapping[b'changegroup'] = wrappedcgfn
 
     # Extend changegroup receiver so client can fixup after widen requests.
-    origcghandler = bundle2.parthandlermapping['changegroup']
+    origcghandler = bundle2.parthandlermapping[b'changegroup']
 
     def wrappedcghandler(op, inpart):
         origcghandler(op, inpart)
-        if util.safehasattr(op, '_widen_bundle'):
+        if util.safehasattr(op, b'_widen_bundle'):
             handlechangegroup_widen(op, inpart)
-        if util.safehasattr(op, '_bookmarksbackup'):
+        if util.safehasattr(op, b'_bookmarksbackup'):
             localrepo.localrepository._bookmarks.set(
                 op.repo, op._bookmarksbackup
             )
             del op._bookmarksbackup
 
     wrappedcghandler.params = origcghandler.params
-    bundle2.parthandlermapping['changegroup'] = wrappedcghandler
+    bundle2.parthandlermapping[b'changegroup'] = wrappedcghandler
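
One detail worth noting in the narrowbundle2.py hunk: the elided-header
formats are handed straight to struct.calcsize a few lines below their
definition, and Python 3's struct module accepts bytes as well as str format
strings, so the b'' prefix is harmless there. A quick check of the sizes the
trailing comments describe ('>' forces standard sizes, so 'l' is 4 bytes):

import struct

_ELIDEDCSHEADER = b'>20s20s20sl'     # cset id, p1, p2, len(text)
_ELIDEDMFHEADER = b'>20s20s20s20sl'  # manifest id, p1, p2, link id, len(text)

assert struct.calcsize(_ELIDEDCSHEADER) == 3 * 20 + 4  # 64
assert struct.calcsize(_ELIDEDMFHEADER) == 4 * 20 + 4  # 84
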
--- a/hgext/narrow/narrowcommands.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/narrow/narrowcommands.py	Sun Oct 06 09:48:39 2019 -0400
@@ -39,59 +39,69 @@
 def setup():
     """Wraps user-facing mercurial commands with narrow-aware versions."""
 
-    entry = extensions.wrapcommand(commands.table, 'clone', clonenarrowcmd)
+    entry = extensions.wrapcommand(commands.table, b'clone', clonenarrowcmd)
     entry[1].append(
-        ('', 'narrow', None, _("create a narrow clone of select files"))
+        (b'', b'narrow', None, _(b"create a narrow clone of select files"))
     )
     entry[1].append(
-        ('', 'depth', '', _("limit the history fetched by distance from heads"))
+        (
+            b'',
+            b'depth',
+            b'',
+            _(b"limit the history fetched by distance from heads"),
+        )
     )
-    entry[1].append(('', 'narrowspec', '', _("read narrowspecs from file")))
+    entry[1].append((b'', b'narrowspec', b'', _(b"read narrowspecs from file")))
     # TODO(durin42): unify sparse/narrow --include/--exclude logic a bit
-    if 'sparse' not in extensions.enabled():
+    if b'sparse' not in extensions.enabled():
         entry[1].append(
-            ('', 'include', [], _("specifically fetch this file/directory"))
+            (b'', b'include', [], _(b"specifically fetch this file/directory"))
         )
         entry[1].append(
             (
-                '',
-                'exclude',
+                b'',
+                b'exclude',
                 [],
-                _("do not fetch this file/directory, even if included"),
+                _(b"do not fetch this file/directory, even if included"),
             )
         )
 
-    entry = extensions.wrapcommand(commands.table, 'pull', pullnarrowcmd)
+    entry = extensions.wrapcommand(commands.table, b'pull', pullnarrowcmd)
     entry[1].append(
-        ('', 'depth', '', _("limit the history fetched by distance from heads"))
+        (
+            b'',
+            b'depth',
+            b'',
+            _(b"limit the history fetched by distance from heads"),
+        )
     )
 
-    extensions.wrapcommand(commands.table, 'archive', archivenarrowcmd)
+    extensions.wrapcommand(commands.table, b'archive', archivenarrowcmd)
 
 
 def clonenarrowcmd(orig, ui, repo, *args, **opts):
     """Wraps clone command, so 'hg clone' first wraps localrepo.clone()."""
     opts = pycompat.byteskwargs(opts)
     wrappedextraprepare = util.nullcontextmanager()
-    narrowspecfile = opts['narrowspec']
+    narrowspecfile = opts[b'narrowspec']
 
     if narrowspecfile:
         filepath = os.path.join(encoding.getcwd(), narrowspecfile)
-        ui.status(_("reading narrowspec from '%s'\n") % filepath)
+        ui.status(_(b"reading narrowspec from '%s'\n") % filepath)
         try:
             fdata = util.readfile(filepath)
         except IOError as inst:
             raise error.Abort(
-                _("cannot read narrowspecs from '%s': %s")
+                _(b"cannot read narrowspecs from '%s': %s")
                 % (filepath, encoding.strtolocal(inst.strerror))
             )
 
-        includes, excludes, profiles = sparse.parseconfig(ui, fdata, 'narrow')
+        includes, excludes, profiles = sparse.parseconfig(ui, fdata, b'narrow')
         if profiles:
             raise error.Abort(
                 _(
-                    "cannot specify other files using '%include' in"
-                    " narrowspec"
+                    b"cannot specify other files using '%include' in"
+                    b" narrowspec"
                 )
             )
 
@@ -99,20 +109,20 @@
         narrowspec.validatepatterns(excludes)
 
         # narrowspec is passed, so assume the user wants a narrow clone
-        opts['narrow'] = True
-        opts['include'].extend(includes)
-        opts['exclude'].extend(excludes)
+        opts[b'narrow'] = True
+        opts[b'include'].extend(includes)
+        opts[b'exclude'].extend(excludes)
 
-    if opts['narrow']:
+    if opts[b'narrow']:
 
         def pullbundle2extraprepare_widen(orig, pullop, kwargs):
             orig(pullop, kwargs)
 
-            if opts.get('depth'):
-                kwargs['depth'] = opts['depth']
+            if opts.get(b'depth'):
+                kwargs[b'depth'] = opts[b'depth']
 
         wrappedextraprepare = extensions.wrappedfunction(
-            exchange, '_pullbundle2extraprepare', pullbundle2extraprepare_widen
+            exchange, b'_pullbundle2extraprepare', pullbundle2extraprepare_widen
         )
 
     with wrappedextraprepare:
@@ -127,10 +137,10 @@
         def pullbundle2extraprepare_widen(orig, pullop, kwargs):
             orig(pullop, kwargs)
             if opts.get(r'depth'):
-                kwargs['depth'] = opts[r'depth']
+                kwargs[b'depth'] = opts[r'depth']
 
         wrappedextraprepare = extensions.wrappedfunction(
-            exchange, '_pullbundle2extraprepare', pullbundle2extraprepare_widen
+            exchange, b'_pullbundle2extraprepare', pullbundle2extraprepare_widen
         )
 
     with wrappedextraprepare:
@@ -159,33 +169,33 @@
         return orig(pullop, kwargs)
 
     if wireprototypes.NARROWCAP not in pullop.remote.capabilities():
-        raise error.Abort(_("server does not support narrow clones"))
+        raise error.Abort(_(b"server does not support narrow clones"))
     orig(pullop, kwargs)
-    kwargs['narrow'] = True
+    kwargs[b'narrow'] = True
     include, exclude = repo.narrowpats
-    kwargs['oldincludepats'] = include
-    kwargs['oldexcludepats'] = exclude
+    kwargs[b'oldincludepats'] = include
+    kwargs[b'oldexcludepats'] = exclude
     if include:
-        kwargs['includepats'] = include
+        kwargs[b'includepats'] = include
     if exclude:
-        kwargs['excludepats'] = exclude
+        kwargs[b'excludepats'] = exclude
     # calculate known nodes only in ellipses cases because in non-ellipses cases
     # we have all the nodes
     if wireprototypes.ELLIPSESCAP1 in pullop.remote.capabilities():
-        kwargs['known'] = [
+        kwargs[b'known'] = [
             node.hex(ctx.node())
-            for ctx in repo.set('::%ln', pullop.common)
+            for ctx in repo.set(b'::%ln', pullop.common)
             if ctx.node() != node.nullid
         ]
-        if not kwargs['known']:
+        if not kwargs[b'known']:
             # Mercurial serializes an empty list as '' and deserializes it as
             # [''], so delete it instead to avoid handling the empty string on
             # the server.
-            del kwargs['known']
+            del kwargs[b'known']
 
 
 extensions.wrapfunction(
-    exchange, '_pullbundle2extraprepare', pullbundle2extraprepare
+    exchange, b'_pullbundle2extraprepare', pullbundle2extraprepare
 )
 
 
@@ -208,62 +218,64 @@
     # have any changes to files that will be untracked.
     unfi = repo.unfiltered()
     outgoing = discovery.findcommonoutgoing(unfi, remote, commoninc=commoninc)
-    ui.status(_('looking for local changes to affected paths\n'))
+    ui.status(_(b'looking for local changes to affected paths\n'))
     localnodes = []
     for n in itertools.chain(outgoing.missing, outgoing.excluded):
         if any(oldmatch(f) and not newmatch(f) for f in unfi[n].files()):
             localnodes.append(n)
-    revstostrip = unfi.revs('descendants(%ln)', localnodes)
-    hiddenrevs = repoview.filterrevs(repo, 'visible')
+    revstostrip = unfi.revs(b'descendants(%ln)', localnodes)
+    hiddenrevs = repoview.filterrevs(repo, b'visible')
     visibletostrip = list(
         repo.changelog.node(r) for r in (revstostrip - hiddenrevs)
     )
     if visibletostrip:
         ui.status(
             _(
-                'The following changeset(s) or their ancestors have '
-                'local changes not on the remote:\n'
+                b'The following changeset(s) or their ancestors have '
+                b'local changes not on the remote:\n'
             )
         )
         maxnodes = 10
         if ui.verbose or len(visibletostrip) <= maxnodes:
             for n in visibletostrip:
-                ui.status('%s\n' % node.short(n))
+                ui.status(b'%s\n' % node.short(n))
         else:
             for n in visibletostrip[:maxnodes]:
-                ui.status('%s\n' % node.short(n))
+                ui.status(b'%s\n' % node.short(n))
             ui.status(
-                _('...and %d more, use --verbose to list all\n')
+                _(b'...and %d more, use --verbose to list all\n')
                 % (len(visibletostrip) - maxnodes)
             )
         if not force:
             raise error.Abort(
-                _('local changes found'),
-                hint=_('use --force-delete-local-changes to ' 'ignore'),
+                _(b'local changes found'),
+                hint=_(b'use --force-delete-local-changes to ' b'ignore'),
             )
 
     with ui.uninterruptible():
         if revstostrip:
             tostrip = [unfi.changelog.node(r) for r in revstostrip]
-            if repo['.'].node() in tostrip:
+            if repo[b'.'].node() in tostrip:
                 # stripping working copy, so move to a different commit first
                 urev = max(
                     repo.revs(
-                        '(::%n) - %ln + null', repo['.'].node(), visibletostrip
+                        b'(::%n) - %ln + null',
+                        repo[b'.'].node(),
+                        visibletostrip,
                     )
                 )
                 hg.clean(repo, urev)
-            overrides = {('devel', 'strip-obsmarkers'): False}
-            with ui.configoverride(overrides, 'narrow'):
-                repair.strip(ui, unfi, tostrip, topic='narrow')
+            overrides = {(b'devel', b'strip-obsmarkers'): False}
+            with ui.configoverride(overrides, b'narrow'):
+                repair.strip(ui, unfi, tostrip, topic=b'narrow')
 
         todelete = []
         for f, f2, size in repo.store.datafiles():
-            if f.startswith('data/'):
+            if f.startswith(b'data/'):
                 file = f[5:-2]
                 if not newmatch(file):
                     todelete.append(f)
-            elif f.startswith('meta/'):
+            elif f.startswith(b'meta/'):
                 dir = f[5:-13]
                 dirs = sorted(util.dirs({dir})) + [dir]
                 include = True
@@ -272,20 +284,20 @@
                     if not visit:
                         include = False
                         break
-                    if visit == 'all':
+                    if visit == b'all':
                         break
                 if not include:
                     todelete.append(f)
 
         repo.destroying()
 
-        with repo.transaction('narrowing'):
+        with repo.transaction(b'narrowing'):
             # Update the narrowspec before removing revlogs, so the repo
             # won't be corrupt in case of a crash
             repo.setnarrowpats(newincludes, newexcludes)
 
             for f in todelete:
-                ui.status(_('deleting %s\n') % f)
+                ui.status(_(b'deleting %s\n') % f)
                 util.unlinkpath(repo.svfs.join(f))
                 repo.store.markremoved(f)
 
@@ -327,11 +339,11 @@
     def pullbundle2extraprepare_widen(orig, pullop, kwargs):
         orig(pullop, kwargs)
         # The old{in,ex}cludepats have already been set by orig()
-        kwargs['includepats'] = newincludes
-        kwargs['excludepats'] = newexcludes
+        kwargs[b'includepats'] = newincludes
+        kwargs[b'excludepats'] = newexcludes
 
     wrappedextraprepare = extensions.wrappedfunction(
-        exchange, '_pullbundle2extraprepare', pullbundle2extraprepare_widen
+        exchange, b'_pullbundle2extraprepare', pullbundle2extraprepare_widen
     )
 
     # define a function that narrowbundle2 can call after creating the
@@ -341,7 +353,7 @@
 
     repo.setnewnarrowpats = setnewnarrowpats
     # silence the devel-warning of applying an empty changegroup
-    overrides = {('devel', 'all-warnings'): False}
+    overrides = {(b'devel', b'all-warnings'): False}
 
     common = commoninc[0]
     with ui.uninterruptible():
@@ -358,28 +370,30 @@
             if ellipsesremote:
                 known = [
                     ctx.node()
-                    for ctx in repo.set('::%ln', common)
+                    for ctx in repo.set(b'::%ln', common)
                     if ctx.node() != node.nullid
                 ]
             with remote.commandexecutor() as e:
                 bundle = e.callcommand(
-                    'narrow_widen',
+                    b'narrow_widen',
                     {
-                        'oldincludes': oldincludes,
-                        'oldexcludes': oldexcludes,
-                        'newincludes': newincludes,
-                        'newexcludes': newexcludes,
-                        'cgversion': '03',
-                        'commonheads': common,
-                        'known': known,
-                        'ellipses': ellipsesremote,
+                        b'oldincludes': oldincludes,
+                        b'oldexcludes': oldexcludes,
+                        b'newincludes': newincludes,
+                        b'newexcludes': newexcludes,
+                        b'cgversion': b'03',
+                        b'commonheads': common,
+                        b'known': known,
+                        b'ellipses': ellipsesremote,
                     },
                 ).result()
 
-            trmanager = exchange.transactionmanager(repo, 'widen', remote.url())
-            with trmanager, repo.ui.configoverride(overrides, 'widen'):
+            trmanager = exchange.transactionmanager(
+                repo, b'widen', remote.url()
+            )
+            with trmanager, repo.ui.configoverride(overrides, b'widen'):
                 op = bundle2.bundleoperation(
-                    repo, trmanager.transaction, source='widen'
+                    repo, trmanager.transaction, source=b'widen'
                 )
                 # TODO: we should catch error.Abort here
                 bundle2.processbundle(repo, bundle, op=op)
@@ -388,7 +402,7 @@
             with ds.parentchange():
                 ds.setparents(p1, p2)
 
-        with repo.transaction('widening'):
+        with repo.transaction(b'widening'):
             repo.setnewnarrowpats()
             narrowspec.updateworkingcopy(repo)
             narrowspec.copytoworkingcopy(repo)
@@ -396,35 +410,40 @@
 
 # TODO(rdamazio): Make new matcher format and update description
 @command(
-    'tracked',
+    b'tracked',
     [
-        ('', 'addinclude', [], _('new paths to include')),
-        ('', 'removeinclude', [], _('old paths to no longer include')),
+        (b'', b'addinclude', [], _(b'new paths to include')),
+        (b'', b'removeinclude', [], _(b'old paths to no longer include')),
         (
-            '',
-            'auto-remove-includes',
+            b'',
+            b'auto-remove-includes',
             False,
-            _('automatically choose unused includes to remove'),
+            _(b'automatically choose unused includes to remove'),
         ),
-        ('', 'addexclude', [], _('new paths to exclude')),
-        ('', 'import-rules', '', _('import narrowspecs from a file')),
-        ('', 'removeexclude', [], _('old paths to no longer exclude')),
-        ('', 'clear', False, _('whether to replace the existing narrowspec')),
+        (b'', b'addexclude', [], _(b'new paths to exclude')),
+        (b'', b'import-rules', b'', _(b'import narrowspecs from a file')),
+        (b'', b'removeexclude', [], _(b'old paths to no longer exclude')),
         (
-            '',
-            'force-delete-local-changes',
+            b'',
+            b'clear',
             False,
-            _('forces deletion of local changes when narrowing'),
+            _(b'whether to replace the existing narrowspec'),
         ),
         (
-            '',
-            'update-working-copy',
+            b'',
+            b'force-delete-local-changes',
             False,
-            _('update working copy when the store has changed'),
+            _(b'forces deletion of local changes when narrowing'),
+        ),
+        (
+            b'',
+            b'update-working-copy',
+            False,
+            _(b'update working copy when the store has changed'),
         ),
     ]
     + commands.remoteopts,
-    _('[OPTIONS]... [REMOTE]'),
+    _(b'[OPTIONS]... [REMOTE]'),
     inferrepo=True,
 )
 def trackedcmd(ui, repo, remotepath=None, *pats, **opts):
@@ -464,47 +483,47 @@
     if repository.NARROW_REQUIREMENT not in repo.requirements:
         raise error.Abort(
             _(
-                'the tracked command is only supported on '
-                'repositories cloned with --narrow'
+                b'the tracked command is only supported on '
+                b'repositories cloned with --narrow'
             )
         )
 
     # Before supporting it, decide whether "hg tracked --clear" should mean
     # tracking no paths or all paths.
-    if opts['clear']:
-        raise error.Abort(_('the --clear option is not yet supported'))
+    if opts[b'clear']:
+        raise error.Abort(_(b'the --clear option is not yet supported'))
 
     # import rules from a file
-    newrules = opts.get('import_rules')
+    newrules = opts.get(b'import_rules')
     if newrules:
         try:
             filepath = os.path.join(encoding.getcwd(), newrules)
             fdata = util.readfile(filepath)
         except IOError as inst:
             raise error.Abort(
-                _("cannot read narrowspecs from '%s': %s")
+                _(b"cannot read narrowspecs from '%s': %s")
                 % (filepath, encoding.strtolocal(inst.strerror))
             )
         includepats, excludepats, profiles = sparse.parseconfig(
-            ui, fdata, 'narrow'
+            ui, fdata, b'narrow'
         )
         if profiles:
             raise error.Abort(
                 _(
-                    "including other spec files using '%include' "
-                    "is not supported in narrowspec"
+                    b"including other spec files using '%include' "
+                    b"is not supported in narrowspec"
                 )
             )
-        opts['addinclude'].extend(includepats)
-        opts['addexclude'].extend(excludepats)
+        opts[b'addinclude'].extend(includepats)
+        opts[b'addexclude'].extend(excludepats)
 
-    addedincludes = narrowspec.parsepatterns(opts['addinclude'])
-    removedincludes = narrowspec.parsepatterns(opts['removeinclude'])
-    addedexcludes = narrowspec.parsepatterns(opts['addexclude'])
-    removedexcludes = narrowspec.parsepatterns(opts['removeexclude'])
-    autoremoveincludes = opts['auto_remove_includes']
+    addedincludes = narrowspec.parsepatterns(opts[b'addinclude'])
+    removedincludes = narrowspec.parsepatterns(opts[b'removeinclude'])
+    addedexcludes = narrowspec.parsepatterns(opts[b'addexclude'])
+    removedexcludes = narrowspec.parsepatterns(opts[b'removeexclude'])
+    autoremoveincludes = opts[b'auto_remove_includes']
 
-    update_working_copy = opts['update_working_copy']
+    update_working_copy = opts[b'update_working_copy']
     only_show = not (
         addedincludes
         or removedincludes
@@ -529,27 +548,27 @@
 
     # Only print the current narrowspec.
     if only_show:
-        ui.pager('tracked')
-        fm = ui.formatter('narrow', opts)
+        ui.pager(b'tracked')
+        fm = ui.formatter(b'narrow', opts)
         for i in sorted(oldincludes):
             fm.startitem()
-            fm.write('status', '%s ', 'I', label='narrow.included')
-            fm.write('pat', '%s\n', i, label='narrow.included')
+            fm.write(b'status', b'%s ', b'I', label=b'narrow.included')
+            fm.write(b'pat', b'%s\n', i, label=b'narrow.included')
         for i in sorted(oldexcludes):
             fm.startitem()
-            fm.write('status', '%s ', 'X', label='narrow.excluded')
-            fm.write('pat', '%s\n', i, label='narrow.excluded')
+            fm.write(b'status', b'%s ', b'X', label=b'narrow.excluded')
+            fm.write(b'pat', b'%s\n', i, label=b'narrow.excluded')
         fm.end()
         return 0
 
     if update_working_copy:
-        with repo.wlock(), repo.lock(), repo.transaction('narrow-wc'):
+        with repo.wlock(), repo.lock(), repo.transaction(b'narrow-wc'):
             narrowspec.updateworkingcopy(repo)
             narrowspec.copytoworkingcopy(repo)
         return 0
 
     if not (widening or narrowing or autoremoveincludes):
-        ui.status(_("nothing to widen or narrow\n"))
+        ui.status(_(b"nothing to widen or narrow\n"))
         return 0
 
     with repo.wlock(), repo.lock():
@@ -558,16 +577,16 @@
         # Find the revisions we have in common with the remote. These will
         # be used for finding local-only changes for narrowing. They will
         # also define the set of revisions to update for widening.
-        remotepath = ui.expandpath(remotepath or 'default')
+        remotepath = ui.expandpath(remotepath or b'default')
         url, branches = hg.parseurl(remotepath)
-        ui.status(_('comparing with %s\n') % util.hidepassword(url))
+        ui.status(_(b'comparing with %s\n') % util.hidepassword(url))
         remote = hg.peer(repo, opts, url)
 
         # check narrow support before doing anything if widening needs to be
         # performed. In the future we should also abort if the client is
         # ellipses-enabled and the server does not support ellipses.
         if widening and wireprototypes.NARROWCAP not in remote.capabilities():
-            raise error.Abort(_("server does not support narrow clones"))
+            raise error.Abort(_(b"server does not support narrow clones"))
 
         commoninc = discovery.findcommonincoming(repo, remote)
 
@@ -575,7 +594,7 @@
             outgoing = discovery.findcommonoutgoing(
                 repo, remote, commoninc=commoninc
             )
-            ui.status(_('looking for unused includes to remove\n'))
+            ui.status(_(b'looking for unused includes to remove\n'))
             localfiles = set()
             for n in itertools.chain(outgoing.missing, outgoing.excluded):
                 localfiles.update(repo[n].files())
@@ -586,17 +605,20 @@
                     suggestedremovals.append(include)
             if suggestedremovals:
                 for s in suggestedremovals:
-                    ui.status('%s\n' % s)
+                    ui.status(b'%s\n' % s)
                 if (
                     ui.promptchoice(
-                        _('remove these unused includes (yn)?' '$$ &Yes $$ &No')
+                        _(
+                            b'remove these unused includes (yn)?'
+                            b'$$ &Yes $$ &No'
+                        )
                     )
                     == 0
                 ):
                     removedincludes.update(suggestedremovals)
                     narrowing = True
             else:
-                ui.status(_('found no unused includes\n'))
+                ui.status(_(b'found no unused includes\n'))
 
         if narrowing:
             newincludes = oldincludes - removedincludes
@@ -610,7 +632,7 @@
                 oldexcludes,
                 newincludes,
                 newexcludes,
-                opts['force_delete_local_changes'],
+                opts[b'force_delete_local_changes'],
             )
             # _narrow() updated the narrowspec and _widen() below needs to
             # use the updated values as its base (otherwise removed includes
--- a/hgext/narrow/narrowdirstate.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/narrow/narrowdirstate.py	Sun Oct 06 09:48:39 2019 -0400
@@ -21,8 +21,8 @@
                 if f is not None and not narrowmatch(f) and f not in self:
                     raise error.Abort(
                         _(
-                            "cannot track '%s' - it is outside "
-                            + "the narrow clone"
+                            b"cannot track '%s' - it is outside "
+                            + b"the narrow clone"
                         )
                         % f
                     )
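
The narrowdirstate.py hunk keeps an explicit '+' between the two halves of
the abort message, while most of this patch relies on adjacent literals
instead (e.g. _(b'destination %s exists and is not ' b'a directory') in the
mq hunks). Both spellings produce the same bytes; adjacent literals are
simply joined at compile time rather than at run time:

explicit = b"cannot track '%s' - it is outside " + b"the narrow clone"
implicit = b"cannot track '%s' - it is outside " b"the narrow clone"
assert explicit == implicit
assert explicit % b'foo' == b"cannot track 'foo' - it is outside the narrow clone"
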
--- a/hgext/narrow/narrowtemplates.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/narrow/narrowtemplates.py	Sun Oct 06 09:48:39 2019 -0400
@@ -23,30 +23,30 @@
     return False
 
 
-@templatekeyword('ellipsis', requires={'repo', 'ctx'})
+@templatekeyword(b'ellipsis', requires={b'repo', b'ctx'})
 def ellipsis(context, mapping):
     """String. 'ellipsis' if the change is an ellipsis node, else ''."""
-    repo = context.resource(mapping, 'repo')
-    ctx = context.resource(mapping, 'ctx')
+    repo = context.resource(mapping, b'repo')
+    ctx = context.resource(mapping, b'ctx')
     if _isellipsis(repo, ctx.rev()):
-        return 'ellipsis'
-    return ''
+        return b'ellipsis'
+    return b''
 
 
-@templatekeyword('outsidenarrow', requires={'repo', 'ctx'})
+@templatekeyword(b'outsidenarrow', requires={b'repo', b'ctx'})
 def outsidenarrow(context, mapping):
     """String. 'outsidenarrow' if the change affects no tracked files,
     else ''."""
-    repo = context.resource(mapping, 'repo')
-    ctx = context.resource(mapping, 'ctx')
+    repo = context.resource(mapping, b'repo')
+    ctx = context.resource(mapping, b'ctx')
     m = repo.narrowmatch()
     if ctx.files() and not m.always():
         if not any(m(f) for f in ctx.files()):
-            return 'outsidenarrow'
-    return ''
+            return b'outsidenarrow'
+    return b''
 
 
-@revsetpredicate('ellipsis()')
+@revsetpredicate(b'ellipsis()')
 def ellipsisrevset(repo, subset, x):
     """Changesets that are ellipsis nodes."""
     return subset.filter(lambda r: _isellipsis(repo, r))
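
The template keywords above now return b'ellipsis', b'outsidenarrow' and b''
rather than their str spellings for the same reason as every other literal in
this patch: Mercurial's output layer deals in bytes, and on Python 3 a stray
str no longer concatenates with bytes:

try:
    b'changeset flag: ' + 'ellipsis'    # str leaking into bytes output
except TypeError:
    pass                                # "can't concat str to bytes"
assert b'changeset flag: ' + b'ellipsis' == b'changeset flag: ellipsis'
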
--- a/hgext/narrow/narrowwirepeer.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/narrow/narrowwirepeer.py	Sun Oct 06 09:48:39 2019 -0400
@@ -29,26 +29,26 @@
 def reposetup(repo):
     def wirereposetup(ui, peer):
         def wrapped(orig, cmd, *args, **kwargs):
-            if cmd == 'unbundle':
+            if cmd == b'unbundle':
                 # TODO: don't blindly add include/exclude wireproto
                 # arguments to unbundle.
                 include, exclude = repo.narrowpats
-                kwargs[r"includepats"] = ','.join(include)
-                kwargs[r"excludepats"] = ','.join(exclude)
+                kwargs[r"includepats"] = b','.join(include)
+                kwargs[r"excludepats"] = b','.join(exclude)
             return orig(cmd, *args, **kwargs)
 
-        extensions.wrapfunction(peer, '_calltwowaystream', wrapped)
+        extensions.wrapfunction(peer, b'_calltwowaystream', wrapped)
 
     hg.wirepeersetupfuncs.append(wirereposetup)
 
 
 @wireprotov1server.wireprotocommand(
-    'narrow_widen',
-    'oldincludes oldexcludes'
-    ' newincludes newexcludes'
-    ' commonheads cgversion'
-    ' known ellipses',
-    permission='pull',
+    b'narrow_widen',
+    b'oldincludes oldexcludes'
+    b' newincludes newexcludes'
+    b' commonheads cgversion'
+    b' known ellipses',
+    permission=b'pull',
 )
 def narrow_widen(
     repo,
@@ -95,7 +95,7 @@
 
         common = wireprototypes.decodelist(commonheads)
         known = wireprototypes.decodelist(known)
-        if ellipses == '0':
+        if ellipses == b'0':
             ellipses = False
         else:
             ellipses = bool(ellipses)
@@ -135,11 +135,11 @@
             )
     except error.Abort as exc:
         bundler = bundle2.bundle20(repo.ui)
-        manargs = [('message', pycompat.bytestr(exc))]
+        manargs = [(b'message', pycompat.bytestr(exc))]
         advargs = []
         if exc.hint is not None:
-            advargs.append(('hint', exc.hint))
-        bundler.addpart(bundle2.bundlepart('error:abort', manargs, advargs))
+            advargs.append((b'hint', exc.hint))
+        bundler.addpart(bundle2.bundlepart(b'error:abort', manargs, advargs))
         preferuncompressed = True
 
     chunks = bundler.getchunks()
@@ -155,6 +155,6 @@
     for ch in (r'oldincludes', r'newincludes', r'oldexcludes', r'newexcludes'):
         kwargs[ch] = b','.join(kwargs[ch])
 
-    kwargs[r'ellipses'] = '%i' % bool(kwargs[r'ellipses'])
-    f = remote._callcompressable('narrow_widen', **kwargs)
+    kwargs[r'ellipses'] = b'%i' % bool(kwargs[r'ellipses'])
+    f = remote._callcompressable(b'narrow_widen', **kwargs)
     return bundle2.getunbundler(remote.ui, f)
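
The 'ellipses' wire argument above travels as a byte string: the client
serializes it with bytes %-formatting (available since Python 3.5, PEP 461)
and the server treats anything other than b'0' (or empty) as true. A minimal
sketch, with illustrative helper names that are not Mercurial APIs:

    def encode_ellipses(flag):
        # bool is an int subclass, so %i renders it as 0 or 1
        return b'%i' % bool(flag)

    def decode_ellipses(raw):
        # mirror the server-side check: b'0' and empty are false
        return bool(raw) and raw != b'0'

    assert encode_ellipses(True) == b'1'
    assert decode_ellipses(encode_ellipses(False)) is False
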
--- a/hgext/notify.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/notify.py	Sun Oct 06 09:48:39 2019 -0400
@@ -173,67 +173,67 @@
 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
 # be specifying the version(s) of Mercurial they are tested with, or
 # leave the attribute unspecified.
-testedwith = 'ships-with-hg-core'
+testedwith = b'ships-with-hg-core'
 
 configtable = {}
 configitem = registrar.configitem(configtable)
 
 configitem(
-    'notify', 'changegroup', default=None,
+    b'notify', b'changegroup', default=None,
 )
 configitem(
-    'notify', 'config', default=None,
+    b'notify', b'config', default=None,
 )
 configitem(
-    'notify', 'diffstat', default=True,
+    b'notify', b'diffstat', default=True,
 )
 configitem(
-    'notify', 'domain', default=None,
+    b'notify', b'domain', default=None,
 )
 configitem(
-    'notify', 'messageidseed', default=None,
+    b'notify', b'messageidseed', default=None,
 )
 configitem(
-    'notify', 'fromauthor', default=None,
+    b'notify', b'fromauthor', default=None,
 )
 configitem(
-    'notify', 'incoming', default=None,
+    b'notify', b'incoming', default=None,
 )
 configitem(
-    'notify', 'maxdiff', default=300,
+    b'notify', b'maxdiff', default=300,
 )
 configitem(
-    'notify', 'maxdiffstat', default=-1,
+    b'notify', b'maxdiffstat', default=-1,
 )
 configitem(
-    'notify', 'maxsubject', default=67,
+    b'notify', b'maxsubject', default=67,
 )
 configitem(
-    'notify', 'mbox', default=None,
+    b'notify', b'mbox', default=None,
 )
 configitem(
-    'notify', 'merge', default=True,
+    b'notify', b'merge', default=True,
 )
 configitem(
-    'notify', 'outgoing', default=None,
+    b'notify', b'outgoing', default=None,
 )
 configitem(
-    'notify', 'sources', default='serve',
+    b'notify', b'sources', default=b'serve',
 )
 configitem(
-    'notify', 'showfunc', default=None,
+    b'notify', b'showfunc', default=None,
 )
 configitem(
-    'notify', 'strip', default=0,
+    b'notify', b'strip', default=0,
 )
 configitem(
-    'notify', 'style', default=None,
+    b'notify', b'style', default=None,
 )
 configitem(
-    'notify', 'template', default=None,
+    b'notify', b'template', default=None,
 )
 configitem(
-    'notify', 'test', default=True,
+    b'notify', b'test', default=True,
 )
 
 # template for single changeset can include email headers.
@@ -257,7 +257,7 @@
 '''
 
 deftemplates = {
-    'changegroup': multiple_template,
+    b'changegroup': multiple_template,
 }
 
 
@@ -266,29 +266,29 @@
 
     def __init__(self, ui, repo, hooktype):
         self.ui = ui
-        cfg = self.ui.config('notify', 'config')
+        cfg = self.ui.config(b'notify', b'config')
         if cfg:
-            self.ui.readconfig(cfg, sections=['usersubs', 'reposubs'])
+            self.ui.readconfig(cfg, sections=[b'usersubs', b'reposubs'])
         self.repo = repo
-        self.stripcount = int(self.ui.config('notify', 'strip'))
+        self.stripcount = int(self.ui.config(b'notify', b'strip'))
         self.root = self.strip(self.repo.root)
-        self.domain = self.ui.config('notify', 'domain')
-        self.mbox = self.ui.config('notify', 'mbox')
-        self.test = self.ui.configbool('notify', 'test')
+        self.domain = self.ui.config(b'notify', b'domain')
+        self.mbox = self.ui.config(b'notify', b'mbox')
+        self.test = self.ui.configbool(b'notify', b'test')
         self.charsets = mail._charsets(self.ui)
         self.subs = self.subscribers()
-        self.merge = self.ui.configbool('notify', 'merge')
-        self.showfunc = self.ui.configbool('notify', 'showfunc')
-        self.messageidseed = self.ui.config('notify', 'messageidseed')
+        self.merge = self.ui.configbool(b'notify', b'merge')
+        self.showfunc = self.ui.configbool(b'notify', b'showfunc')
+        self.messageidseed = self.ui.config(b'notify', b'messageidseed')
         if self.showfunc is None:
-            self.showfunc = self.ui.configbool('diff', 'showfunc')
+            self.showfunc = self.ui.configbool(b'diff', b'showfunc')
 
         mapfile = None
-        template = self.ui.config('notify', hooktype) or self.ui.config(
-            'notify', 'template'
+        template = self.ui.config(b'notify', hooktype) or self.ui.config(
+            b'notify', b'template'
         )
         if not template:
-            mapfile = self.ui.config('notify', 'style')
+            mapfile = self.ui.config(b'notify', b'style')
         if not mapfile and not template:
             template = deftemplates.get(hooktype) or single_template
         spec = logcmdutil.templatespec(template, mapfile)
@@ -300,7 +300,7 @@
         path = util.pconvert(path)
         count = self.stripcount
         while count > 0:
-            c = path.find('/')
+            c = path.find(b'/')
             if c == -1:
                 break
             path = path[c + 1 :]
@@ -312,31 +312,31 @@
 
         addr = stringutil.email(addr.strip())
         if self.domain:
-            a = addr.find('@localhost')
+            a = addr.find(b'@localhost')
             if a != -1:
                 addr = addr[:a]
-            if '@' not in addr:
-                return addr + '@' + self.domain
+            if b'@' not in addr:
+                return addr + b'@' + self.domain
         return addr
 
     def subscribers(self):
         '''return list of email addresses of subscribers to this repo.'''
         subs = set()
-        for user, pats in self.ui.configitems('usersubs'):
-            for pat in pats.split(','):
-                if '#' in pat:
-                    pat, revs = pat.split('#', 1)
+        for user, pats in self.ui.configitems(b'usersubs'):
+            for pat in pats.split(b','):
+                if b'#' in pat:
+                    pat, revs = pat.split(b'#', 1)
                 else:
                     revs = None
                 if fnmatch.fnmatch(self.repo.root, pat.strip()):
                     subs.add((self.fixmail(user), revs))
-        for pat, users in self.ui.configitems('reposubs'):
-            if '#' in pat:
-                pat, revs = pat.split('#', 1)
+        for pat, users in self.ui.configitems(b'reposubs'):
+            if b'#' in pat:
+                pat, revs = pat.split(b'#', 1)
             else:
                 revs = None
             if fnmatch.fnmatch(self.repo.root, pat):
-                for user in users.split(','):
+                for user in users.split(b','):
                     subs.add((self.fixmail(user), revs))
         return [
             (mail.addressencode(self.ui, s, self.charsets, self.test), r)
@@ -350,7 +350,7 @@
         self.t.show(
             ctx,
             changes=ctx.changeset(),
-            baseurl=self.ui.config('web', 'baseurl'),
+            baseurl=self.ui.config(b'web', b'baseurl'),
             root=self.repo.root,
             webroot=self.root,
             **props
@@ -359,7 +359,7 @@
 
     def skipsource(self, source):
         '''true if incoming changes from this source should be skipped.'''
-        ok_sources = self.ui.config('notify', 'sources').split()
+        ok_sources = self.ui.config(b'notify', b'sources').split()
         return source not in ok_sources
 
     def send(self, ctx, count, data):
@@ -371,13 +371,13 @@
             if spec is None:
                 subs.add(sub)
                 continue
-            revs = self.repo.revs('%r and %d:', spec, ctx.rev())
+            revs = self.repo.revs(b'%r and %d:', spec, ctx.rev())
             if len(revs):
                 subs.add(sub)
                 continue
         if len(subs) == 0:
             self.ui.debug(
-                'notify: no subscribers to selected repo ' 'and revset\n'
+                b'notify: no subscribers to selected repo ' b'and revset\n'
             )
             return
 
@@ -408,17 +408,17 @@
                 msg[k] = v
 
         msg[r'Date'] = encoding.strfromlocal(
-            dateutil.datestr(format="%a, %d %b %Y %H:%M:%S %1%2")
+            dateutil.datestr(format=b"%a, %d %b %Y %H:%M:%S %1%2")
         )
 
         # try to make subject line exist and be useful
         if not subject:
             if count > 1:
-                subject = _('%s: %d new changesets') % (self.root, count)
+                subject = _(b'%s: %d new changesets') % (self.root, count)
             else:
-                s = ctx.description().lstrip().split('\n', 1)[0].rstrip()
-                subject = '%s: %s' % (self.root, s)
-        maxsubject = int(self.ui.config('notify', 'maxsubject'))
+                s = ctx.description().lstrip().split(b'\n', 1)[0].rstrip()
+                subject = b'%s: %s' % (self.root, s)
+        maxsubject = int(self.ui.config(b'notify', b'maxsubject'))
         if maxsubject:
             subject = stringutil.ellipsis(subject, maxsubject)
         msg[r'Subject'] = encoding.strfromlocal(
@@ -427,8 +427,8 @@
 
         # try to make message have proper sender
         if not sender:
-            sender = self.ui.config('email', 'from') or self.ui.username()
-        if '@' not in sender or '@localhost' in sender:
+            sender = self.ui.config(b'email', b'from') or self.ui.username()
+        if b'@' not in sender or b'@localhost' in sender:
             sender = self.fixmail(sender)
         msg[r'From'] = encoding.strfromlocal(
             mail.addressencode(self.ui, sender, self.charsets, self.test)
@@ -437,16 +437,16 @@
         msg[r'X-Hg-Notification'] = r'changeset %s' % ctx
         if not msg[r'Message-Id']:
             msg[r'Message-Id'] = messageid(ctx, self.domain, self.messageidseed)
-        msg[r'To'] = encoding.strfromlocal(', '.join(sorted(subs)))
+        msg[r'To'] = encoding.strfromlocal(b', '.join(sorted(subs)))
 
         msgtext = encoding.strtolocal(msg.as_string())
         if self.test:
             self.ui.write(msgtext)
-            if not msgtext.endswith('\n'):
-                self.ui.write('\n')
+            if not msgtext.endswith(b'\n'):
+                self.ui.write(b'\n')
         else:
             self.ui.status(
-                _('notify: sending %d subscribers %d changes\n')
+                _(b'notify: sending %d subscribers %d changes\n')
                 % (len(subs), count)
             )
             mail.sendmail(
@@ -459,7 +459,7 @@
 
     def diff(self, ctx, ref=None):
 
-        maxdiff = int(self.ui.config('notify', 'maxdiff'))
+        maxdiff = int(self.ui.config(b'notify', b'maxdiff'))
         prev = ctx.p1().node()
         if ref:
             ref = ref.node()
@@ -468,31 +468,31 @@
         diffopts = patch.diffallopts(self.ui)
         diffopts.showfunc = self.showfunc
         chunks = patch.diff(self.repo, prev, ref, opts=diffopts)
-        difflines = ''.join(chunks).splitlines()
+        difflines = b''.join(chunks).splitlines()
 
-        if self.ui.configbool('notify', 'diffstat'):
-            maxdiffstat = int(self.ui.config('notify', 'maxdiffstat'))
+        if self.ui.configbool(b'notify', b'diffstat'):
+            maxdiffstat = int(self.ui.config(b'notify', b'maxdiffstat'))
             s = patch.diffstat(difflines)
             # s may be empty, don't include the header if it is
             if s:
-                if maxdiffstat >= 0 and s.count("\n") > maxdiffstat + 1:
-                    s = s.split("\n")
-                    msg = _('\ndiffstat (truncated from %d to %d lines):\n\n')
+                if maxdiffstat >= 0 and s.count(b"\n") > maxdiffstat + 1:
+                    s = s.split(b"\n")
+                    msg = _(b'\ndiffstat (truncated from %d to %d lines):\n\n')
                     self.ui.write(msg % (len(s) - 2, maxdiffstat))
-                    self.ui.write("\n".join(s[:maxdiffstat] + s[-2:]))
+                    self.ui.write(b"\n".join(s[:maxdiffstat] + s[-2:]))
                 else:
-                    self.ui.write(_('\ndiffstat:\n\n%s') % s)
+                    self.ui.write(_(b'\ndiffstat:\n\n%s') % s)
 
         if maxdiff == 0:
             return
         elif maxdiff > 0 and len(difflines) > maxdiff:
-            msg = _('\ndiffs (truncated from %d to %d lines):\n\n')
+            msg = _(b'\ndiffs (truncated from %d to %d lines):\n\n')
             self.ui.write(msg % (len(difflines), maxdiff))
             difflines = difflines[:maxdiff]
         elif difflines:
-            self.ui.write(_('\ndiffs (%d lines):\n\n') % len(difflines))
+            self.ui.write(_(b'\ndiffs (%d lines):\n\n') % len(difflines))
 
-        self.ui.write("\n".join(difflines))
+        self.ui.write(b"\n".join(difflines))
 
 
 def hook(ui, repo, hooktype, node=None, source=None, **kwargs):
@@ -505,17 +505,17 @@
     ctx = repo.unfiltered()[node]
 
     if not n.subs:
-        ui.debug('notify: no subscribers to repository %s\n' % n.root)
+        ui.debug(b'notify: no subscribers to repository %s\n' % n.root)
         return
     if n.skipsource(source):
-        ui.debug('notify: changes have source "%s" - skipping\n' % source)
+        ui.debug(b'notify: changes have source "%s" - skipping\n' % source)
         return
 
     ui.pushbuffer()
-    data = ''
+    data = b''
     count = 0
-    author = ''
-    if hooktype == 'changegroup' or hooktype == 'outgoing':
+    author = b''
+    if hooktype == b'changegroup' or hooktype == b'outgoing':
         for rev in repo.changelog.revs(start=ctx.rev()):
             if n.node(repo[rev]):
                 count += 1
@@ -524,17 +524,17 @@
             else:
                 data += ui.popbuffer()
                 ui.note(
-                    _('notify: suppressing notification for merge %d:%s\n')
+                    _(b'notify: suppressing notification for merge %d:%s\n')
                     % (rev, repo[rev].hex()[:12])
                 )
                 ui.pushbuffer()
         if count:
-            n.diff(ctx, repo['tip'])
+            n.diff(ctx, repo[b'tip'])
     elif ctx.rev() in repo:
         if not n.node(ctx):
             ui.popbuffer()
             ui.note(
-                _('notify: suppressing notification for merge %d:%s\n')
+                _(b'notify: suppressing notification for merge %d:%s\n')
                 % (ctx.rev(), ctx.hex()[:12])
             )
             return
@@ -544,9 +544,9 @@
             author = ctx.user()
 
     data += ui.popbuffer()
-    fromauthor = ui.config('notify', 'fromauthor')
+    fromauthor = ui.config(b'notify', b'fromauthor')
     if author and fromauthor:
-        data = '\n'.join(['From: %s' % author, data])
+        data = b'\n'.join([b'From: %s' % author, data])
 
     if count:
         n.send(ctx, count, data)
@@ -559,9 +559,9 @@
         host = encoding.strtolocal(socket.getfqdn())
     if messageidseed:
         messagehash = hashlib.sha512(ctx.hex() + messageidseed)
-        messageid = '<hg.%s@%s>' % (messagehash.hexdigest()[:64], host)
+        messageid = b'<hg.%s@%s>' % (messagehash.hexdigest()[:64], host)
     else:
-        messageid = '<hg.%s.%d.%d@%s>' % (
+        messageid = b'<hg.%s.%d.%d@%s>' % (
             ctx,
             int(time.time()),
             hash(ctx.repo().root),
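
The messageid() hunk above derives a stable message id from the changeset
hash and a configured seed. A hedged sketch of the same scheme (the helper
name is hypothetical); note that on Python 3 hexdigest() returns str, so the
digest must be re-encoded before the bytes interpolation:

    import hashlib

    def stable_messageid(ctxhex, seed, host):
        digest = hashlib.sha512(ctxhex + seed).hexdigest()[:64]
        return b'<hg.%s@%s>' % (digest.encode('ascii'), host)

    assert stable_messageid(b'abc123', b'seed', b'example.com').startswith(b'<hg.')
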
--- a/hgext/pager.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/pager.py	Sun Oct 06 09:48:39 2019 -0400
@@ -35,29 +35,29 @@
 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
 # be specifying the version(s) of Mercurial they are tested with, or
 # leave the attribute unspecified.
-testedwith = 'ships-with-hg-core'
+testedwith = b'ships-with-hg-core'
 
 configtable = {}
 configitem = registrar.configitem(configtable)
 
 configitem(
-    'pager', 'attend', default=lambda: attended,
+    b'pager', b'attend', default=lambda: attended,
 )
 
 
 def uisetup(ui):
     def pagecmd(orig, ui, options, cmd, cmdfunc):
-        auto = options['pager'] == 'auto'
+        auto = options[b'pager'] == b'auto'
         if auto and not ui.pageractive:
             usepager = False
-            attend = ui.configlist('pager', 'attend')
-            ignore = ui.configlist('pager', 'ignore')
+            attend = ui.configlist(b'pager', b'attend')
+            ignore = ui.configlist(b'pager', b'ignore')
             cmds, _ = cmdutil.findcmd(cmd, commands.table)
 
             for cmd in cmds:
-                var = 'attend-%s' % cmd
-                if ui.config('pager', var, None):
-                    usepager = ui.configbool('pager', var, True)
+                var = b'attend-%s' % cmd
+                if ui.config(b'pager', var, None):
+                    usepager = ui.configbool(b'pager', var, True)
                     break
                 if cmd in attend or (cmd not in ignore and not attend):
                     usepager = True
@@ -69,13 +69,13 @@
                 # core code doesn't know about attend, so we have to
                 # lobotomize the ignore list so that the extension's
                 # behavior is preserved.
-                ui.setconfig('pager', 'ignore', '', 'pager')
-                ui.pager('extension-via-attend-' + cmd)
+                ui.setconfig(b'pager', b'ignore', b'', b'pager')
+                ui.pager(b'extension-via-attend-' + cmd)
             else:
                 ui.disablepager()
         return orig(ui, options, cmd, cmdfunc)
 
-    extensions.wrapfunction(dispatch, '_runcommand', pagecmd)
+    extensions.wrapfunction(dispatch, b'_runcommand', pagecmd)
 
 
-attended = ['annotate', 'cat', 'diff', 'export', 'glog', 'log', 'qdiff']
+attended = [b'annotate', b'cat', b'diff', b'export', b'glog', b'log', b'qdiff']
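
The pagecmd wrapper above resolves paging in a fixed order: an explicit
pager.attend-<cmd> setting wins, otherwise the command pages if it is listed
in 'attend', or if 'attend' is empty and the command is not in 'ignore'. A
pure-function sketch of that decision (names are illustrative):

    def wants_pager(cmd, attend, ignore, percmd):
        if cmd in percmd:  # pager.attend-<cmd> overrides everything
            return percmd[cmd]
        return cmd in attend or (cmd not in ignore and not attend)

    assert wants_pager(b'log', [b'log'], [], {}) is True
    assert wants_pager(b'log', [], [b'log'], {}) is False
    assert wants_pager(b'log', [b'diff'], [], {b'log': True}) is True
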
--- a/hgext/patchbomb.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/patchbomb.py	Sun Oct 06 09:48:39 2019 -0400
@@ -110,34 +110,34 @@
 configitem = registrar.configitem(configtable)
 
 configitem(
-    'patchbomb', 'bundletype', default=None,
+    b'patchbomb', b'bundletype', default=None,
 )
 configitem(
-    'patchbomb', 'bcc', default=None,
+    b'patchbomb', b'bcc', default=None,
 )
 configitem(
-    'patchbomb', 'cc', default=None,
+    b'patchbomb', b'cc', default=None,
 )
 configitem(
-    'patchbomb', 'confirm', default=False,
+    b'patchbomb', b'confirm', default=False,
 )
 configitem(
-    'patchbomb', 'flagtemplate', default=None,
+    b'patchbomb', b'flagtemplate', default=None,
 )
 configitem(
-    'patchbomb', 'from', default=None,
+    b'patchbomb', b'from', default=None,
 )
 configitem(
-    'patchbomb', 'intro', default='auto',
+    b'patchbomb', b'intro', default=b'auto',
 )
 configitem(
-    'patchbomb', 'publicurl', default=None,
+    b'patchbomb', b'publicurl', default=None,
 )
 configitem(
-    'patchbomb', 'reply-to', default=None,
+    b'patchbomb', b'reply-to', default=None,
 )
 configitem(
-    'patchbomb', 'to', default=None,
+    b'patchbomb', b'to', default=None,
 )
 
 if pycompat.ispy3:
@@ -149,7 +149,7 @@
 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
 # be specifying the version(s) of Mercurial they are tested with, or
 # leave the attribute unspecified.
-testedwith = 'ships-with-hg-core'
+testedwith = b'ships-with-hg-core'
 
 
 def _addpullheader(seq, ctx):
@@ -159,9 +159,9 @@
     # experimental config: patchbomb.publicurl
     # waiting for some logic that checks that the changesets are available on
     # the destination before patchbombing anything.
-    publicurl = repo.ui.config('patchbomb', 'publicurl')
+    publicurl = repo.ui.config(b'patchbomb', b'publicurl')
     if publicurl:
-        return 'Available At %s\n' '#              hg pull %s -r %s' % (
+        return b'Available At %s\n' b'#              hg pull %s -r %s' % (
             publicurl,
             publicurl,
             ctx,
@@ -170,53 +170,53 @@
 
 
 def uisetup(ui):
-    cmdutil.extraexport.append('pullurl')
-    cmdutil.extraexportmap['pullurl'] = _addpullheader
+    cmdutil.extraexport.append(b'pullurl')
+    cmdutil.extraexportmap[b'pullurl'] = _addpullheader
 
 
 def reposetup(ui, repo):
     if not repo.local():
         return
-    repo._wlockfreeprefix.add('last-email.txt')
+    repo._wlockfreeprefix.add(b'last-email.txt')
 
 
-def prompt(ui, prompt, default=None, rest=':'):
+def prompt(ui, prompt, default=None, rest=b':'):
     if default:
-        prompt += ' [%s]' % default
+        prompt += b' [%s]' % default
     return ui.prompt(prompt + rest, default)
 
 
 def introwanted(ui, opts, number):
     '''is an introductory message apparently wanted?'''
-    introconfig = ui.config('patchbomb', 'intro')
-    if opts.get('intro') or opts.get('desc'):
+    introconfig = ui.config(b'patchbomb', b'intro')
+    if opts.get(b'intro') or opts.get(b'desc'):
         intro = True
-    elif introconfig == 'always':
+    elif introconfig == b'always':
         intro = True
-    elif introconfig == 'never':
+    elif introconfig == b'never':
         intro = False
-    elif introconfig == 'auto':
+    elif introconfig == b'auto':
         intro = number > 1
     else:
         ui.write_err(
-            _('warning: invalid patchbomb.intro value "%s"\n') % introconfig
+            _(b'warning: invalid patchbomb.intro value "%s"\n') % introconfig
         )
-        ui.write_err(_('(should be one of always, never, auto)\n'))
+        ui.write_err(_(b'(should be one of always, never, auto)\n'))
         intro = number > 1
     return intro
 
 
 def _formatflags(ui, repo, rev, flags):
     """build flag string optionally by template"""
-    tmpl = ui.config('patchbomb', 'flagtemplate')
+    tmpl = ui.config(b'patchbomb', b'flagtemplate')
     if not tmpl:
-        return ' '.join(flags)
+        return b' '.join(flags)
     out = util.stringio()
-    opts = {'template': templater.unquotestring(tmpl)}
-    with formatter.templateformatter(ui, out, 'patchbombflag', opts) as fm:
+    opts = {b'template': templater.unquotestring(tmpl)}
+    with formatter.templateformatter(ui, out, b'patchbombflag', opts) as fm:
         fm.startitem()
         fm.context(ctx=repo[rev])
-        fm.write('flags', '%s', fm.formatlist(flags, name='flag'))
+        fm.write(b'flags', b'%s', fm.formatlist(flags, name=b'flag'))
     return out.getvalue()
 
 
@@ -224,13 +224,13 @@
     """build prefix to patch subject"""
     flag = _formatflags(ui, repo, rev, flags)
     if flag:
-        flag = ' ' + flag
+        flag = b' ' + flag
 
     if not numbered:
-        return '[PATCH%s]' % flag
+        return b'[PATCH%s]' % flag
     else:
-        tlen = len("%d" % total)
-        return '[PATCH %0*d of %d%s]' % (tlen, idx, total, flag)
+        tlen = len(b"%d" % total)
+        return b'[PATCH %0*d of %d%s]' % (tlen, idx, total, flag)
 
 
 def makepatch(
@@ -248,28 +248,29 @@
 
     desc = []
     node = None
-    body = ''
+    body = b''
 
     for line in patchlines:
-        if line.startswith('#'):
-            if line.startswith('# Node ID'):
+        if line.startswith(b'#'):
+            if line.startswith(b'# Node ID'):
                 node = line.split()[-1]
             continue
-        if line.startswith('diff -r') or line.startswith('diff --git'):
+        if line.startswith(b'diff -r') or line.startswith(b'diff --git'):
             break
         desc.append(line)
 
     if not patchname and not node:
         raise ValueError
 
-    if opts.get('attach') and not opts.get('body'):
+    if opts.get(b'attach') and not opts.get(b'body'):
         body = (
-            '\n'.join(desc[1:]).strip() or 'Patch subject is complete summary.'
+            b'\n'.join(desc[1:]).strip()
+            or b'Patch subject is complete summary.'
         )
-        body += '\n\n\n'
+        body += b'\n\n\n'
 
-    if opts.get('plain'):
-        while patchlines and patchlines[0].startswith('# '):
+    if opts.get(b'plain'):
+        while patchlines and patchlines[0].startswith(b'# '):
             patchlines.pop(0)
         if patchlines:
             patchlines.pop(0)
@@ -277,19 +278,19 @@
             patchlines.pop(0)
 
     ds = patch.diffstat(patchlines)
-    if opts.get('diffstat'):
-        body += ds + '\n\n'
+    if opts.get(b'diffstat'):
+        body += ds + b'\n\n'
 
-    addattachment = opts.get('attach') or opts.get('inline')
-    if not addattachment or opts.get('body'):
-        body += '\n'.join(patchlines)
+    addattachment = opts.get(b'attach') or opts.get(b'inline')
+    if not addattachment or opts.get(b'body'):
+        body += b'\n'.join(patchlines)
 
     if addattachment:
         msg = emimemultipart.MIMEMultipart()
         if body:
-            msg.attach(mail.mimeencode(ui, body, _charsets, opts.get('test')))
+            msg.attach(mail.mimeencode(ui, body, _charsets, opts.get(b'test')))
         p = mail.mimetextpatch(
-            '\n'.join(patchlines), 'x-patch', opts.get('test')
+            b'\n'.join(patchlines), b'x-patch', opts.get(b'test')
         )
         binnode = nodemod.bin(node)
         # if the node is an mq patch, it will have the patch file's name as a tag
@@ -297,38 +298,38 @@
             patchtags = [
                 t
                 for t in repo.nodetags(binnode)
-                if t.endswith('.patch') or t.endswith('.diff')
+                if t.endswith(b'.patch') or t.endswith(b'.diff')
             ]
             if patchtags:
                 patchname = patchtags[0]
             elif total > 1:
                 patchname = cmdutil.makefilename(
-                    repo[node], '%b-%n.patch', seqno=idx, total=total
+                    repo[node], b'%b-%n.patch', seqno=idx, total=total
                 )
             else:
-                patchname = cmdutil.makefilename(repo[node], '%b.patch')
+                patchname = cmdutil.makefilename(repo[node], b'%b.patch')
         disposition = r'inline'
-        if opts.get('attach'):
+        if opts.get(b'attach'):
             disposition = r'attachment'
         p[r'Content-Disposition'] = (
             disposition + r'; filename=' + encoding.strfromlocal(patchname)
         )
         msg.attach(p)
     else:
-        msg = mail.mimetextpatch(body, display=opts.get('test'))
+        msg = mail.mimetextpatch(body, display=opts.get(b'test'))
 
     prefix = _formatprefix(
-        ui, repo, rev, opts.get('flag'), idx, total, numbered
+        ui, repo, rev, opts.get(b'flag'), idx, total, numbered
     )
-    subj = desc[0].strip().rstrip('. ')
+    subj = desc[0].strip().rstrip(b'. ')
     if not numbered:
-        subj = ' '.join([prefix, opts.get('subject') or subj])
+        subj = b' '.join([prefix, opts.get(b'subject') or subj])
     else:
-        subj = ' '.join([prefix, subj])
-    msg['Subject'] = mail.headencode(ui, subj, _charsets, opts.get('test'))
-    msg['X-Mercurial-Node'] = node
-    msg['X-Mercurial-Series-Index'] = '%i' % idx
-    msg['X-Mercurial-Series-Total'] = '%i' % total
+        subj = b' '.join([prefix, subj])
+    msg[b'Subject'] = mail.headencode(ui, subj, _charsets, opts.get(b'test'))
+    msg[b'X-Mercurial-Node'] = node
+    msg[b'X-Mercurial-Series-Index'] = b'%i' % idx
+    msg[b'X-Mercurial-Series-Total'] = b'%i' % total
     return msg, subj, ds
 
 
@@ -338,17 +339,17 @@
     Each patch in the list is itself a list of lines.
     """
     ui = repo.ui
-    prev = repo['.'].rev()
+    prev = repo[b'.'].rev()
     for r in revs:
         if r == prev and (repo[None].files() or repo[None].deleted()):
             ui.warn(
-                _('warning: working directory has ' 'uncommitted changes\n')
+                _(b'warning: working directory has ' b'uncommitted changes\n')
             )
         output = stringio()
         cmdutil.exportfile(
             repo, [r], output, opts=patch.difffeatureopts(ui, opts, git=True)
         )
-        yield output.getvalue().split('\n')
+        yield output.getvalue().split(b'\n')
 
 
 def _getbundle(repo, dest, **opts):
@@ -360,9 +361,9 @@
     The bundle is returned as a single in-memory binary blob.
     """
     ui = repo.ui
-    tmpdir = pycompat.mkdtemp(prefix='hg-email-bundle-')
-    tmpfn = os.path.join(tmpdir, 'bundle')
-    btype = ui.config('patchbomb', 'bundletype')
+    tmpdir = pycompat.mkdtemp(prefix=b'hg-email-bundle-')
+    tmpfn = os.path.join(tmpdir, b'bundle')
+    btype = ui.config(b'patchbomb', b'bundletype')
     if btype:
         opts[r'type'] = btype
     try:
@@ -389,13 +390,13 @@
         body = open(opts.get(r'desc')).read()
     else:
         ui.write(
-            _('\nWrite the introductory message for the ' 'patch series.\n\n')
+            _(b'\nWrite the introductory message for the ' b'patch series.\n\n')
         )
         body = ui.edit(
-            defaultbody, sender, repopath=repo.path, action='patchbombbody'
+            defaultbody, sender, repopath=repo.path, action=b'patchbombbody'
         )
         # Save series description in case sendmail fails
-        msgfile = repo.vfs('last-email.txt', 'wb')
+        msgfile = repo.vfs(b'last-email.txt', b'wb')
         msgfile.write(body)
         msgfile.close()
     return body
@@ -410,16 +411,16 @@
     ui = repo.ui
     _charsets = mail._charsets(ui)
     subj = opts.get(r'subject') or prompt(
-        ui, 'Subject:', 'A bundle for your repository'
+        ui, b'Subject:', b'A bundle for your repository'
     )
 
-    body = _getdescription(repo, '', sender, **opts)
+    body = _getdescription(repo, b'', sender, **opts)
     msg = emimemultipart.MIMEMultipart()
     if body:
         msg.attach(mail.mimeencode(ui, body, _charsets, opts.get(r'test')))
     datapart = emimebase.MIMEBase(r'application', r'x-mercurial-bundle')
     datapart.set_payload(bundle)
-    bundlename = '%s.hg' % opts.get(r'bundlename', 'bundle')
+    bundlename = b'%s.hg' % opts.get(r'bundlename', b'bundle')
     datapart.add_header(
         r'Content-Disposition',
         r'attachment',
@@ -427,7 +428,7 @@
     )
     emailencoders.encode_base64(datapart)
     msg.attach(datapart)
-    msg['Subject'] = mail.headencode(ui, subj, _charsets, opts.get(r'test'))
+    msg[b'Subject'] = mail.headencode(ui, subj, _charsets, opts.get(r'test'))
     return [(msg, subj, None)]
 
 
@@ -443,24 +444,24 @@
         ui, repo, revs.last(), opts.get(r'flag'), 0, len(patches), numbered=True
     )
     subj = opts.get(r'subject') or prompt(
-        ui, '(optional) Subject: ', rest=prefix, default=''
+        ui, b'(optional) Subject: ', rest=prefix, default=b''
     )
     if not subj:
         return None  # skip intro if the user doesn't bother
 
-    subj = prefix + ' ' + subj
+    subj = prefix + b' ' + subj
 
-    body = ''
+    body = b''
     if opts.get(r'diffstat'):
         # generate a cumulative diffstat of the whole patch series
         diffstat = patch.diffstat(sum(patches, []))
-        body = '\n' + diffstat
+        body = b'\n' + diffstat
     else:
         diffstat = None
 
     body = _getdescription(repo, body, sender, **opts)
     msg = mail.mimeencode(ui, body, _charsets, opts.get(r'test'))
-    msg['Subject'] = mail.headencode(ui, subj, _charsets, opts.get(r'test'))
+    msg[b'Subject'] = mail.headencode(ui, subj, _charsets, opts.get(r'test'))
     return (msg, subj, diffstat)
 
 
@@ -477,7 +478,7 @@
     patches = list(_getpatches(repo, revs, **opts))
     msgs = []
 
-    ui.write(_('this patch series consists of %d patches.\n\n') % len(patches))
+    ui.write(_(b'this patch series consists of %d patches.\n\n') % len(patches))
 
     # build the intro message, or skip it if the user declines
     if introwanted(ui, bytesopts, len(patches)):
@@ -514,132 +515,149 @@
 def _getoutgoing(repo, dest, revs):
     '''Return the revisions present locally but not in dest'''
     ui = repo.ui
-    url = ui.expandpath(dest or 'default-push', dest or 'default')
+    url = ui.expandpath(dest or b'default-push', dest or b'default')
     url = hg.parseurl(url)[0]
-    ui.status(_('comparing with %s\n') % util.hidepassword(url))
+    ui.status(_(b'comparing with %s\n') % util.hidepassword(url))
 
     revs = [r for r in revs if r >= 0]
     if not revs:
         revs = [repo.changelog.tiprev()]
-    revs = repo.revs('outgoing(%s) and ::%ld', dest or '', revs)
+    revs = repo.revs(b'outgoing(%s) and ::%ld', dest or b'', revs)
     if not revs:
-        ui.status(_("no changes found\n"))
+        ui.status(_(b"no changes found\n"))
     return revs
 
 
 def _msgid(node, timestamp):
     hostname = encoding.strtolocal(socket.getfqdn())
-    hostname = encoding.environ.get('HGHOSTNAME', hostname)
-    return '<%s.%d@%s>' % (node, timestamp, hostname)
+    hostname = encoding.environ.get(b'HGHOSTNAME', hostname)
+    return b'<%s.%d@%s>' % (node, timestamp, hostname)
 
 
 emailopts = [
-    ('', 'body', None, _('send patches as inline message text (default)')),
-    ('a', 'attach', None, _('send patches as attachments')),
-    ('i', 'inline', None, _('send patches as inline attachments')),
-    (
-        '',
-        'bcc',
-        [],
-        _('email addresses of blind carbon copy recipients'),
-        _('EMAIL'),
-    ),
-    ('c', 'cc', [], _('email addresses of copy recipients'), _('EMAIL')),
-    ('', 'confirm', None, _('ask for confirmation before sending')),
-    ('d', 'diffstat', None, _('add diffstat output to messages')),
-    ('', 'date', '', _('use the given date as the sending date'), _('DATE')),
+    (b'', b'body', None, _(b'send patches as inline message text (default)')),
+    (b'a', b'attach', None, _(b'send patches as attachments')),
+    (b'i', b'inline', None, _(b'send patches as inline attachments')),
     (
-        '',
-        'desc',
-        '',
-        _('use the given file as the series description'),
-        _('FILE'),
+        b'',
+        b'bcc',
+        [],
+        _(b'email addresses of blind carbon copy recipients'),
+        _(b'EMAIL'),
     ),
-    ('f', 'from', '', _('email address of sender'), _('EMAIL')),
-    ('n', 'test', None, _('print messages that would be sent')),
+    (b'c', b'cc', [], _(b'email addresses of copy recipients'), _(b'EMAIL')),
+    (b'', b'confirm', None, _(b'ask for confirmation before sending')),
+    (b'd', b'diffstat', None, _(b'add diffstat output to messages')),
     (
-        'm',
-        'mbox',
-        '',
-        _('write messages to mbox file instead of sending them'),
-        _('FILE'),
+        b'',
+        b'date',
+        b'',
+        _(b'use the given date as the sending date'),
+        _(b'DATE'),
     ),
     (
-        '',
-        'reply-to',
-        [],
-        _('email addresses replies should be sent to'),
-        _('EMAIL'),
+        b'',
+        b'desc',
+        b'',
+        _(b'use the given file as the series description'),
+        _(b'FILE'),
+    ),
+    (b'f', b'from', b'', _(b'email address of sender'), _(b'EMAIL')),
+    (b'n', b'test', None, _(b'print messages that would be sent')),
+    (
+        b'm',
+        b'mbox',
+        b'',
+        _(b'write messages to mbox file instead of sending them'),
+        _(b'FILE'),
     ),
     (
-        's',
-        'subject',
-        '',
-        _('subject of first message (intro or single patch)'),
-        _('TEXT'),
+        b'',
+        b'reply-to',
+        [],
+        _(b'email addresses replies should be sent to'),
+        _(b'EMAIL'),
     ),
-    ('', 'in-reply-to', '', _('message identifier to reply to'), _('MSGID')),
-    ('', 'flag', [], _('flags to add in subject prefixes'), _('FLAG')),
-    ('t', 'to', [], _('email addresses of recipients'), _('EMAIL')),
+    (
+        b's',
+        b'subject',
+        b'',
+        _(b'subject of first message (intro or single patch)'),
+        _(b'TEXT'),
+    ),
+    (
+        b'',
+        b'in-reply-to',
+        b'',
+        _(b'message identifier to reply to'),
+        _(b'MSGID'),
+    ),
+    (b'', b'flag', [], _(b'flags to add in subject prefixes'), _(b'FLAG')),
+    (b't', b'to', [], _(b'email addresses of recipients'), _(b'EMAIL')),
 ]
 
 
 @command(
-    'email',
+    b'email',
     [
-        ('g', 'git', None, _('use git extended diff format')),
-        ('', 'plain', None, _('omit hg patch header')),
+        (b'g', b'git', None, _(b'use git extended diff format')),
+        (b'', b'plain', None, _(b'omit hg patch header')),
         (
-            'o',
-            'outgoing',
+            b'o',
+            b'outgoing',
             None,
-            _('send changes not found in the target repository'),
+            _(b'send changes not found in the target repository'),
         ),
         (
-            'b',
-            'bundle',
+            b'b',
+            b'bundle',
             None,
-            _('send changes not in target as a binary bundle'),
+            _(b'send changes not in target as a binary bundle'),
         ),
         (
-            'B',
-            'bookmark',
-            '',
-            _('send changes only reachable by given bookmark'),
-            _('BOOKMARK'),
+            b'B',
+            b'bookmark',
+            b'',
+            _(b'send changes only reachable by given bookmark'),
+            _(b'BOOKMARK'),
         ),
         (
-            '',
-            'bundlename',
-            'bundle',
-            _('name of the bundle attachment file'),
-            _('NAME'),
+            b'',
+            b'bundlename',
+            b'bundle',
+            _(b'name of the bundle attachment file'),
+            _(b'NAME'),
         ),
-        ('r', 'rev', [], _('a revision to send'), _('REV')),
+        (b'r', b'rev', [], _(b'a revision to send'), _(b'REV')),
         (
-            '',
-            'force',
+            b'',
+            b'force',
             None,
             _(
-                'run even when remote repository is unrelated '
-                '(with -b/--bundle)'
+                b'run even when remote repository is unrelated '
+                b'(with -b/--bundle)'
             ),
         ),
         (
-            '',
-            'base',
+            b'',
+            b'base',
             [],
             _(
-                'a base changeset to specify instead of a destination '
-                '(with -b/--bundle)'
+                b'a base changeset to specify instead of a destination '
+                b'(with -b/--bundle)'
             ),
-            _('REV'),
+            _(b'REV'),
         ),
-        ('', 'intro', None, _('send an introduction email for a single patch')),
+        (
+            b'',
+            b'intro',
+            None,
+            _(b'send an introduction email for a single patch'),
+        ),
     ]
     + emailopts
     + cmdutil.remoteopts,
-    _('hg email [OPTION]... [DEST]...'),
+    _(b'hg email [OPTION]... [DEST]...'),
     helpcategory=command.CATEGORY_IMPORT_EXPORT,
 )
 def email(ui, repo, *revs, **opts):
@@ -731,33 +749,35 @@
 
     _charsets = mail._charsets(ui)
 
-    bundle = opts.get('bundle')
-    date = opts.get('date')
-    mbox = opts.get('mbox')
-    outgoing = opts.get('outgoing')
-    rev = opts.get('rev')
-    bookmark = opts.get('bookmark')
+    bundle = opts.get(b'bundle')
+    date = opts.get(b'date')
+    mbox = opts.get(b'mbox')
+    outgoing = opts.get(b'outgoing')
+    rev = opts.get(b'rev')
+    bookmark = opts.get(b'bookmark')
 
-    if not (opts.get('test') or mbox):
+    if not (opts.get(b'test') or mbox):
         # really sending
         mail.validateconfig(ui)
 
     if not (revs or rev or outgoing or bundle or bookmark):
-        raise error.Abort(_('specify at least one changeset with -B, -r or -o'))
+        raise error.Abort(
+            _(b'specify at least one changeset with -B, -r or -o')
+        )
 
     if outgoing and bundle:
         raise error.Abort(
             _(
-                "--outgoing mode always on with --bundle;"
-                " do not re-specify --outgoing"
+                b"--outgoing mode always on with --bundle;"
+                b" do not re-specify --outgoing"
             )
         )
     if rev and bookmark:
-        raise error.Abort(_("-r and -B are mutually exclusive"))
+        raise error.Abort(_(b"-r and -B are mutually exclusive"))
 
     if outgoing or bundle:
         if len(revs) > 1:
-            raise error.Abort(_("too many destinations"))
+            raise error.Abort(_(b"too many destinations"))
         if revs:
             dest = revs[0]
         else:
@@ -766,32 +786,32 @@
 
     if rev:
         if revs:
-            raise error.Abort(_('use only one form to specify the revision'))
+            raise error.Abort(_(b'use only one form to specify the revision'))
         revs = rev
     elif bookmark:
         if bookmark not in repo._bookmarks:
-            raise error.Abort(_("bookmark '%s' not found") % bookmark)
+            raise error.Abort(_(b"bookmark '%s' not found") % bookmark)
         revs = scmutil.bookmarkrevs(repo, bookmark)
 
     revs = scmutil.revrange(repo, revs)
     if outgoing:
         revs = _getoutgoing(repo, dest, revs)
     if bundle:
-        opts['revs'] = ["%d" % r for r in revs]
+        opts[b'revs'] = [b"%d" % r for r in revs]
 
     # check that the revisions exist on the public destination
-    publicurl = repo.ui.config('patchbomb', 'publicurl')
+    publicurl = repo.ui.config(b'patchbomb', b'publicurl')
     if publicurl:
-        repo.ui.debug('checking that revisions exist in the public repo\n')
+        repo.ui.debug(b'checking that revisions exist in the public repo\n')
         try:
             publicpeer = hg.peer(repo, {}, publicurl)
         except error.RepoError:
             repo.ui.write_err(
-                _('unable to access public repo: %s\n') % publicurl
+                _(b'unable to access public repo: %s\n') % publicurl
             )
             raise
-        if not publicpeer.capable('known'):
-            repo.ui.debug('skipping existence checks: public repo too old\n')
+        if not publicpeer.capable(b'known'):
+            repo.ui.debug(b'skipping existence checks: public repo too old\n')
         else:
             out = [repo[r] for r in revs]
             known = publicpeer.known(h.node() for h in out)
@@ -801,16 +821,16 @@
                     missing.append(h)
             if missing:
                 if len(missing) > 1:
-                    msg = _('public "%s" is missing %s and %i others')
+                    msg = _(b'public "%s" is missing %s and %i others')
                     msg %= (publicurl, missing[0], len(missing) - 1)
                 else:
-                    msg = _('public url %s is missing %s')
+                    msg = _(b'public url %s is missing %s')
                     msg %= (publicurl, missing[0])
                 missingrevs = [ctx.rev() for ctx in missing]
-                revhint = ' '.join(
-                    '-r %s' % h for h in repo.set('heads(%ld)', missingrevs)
+                revhint = b' '.join(
+                    b'-r %s' % h for h in repo.set(b'heads(%ld)', missingrevs)
                 )
-                hint = _("use 'hg push %s %s'") % (publicurl, revhint)
+                hint = _(b"use 'hg push %s %s'") % (publicurl, revhint)
                 raise error.Abort(msg, hint=hint)
 
     # start
@@ -824,10 +844,10 @@
 
     # deprecated config: patchbomb.from
     sender = (
-        opts.get('from')
-        or ui.config('email', 'from')
-        or ui.config('patchbomb', 'from')
-        or prompt(ui, 'From', ui.username())
+        opts.get(b'from')
+        or ui.config(b'email', b'from')
+        or ui.config(b'patchbomb', b'from')
+        or prompt(ui, b'From', ui.username())
     )
 
     if bundle:
@@ -843,98 +863,100 @@
 
     def getaddrs(header, ask=False, default=None):
         configkey = header.lower()
-        opt = header.replace('-', '_').lower()
+        opt = header.replace(b'-', b'_').lower()
         addrs = opts.get(opt)
         if addrs:
-            showaddrs.append('%s: %s' % (header, ', '.join(addrs)))
-            return mail.addrlistencode(ui, addrs, _charsets, opts.get('test'))
+            showaddrs.append(b'%s: %s' % (header, b', '.join(addrs)))
+            return mail.addrlistencode(ui, addrs, _charsets, opts.get(b'test'))
 
         # not on the command line: fallback to config and then maybe ask
-        addr = ui.config('email', configkey) or ui.config(
-            'patchbomb', configkey
+        addr = ui.config(b'email', configkey) or ui.config(
+            b'patchbomb', configkey
         )
         if not addr:
-            specified = ui.hasconfig('email', configkey) or ui.hasconfig(
-                'patchbomb', configkey
+            specified = ui.hasconfig(b'email', configkey) or ui.hasconfig(
+                b'patchbomb', configkey
             )
             if not specified and ask:
                 addr = prompt(ui, header, default=default)
         if addr:
-            showaddrs.append('%s: %s' % (header, addr))
-            return mail.addrlistencode(ui, [addr], _charsets, opts.get('test'))
+            showaddrs.append(b'%s: %s' % (header, addr))
+            return mail.addrlistencode(ui, [addr], _charsets, opts.get(b'test'))
         elif default:
             return mail.addrlistencode(
-                ui, [default], _charsets, opts.get('test')
+                ui, [default], _charsets, opts.get(b'test')
             )
         return []
 
-    to = getaddrs('To', ask=True)
+    to = getaddrs(b'To', ask=True)
     if not to:
         # we can get here in non-interactive mode
-        raise error.Abort(_('no recipient addresses provided'))
-    cc = getaddrs('Cc', ask=True, default='')
-    bcc = getaddrs('Bcc')
-    replyto = getaddrs('Reply-To')
+        raise error.Abort(_(b'no recipient addresses provided'))
+    cc = getaddrs(b'Cc', ask=True, default=b'')
+    bcc = getaddrs(b'Bcc')
+    replyto = getaddrs(b'Reply-To')
 
-    confirm = ui.configbool('patchbomb', 'confirm')
-    confirm |= bool(opts.get('diffstat') or opts.get('confirm'))
+    confirm = ui.configbool(b'patchbomb', b'confirm')
+    confirm |= bool(opts.get(b'diffstat') or opts.get(b'confirm'))
 
     if confirm:
-        ui.write(_('\nFinal summary:\n\n'), label='patchbomb.finalsummary')
-        ui.write(('From: %s\n' % sender), label='patchbomb.from')
+        ui.write(_(b'\nFinal summary:\n\n'), label=b'patchbomb.finalsummary')
+        ui.write((b'From: %s\n' % sender), label=b'patchbomb.from')
         for addr in showaddrs:
-            ui.write('%s\n' % addr, label='patchbomb.to')
+            ui.write(b'%s\n' % addr, label=b'patchbomb.to')
         for m, subj, ds in msgs:
-            ui.write(('Subject: %s\n' % subj), label='patchbomb.subject')
+            ui.write((b'Subject: %s\n' % subj), label=b'patchbomb.subject')
             if ds:
-                ui.write(ds, label='patchbomb.diffstats')
-        ui.write('\n')
+                ui.write(ds, label=b'patchbomb.diffstats')
+        ui.write(b'\n')
         if ui.promptchoice(
-            _('are you sure you want to send (yn)?' '$$ &Yes $$ &No')
+            _(b'are you sure you want to send (yn)?' b'$$ &Yes $$ &No')
         ):
-            raise error.Abort(_('patchbomb canceled'))
+            raise error.Abort(_(b'patchbomb canceled'))
 
-    ui.write('\n')
+    ui.write(b'\n')
 
-    parent = opts.get('in_reply_to') or None
+    parent = opts.get(b'in_reply_to') or None
     # angle brackets may be omitted, they're not semantically part of the msg-id
     if parent is not None:
-        if not parent.startswith('<'):
-            parent = '<' + parent
-        if not parent.endswith('>'):
-            parent += '>'
+        if not parent.startswith(b'<'):
+            parent = b'<' + parent
+        if not parent.endswith(b'>'):
+            parent += b'>'
 
     sender_addr = eutil.parseaddr(encoding.strfromlocal(sender))[1]
-    sender = mail.addressencode(ui, sender, _charsets, opts.get('test'))
+    sender = mail.addressencode(ui, sender, _charsets, opts.get(b'test'))
     sendmail = None
     firstpatch = None
-    progress = ui.makeprogress(_('sending'), unit=_('emails'), total=len(msgs))
+    progress = ui.makeprogress(
+        _(b'sending'), unit=_(b'emails'), total=len(msgs)
+    )
     for i, (m, subj, ds) in enumerate(msgs):
         try:
-            m['Message-Id'] = genmsgid(m['X-Mercurial-Node'])
+            m[b'Message-Id'] = genmsgid(m[b'X-Mercurial-Node'])
             if not firstpatch:
-                firstpatch = m['Message-Id']
-            m['X-Mercurial-Series-Id'] = firstpatch
+                firstpatch = m[b'Message-Id']
+            m[b'X-Mercurial-Series-Id'] = firstpatch
         except TypeError:
-            m['Message-Id'] = genmsgid('patchbomb')
+            m[b'Message-Id'] = genmsgid(b'patchbomb')
         if parent:
-            m['In-Reply-To'] = parent
-            m['References'] = parent
-        if not parent or 'X-Mercurial-Node' not in m:
-            parent = m['Message-Id']
+            m[b'In-Reply-To'] = parent
+            m[b'References'] = parent
+        if not parent or b'X-Mercurial-Node' not in m:
+            parent = m[b'Message-Id']
 
-        m['User-Agent'] = 'Mercurial-patchbomb/%s' % util.version()
-        m['Date'] = eutil.formatdate(start_time[0], localtime=True)
+        m[b'User-Agent'] = b'Mercurial-patchbomb/%s' % util.version()
+        m[b'Date'] = eutil.formatdate(start_time[0], localtime=True)
 
         start_time = (start_time[0] + 1, start_time[1])
-        m['From'] = sender
-        m['To'] = ', '.join(to)
+        m[b'From'] = sender
+        m[b'To'] = b', '.join(to)
         if cc:
-            m['Cc'] = ', '.join(cc)
+            m[b'Cc'] = b', '.join(cc)
         if bcc:
-            m['Bcc'] = ', '.join(bcc)
+            m[b'Bcc'] = b', '.join(bcc)
         if replyto:
-            m['Reply-To'] = ', '.join(replyto)
+            m[b'Reply-To'] = b', '.join(replyto)
         # Fix up all headers to be native strings.
         # TODO(durin42): this should probably be cleaned up above in the future.
         if pycompat.ispy3:
@@ -952,24 +974,24 @@
                     change = True
                 if change:
                     m[hdr] = val
-        if opts.get('test'):
-            ui.status(_('displaying '), subj, ' ...\n')
-            ui.pager('email')
+        if opts.get(b'test'):
+            ui.status(_(b'displaying '), subj, b' ...\n')
+            ui.pager(b'email')
             generator = _bytesgenerator(ui, mangle_from_=False)
             try:
                 generator.flatten(m, 0)
-                ui.write('\n')
+                ui.write(b'\n')
             except IOError as inst:
                 if inst.errno != errno.EPIPE:
                     raise
         else:
             if not sendmail:
                 sendmail = mail.connect(ui, mbox=mbox)
-            ui.status(_('sending '), subj, ' ...\n')
+            ui.status(_(b'sending '), subj, b' ...\n')
             progress.update(i, item=subj)
             if not mbox:
                 # Exim does not remove the Bcc field
-                del m['Bcc']
+                del m[b'Bcc']
             fp = stringio()
             generator = _bytesgenerator(fp, mangle_from_=False)
             generator.flatten(m, 0)
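
_formatprefix above zero-pads the series index to the width of the total
using the '*' width specifier of bytes %-formatting. A small sketch with an
illustrative function name:

    def patch_prefix(idx, total, flag=b''):
        width = len(b'%d' % total)
        return b'[PATCH %0*d of %d%s]' % (width, idx, total, flag)

    assert patch_prefix(3, 12) == b'[PATCH 03 of 12]'
    assert patch_prefix(1, 1, b' RFC') == b'[PATCH 1 of 1 RFC]'
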
--- a/hgext/phabricator.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/phabricator.py	Sun Oct 06 09:48:39 2019 -0400
@@ -79,7 +79,7 @@
 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
 # be specifying the version(s) of Mercurial they are tested with, or
 # leave the attribute unspecified.
-testedwith = 'ships-with-hg-core'
+testedwith = b'ships-with-hg-core'
 
 eh = exthelper.exthelper()
 
@@ -747,7 +747,7 @@
                 if old.description() != newdesc:
                     if old.phase() == phases.public:
                         ui.warn(
-                            _("warning: not updating public commit %s\n")
+                            _(b"warning: not updating public commit %s\n")
                             % scmutil.formatchangeid(old)
                         )
                         continue
--- a/hgext/purge.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/purge.py	Sun Oct 06 09:48:39 2019 -0400
@@ -40,29 +40,29 @@
 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
 # be specifying the version(s) of Mercurial they are tested with, or
 # leave the attribute unspecified.
-testedwith = 'ships-with-hg-core'
+testedwith = b'ships-with-hg-core'
 
 
 @command(
-    'purge|clean',
+    b'purge|clean',
     [
-        ('a', 'abort-on-err', None, _('abort if an error occurs')),
-        ('', 'all', None, _('purge ignored files too')),
-        ('', 'dirs', None, _('purge empty directories')),
-        ('', 'files', None, _('purge files')),
-        ('p', 'print', None, _('print filenames instead of deleting them')),
+        (b'a', b'abort-on-err', None, _(b'abort if an error occurs')),
+        (b'', b'all', None, _(b'purge ignored files too')),
+        (b'', b'dirs', None, _(b'purge empty directories')),
+        (b'', b'files', None, _(b'purge files')),
+        (b'p', b'print', None, _(b'print filenames instead of deleting them')),
         (
-            '0',
-            'print0',
+            b'0',
+            b'print0',
             None,
             _(
-                'end filenames with NUL, for use with xargs'
-                ' (implies -p/--print)'
+                b'end filenames with NUL, for use with xargs'
+                b' (implies -p/--print)'
             ),
         ),
     ]
     + cmdutil.walkopts,
-    _('hg purge [OPTION]... [DIR]...'),
+    _(b'hg purge [OPTION]... [DIR]...'),
     helpcategory=command.CATEGORY_MAINTENANCE,
 )
 def purge(ui, repo, *dirs, **opts):
@@ -97,14 +97,14 @@
     '''
     opts = pycompat.byteskwargs(opts)
 
-    act = not opts.get('print')
-    eol = '\n'
-    if opts.get('print0'):
-        eol = '\0'
+    act = not opts.get(b'print')
+    eol = b'\n'
+    if opts.get(b'print0'):
+        eol = b'\0'
         act = False  # --print0 implies --print
 
-    removefiles = opts.get('files')
-    removedirs = opts.get('dirs')
+    removefiles = opts.get(b'files')
+    removedirs = opts.get(b'dirs')
 
     if not removefiles and not removedirs:
         removefiles = True
@@ -115,13 +115,13 @@
     paths = mergemod.purge(
         repo,
         match,
-        ignored=opts.get('all', False),
+        ignored=opts.get(b'all', False),
         removeemptydirs=removedirs,
         removefiles=removefiles,
-        abortonerror=opts.get('abort_on_err'),
+        abortonerror=opts.get(b'abort_on_err'),
         noop=not act,
     )
 
     for path in paths:
         if not act:
-            ui.write('%s%s' % (path, eol))
+            ui.write(b'%s%s' % (path, eol))
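
The purge command above emits one filename per line by default and switches
the terminator to NUL under --print0, so the output can be piped to
`xargs -0` even when paths contain whitespace. A minimal sketch (the
function name is illustrative):

    def format_paths(paths, print0=False):
        eol = b'\0' if print0 else b'\n'
        return b''.join(b'%s%s' % (p, eol) for p in paths)

    assert format_paths([b'a b', b'c']) == b'a b\nc\n'
    assert format_paths([b'a b'], print0=True) == b'a b\x00'
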
--- a/hgext/rebase.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/rebase.py	Sun Oct 06 09:48:39 2019 -0400
@@ -56,11 +56,11 @@
 
 # Indicates that a revision needs to be rebased
 revtodo = -1
-revtodostr = '-1'
+revtodostr = b'-1'
 
 # legacy revstates no longer needed in current code
 # -2: nullmerge, -3: revignored, -4: revprecursor, -5: revpruned
-legacystates = {'-2', '-3', '-4', '-5'}
+legacystates = {b'-2', b'-3', b'-4', b'-5'}
 
 cmdtable = {}
 command = registrar.command(cmdtable)
@@ -68,7 +68,7 @@
 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
 # be specifying the version(s) of Mercurial they are tested with, or
 # leave the attribute unspecified.
-testedwith = 'ships-with-hg-core'
+testedwith = b'ships-with-hg-core'
 
 
 def _nothingtorebase():
@@ -76,16 +76,16 @@
 
 
 def _savegraft(ctx, extra):
-    s = ctx.extra().get('source', None)
+    s = ctx.extra().get(b'source', None)
     if s is not None:
-        extra['source'] = s
-    s = ctx.extra().get('intermediate-source', None)
+        extra[b'source'] = s
+    s = ctx.extra().get(b'intermediate-source', None)
     if s is not None:
-        extra['intermediate-source'] = s
+        extra[b'intermediate-source'] = s
 
 
 def _savebranch(ctx, extra):
-    extra['branch'] = ctx.branch()
+    extra[b'branch'] = ctx.branch()
 
 
 def _destrebase(repo, sourceset, destspace=None):
@@ -94,7 +94,7 @@
     Please wrap destutil.destmerge instead."""
     return destutil.destmerge(
         repo,
-        action='rebase',
+        action=b'rebase',
         sourceset=sourceset,
         onheadcheck=False,
         destspace=destspace,
@@ -104,7 +104,7 @@
 revsetpredicate = registrar.revsetpredicate()
 
 
-@revsetpredicate('_destrebase')
+@revsetpredicate(b'_destrebase')
 def _revsetdestrebase(repo, subset, x):
     # ``_rebasedefaultdest()``
 
@@ -118,13 +118,13 @@
     return subset & smartset.baseset([_destrebase(repo, sourceset)])
 
 
-@revsetpredicate('_destautoorphanrebase')
+@revsetpredicate(b'_destautoorphanrebase')
 def _revsetdestautoorphanrebase(repo, subset, x):
     # ``_destautoorphanrebase()``
 
     # automatic rebase destination for a single orphan revision.
     unfi = repo.unfiltered()
-    obsoleted = unfi.revs('obsolete()')
+    obsoleted = unfi.revs(b'obsolete()')
 
     src = revset.getset(repo, subset, x).first()
 
@@ -134,7 +134,7 @@
     dests = destutil.orphanpossibledestination(repo, src)
     if len(dests) > 1:
         raise error.Abort(
-            _("ambiguous automatic rebase: %r could end up on any of %r")
+            _(b"ambiguous automatic rebase: %r could end up on any of %r")
             % (src, dests)
         )
     # We have zero or one destination, so we can just return here.
@@ -143,15 +143,19 @@
 
 def _ctxdesc(ctx):
     """short description for a context"""
-    desc = '%d:%s "%s"' % (ctx.rev(), ctx, ctx.description().split('\n', 1)[0])
+    desc = b'%d:%s "%s"' % (
+        ctx.rev(),
+        ctx,
+        ctx.description().split(b'\n', 1)[0],
+    )
     repo = ctx.repo()
     names = []
     for nsname, ns in repo.names.iteritems():
-        if nsname == 'branches':
+        if nsname == b'branches':
             continue
         names.extend(ns.names(repo, ctx.node()))
     if names:
-        desc += ' (%s)' % ' '.join(names)
+        desc += b' (%s)' % b' '.join(names)
     return desc
 
 
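Rewrites such as `b'%d:%s "%s"' % (...)` above lean on %-interpolation for bytes, which Python 3 only regained in 3.5 via PEP 461. A quick illustration:

# PEP 461: bytes %-formatting is available again on Python 3.5+.
rev = 42
node = b'abc123'
assert b'%d:%s' % (rev, node) == b'42:abc123'
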
@@ -185,22 +189,22 @@
         self.destmap = {}
         self.skipped = set()
 
-        self.collapsef = opts.get('collapse', False)
+        self.collapsef = opts.get(b'collapse', False)
         self.collapsemsg = cmdutil.logmessage(ui, opts)
-        self.date = opts.get('date', None)
+        self.date = opts.get(b'date', None)
 
-        e = opts.get('extrafn')  # internal, used by e.g. hgsubversion
+        e = opts.get(b'extrafn')  # internal, used by e.g. hgsubversion
         self.extrafns = [_savegraft]
         if e:
             self.extrafns = [e]
 
-        self.backupf = ui.configbool('rewrite', 'backup-bundle')
-        self.keepf = opts.get('keep', False)
-        self.keepbranchesf = opts.get('keepbranches', False)
+        self.backupf = ui.configbool(b'rewrite', b'backup-bundle')
+        self.keepf = opts.get(b'keep', False)
+        self.keepbranchesf = opts.get(b'keepbranches', False)
         self.obsoletenotrebased = {}
         self.obsoletewithoutsuccessorindestination = set()
         self.inmemory = inmemory
-        self.stateobj = statemod.cmdstate(repo, 'rebasestate')
+        self.stateobj = statemod.cmdstate(repo, b'rebasestate')
 
     @property
     def repo(self):
@@ -213,96 +217,96 @@
         """Store the current status to allow recovery"""
         if tr:
             tr.addfilegenerator(
-                'rebasestate',
-                ('rebasestate',),
+                b'rebasestate',
+                (b'rebasestate',),
                 self._writestatus,
-                location='plain',
+                location=b'plain',
             )
         else:
-            with self.repo.vfs("rebasestate", "w") as f:
+            with self.repo.vfs(b"rebasestate", b"w") as f:
                 self._writestatus(f)
 
     def _writestatus(self, f):
         repo = self.repo
         assert repo.filtername is None
-        f.write(repo[self.originalwd].hex() + '\n')
+        f.write(repo[self.originalwd].hex() + b'\n')
         # was "dest". we now write dest per src root below.
-        f.write('\n')
-        f.write(repo[self.external].hex() + '\n')
-        f.write('%d\n' % int(self.collapsef))
-        f.write('%d\n' % int(self.keepf))
-        f.write('%d\n' % int(self.keepbranchesf))
-        f.write('%s\n' % (self.activebookmark or ''))
+        f.write(b'\n')
+        f.write(repo[self.external].hex() + b'\n')
+        f.write(b'%d\n' % int(self.collapsef))
+        f.write(b'%d\n' % int(self.keepf))
+        f.write(b'%d\n' % int(self.keepbranchesf))
+        f.write(b'%s\n' % (self.activebookmark or b''))
         destmap = self.destmap
         for d, v in self.state.iteritems():
             oldrev = repo[d].hex()
             if v >= 0:
                 newrev = repo[v].hex()
             else:
-                newrev = "%d" % v
+                newrev = b"%d" % v
             destnode = repo[destmap[d]].hex()
-            f.write("%s:%s:%s\n" % (oldrev, newrev, destnode))
-        repo.ui.debug('rebase status stored\n')
+            f.write(b"%s:%s:%s\n" % (oldrev, newrev, destnode))
+        repo.ui.debug(b'rebase status stored\n')
 
     def restorestatus(self):
         """Restore a previously stored status"""
         if not self.stateobj.exists():
-            cmdutil.wrongtooltocontinue(self.repo, _('rebase'))
+            cmdutil.wrongtooltocontinue(self.repo, _(b'rebase'))
 
         data = self._read()
-        self.repo.ui.debug('rebase status resumed\n')
+        self.repo.ui.debug(b'rebase status resumed\n')
 
-        self.originalwd = data['originalwd']
-        self.destmap = data['destmap']
-        self.state = data['state']
-        self.skipped = data['skipped']
-        self.collapsef = data['collapse']
-        self.keepf = data['keep']
-        self.keepbranchesf = data['keepbranches']
-        self.external = data['external']
-        self.activebookmark = data['activebookmark']
+        self.originalwd = data[b'originalwd']
+        self.destmap = data[b'destmap']
+        self.state = data[b'state']
+        self.skipped = data[b'skipped']
+        self.collapsef = data[b'collapse']
+        self.keepf = data[b'keep']
+        self.keepbranchesf = data[b'keepbranches']
+        self.external = data[b'external']
+        self.activebookmark = data[b'activebookmark']
 
     def _read(self):
         self.prepared = True
         repo = self.repo
         assert repo.filtername is None
         data = {
-            'keepbranches': None,
-            'collapse': None,
-            'activebookmark': None,
-            'external': nullrev,
-            'keep': None,
-            'originalwd': None,
+            b'keepbranches': None,
+            b'collapse': None,
+            b'activebookmark': None,
+            b'external': nullrev,
+            b'keep': None,
+            b'originalwd': None,
         }
         legacydest = None
         state = {}
         destmap = {}
 
         if True:
-            f = repo.vfs("rebasestate")
+            f = repo.vfs(b"rebasestate")
             for i, l in enumerate(f.read().splitlines()):
                 if i == 0:
-                    data['originalwd'] = repo[l].rev()
+                    data[b'originalwd'] = repo[l].rev()
                 elif i == 1:
                     # this line should be empty in newer version. but legacy
                     # clients may still use it
                     if l:
                         legacydest = repo[l].rev()
                 elif i == 2:
-                    data['external'] = repo[l].rev()
+                    data[b'external'] = repo[l].rev()
                 elif i == 3:
-                    data['collapse'] = bool(int(l))
+                    data[b'collapse'] = bool(int(l))
                 elif i == 4:
-                    data['keep'] = bool(int(l))
+                    data[b'keep'] = bool(int(l))
                 elif i == 5:
-                    data['keepbranches'] = bool(int(l))
-                elif i == 6 and not (len(l) == 81 and ':' in l):
+                    data[b'keepbranches'] = bool(int(l))
+                elif i == 6 and not (len(l) == 81 and b':' in l):
                     # line 6 is a recent addition, so for backwards
                     # compatibility check that the line doesn't look like the
                     # oldrev:newrev lines
-                    data['activebookmark'] = l
+                    data[b'activebookmark'] = l
                 else:
-                    args = l.split(':')
+                    args = l.split(b':')
                     oldrev = repo[args[0]].rev()
                     newrev = args[1]
                     if newrev in legacystates:
@@ -318,23 +322,23 @@
                     else:
                         state[oldrev] = repo[newrev].rev()
 
-        if data['keepbranches'] is None:
-            raise error.Abort(_('.hg/rebasestate is incomplete'))
+        if data[b'keepbranches'] is None:
+            raise error.Abort(_(b'.hg/rebasestate is incomplete'))
 
-        data['destmap'] = destmap
-        data['state'] = state
+        data[b'destmap'] = destmap
+        data[b'state'] = state
         skipped = set()
         # recompute the set of skipped revs
-        if not data['collapse']:
+        if not data[b'collapse']:
             seen = set(destmap.values())
             for old, new in sorted(state.items()):
                 if new != revtodo and new in seen:
                     skipped.add(old)
                 seen.add(new)
-        data['skipped'] = skipped
+        data[b'skipped'] = skipped
         repo.ui.debug(
-            'computed skipped revs: %s\n'
-            % (' '.join('%d' % r for r in sorted(skipped)) or '')
+            b'computed skipped revs: %s\n'
+            % (b' '.join(b'%d' % r for r in sorted(skipped)) or b'')
         )
 
         return data
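
Taken together, `_writestatus` and `_read` above define the `.hg/rebasestate` layout: one hex node per line for the original working directory, a now-empty legacy destination, and the external parent; three integer flags; the active bookmark; then one `oldnode:newnode:destnode` triple per rebased revision. A standalone reader for that layout might look like the following (a minimal sketch with a hypothetical helper name, ignoring the backwards-compatibility check on line 6):

def parse_rebasestate_sketch(data):
    # Parse the byte contents of .hg/rebasestate into a dict,
    # mirroring the field order used by _writestatus above.
    lines = data.splitlines()
    return {
        b'originalwd': lines[0],
        b'legacydest': lines[1],   # empty in current versions
        b'external': lines[2],
        b'collapse': bool(int(lines[3])),
        b'keep': bool(int(lines[4])),
        b'keepbranches': bool(int(lines[5])),
        b'activebookmark': lines[6],
        b'entries': [tuple(l.split(b':')) for l in lines[7:]],
    }

sample = b'\n'.join([
    b'0' * 40, b'', b'1' * 40, b'1', b'0', b'0', b'book',
    b'%s:%s:%s' % (b'2' * 40, b'-1', b'1' * 40),
])
parsed = parse_rebasestate_sketch(sample)
assert parsed[b'collapse'] is True
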
@@ -346,7 +350,7 @@
         destmap:        {srcrev: destrev} destination revisions
         """
         self.obsoletenotrebased = {}
-        if not self.ui.configbool('experimental', 'rebaseskipobsolete'):
+        if not self.ui.configbool(b'experimental', b'rebaseskipobsolete'):
             return
         obsoleteset = set(obsoleterevs)
         (
@@ -369,14 +373,14 @@
                 clearcollapsemsg(self.repo)
                 self.repo.ui.warn(
                     _(
-                        'rebase aborted (no revision is removed,'
-                        ' only broken state is cleared)\n'
+                        b'rebase aborted (no revision is removed,'
+                        b' only broken state is cleared)\n'
                     )
                 )
                 return 0
             else:
-                msg = _('cannot continue inconsistent rebase')
-                hint = _('use "hg rebase --abort" to clear broken state')
+                msg = _(b'cannot continue inconsistent rebase')
+                hint = _(b'use "hg rebase --abort" to clear broken state')
                 raise error.Abort(msg, hint=hint)
 
         if isabort:
@@ -390,28 +394,28 @@
         rebaseset = destmap.keys()
         allowunstable = obsolete.isenabled(self.repo, obsolete.allowunstableopt)
         if not (self.keepf or allowunstable) and self.repo.revs(
-            'first(children(%ld) - %ld)', rebaseset, rebaseset
+            b'first(children(%ld) - %ld)', rebaseset, rebaseset
         ):
             raise error.Abort(
                 _(
-                    "can't remove original changesets with"
-                    " unrebased descendants"
+                    b"can't remove original changesets with"
+                    b" unrebased descendants"
                 ),
-                hint=_('use --keep to keep original changesets'),
+                hint=_(b'use --keep to keep original changesets'),
             )
 
         result = buildstate(self.repo, destmap, self.collapsef)
 
         if not result:
             # Empty state built, nothing to rebase
-            self.ui.status(_('nothing to rebase\n'))
+            self.ui.status(_(b'nothing to rebase\n'))
             return _nothingtorebase()
 
-        for root in self.repo.set('roots(%ld)', rebaseset):
+        for root in self.repo.set(b'roots(%ld)', rebaseset):
             if not self.keepf and not root.mutable():
                 raise error.Abort(
-                    _("can't rebase public changeset %s") % root,
-                    hint=_("see 'hg help phases' for details"),
+                    _(b"can't rebase public changeset %s") % root,
+                    hint=_(b"see 'hg help phases' for details"),
                 )
 
         (self.originalwd, self.destmap, self.state) = result
@@ -419,7 +423,7 @@
             dests = set(self.destmap.values())
             if len(dests) != 1:
                 raise error.Abort(
-                    _('--collapse does not work with multiple destinations')
+                    _(b'--collapse does not work with multiple destinations')
                 )
             destrev = next(iter(dests))
             destancestors = self.repo.changelog.ancestors(
@@ -430,7 +434,7 @@
         for destrev in sorted(set(destmap.values())):
             dest = self.repo[destrev]
             if dest.closesbranch() and not self.keepbranchesf:
-                self.ui.status(_('reopening closed branch head %s\n') % dest)
+                self.ui.status(_(b'reopening closed branch head %s\n') % dest)
 
         self.prepared = True
 
@@ -439,13 +443,13 @@
             from mercurial.context import overlayworkingctx
 
             self.wctx = overlayworkingctx(self.repo)
-            self.repo.ui.debug("rebasing in-memory\n")
+            self.repo.ui.debug(b"rebasing in-memory\n")
         else:
             self.wctx = self.repo[None]
-            self.repo.ui.debug("rebasing on disk\n")
+            self.repo.ui.debug(b"rebasing on disk\n")
         self.repo.ui.log(
-            "rebase",
-            "using in-memory rebase: %r\n",
+            b"rebase",
+            b"using in-memory rebase: %r\n",
             self.inmemory,
             rebase_imm_used=self.inmemory,
         )
@@ -464,7 +468,7 @@
                     branches.add(repo[rev].branch())
                     if len(branches) > 1:
                         raise error.Abort(
-                            _('cannot collapse multiple named ' 'branches')
+                            _(b'cannot collapse multiple named ' b'branches')
                         )
 
         # Calculate self.obsoletenotrebased
@@ -486,27 +490,27 @@
 
         cands = [k for k, v in self.state.iteritems() if v == revtodo]
         p = repo.ui.makeprogress(
-            _("rebasing"), unit=_('changesets'), total=len(cands)
+            _(b"rebasing"), unit=_(b'changesets'), total=len(cands)
         )
 
         def progress(ctx):
-            p.increment(item=("%d:%s" % (ctx.rev(), ctx)))
+            p.increment(item=(b"%d:%s" % (ctx.rev(), ctx)))
 
         allowdivergence = self.ui.configbool(
-            'experimental', 'evolution.allowdivergence'
+            b'experimental', b'evolution.allowdivergence'
         )
         for subset in sortsource(self.destmap):
-            sortedrevs = self.repo.revs('sort(%ld, -topo)', subset)
+            sortedrevs = self.repo.revs(b'sort(%ld, -topo)', subset)
             if not allowdivergence:
                 sortedrevs -= self.repo.revs(
-                    'descendants(%ld) and not %ld',
+                    b'descendants(%ld) and not %ld',
                     self.obsoletewithoutsuccessorindestination,
                     self.obsoletewithoutsuccessorindestination,
                 )
             for rev in sortedrevs:
                 self._rebasenode(tr, rev, allowdivergence, progress)
         p.complete()
-        ui.note(_('rebase merging completed\n'))
+        ui.note(_(b'rebase merging completed\n'))
 
     def _concludenode(self, rev, p1, p2, editor, commitmsg=None):
         '''Commit the wd changes with parents p1 and p2.
@@ -520,15 +524,15 @@
         date = self.date
         if date is None:
             date = ctx.date()
-        extra = {'rebase_source': ctx.hex()}
+        extra = {b'rebase_source': ctx.hex()}
         for c in self.extrafns:
             c(ctx, extra)
         keepbranch = self.keepbranchesf and repo[p1].branch() != ctx.branch()
         destphase = max(ctx.phase(), phases.draft)
-        overrides = {('phases', 'new-commit'): destphase}
+        overrides = {(b'phases', b'new-commit'): destphase}
         if keepbranch:
-            overrides[('ui', 'allowemptycommit')] = True
-        with repo.ui.configoverride(overrides, 'rebase'):
+            overrides[(b'ui', b'allowemptycommit')] = True
+        with repo.ui.configoverride(overrides, b'rebase'):
             if self.inmemory:
                 newnode = commitmemorynode(
                     repo,
@@ -567,15 +571,15 @@
         ctx = repo[rev]
         desc = _ctxdesc(ctx)
         if self.state[rev] == rev:
-            ui.status(_('already rebased %s\n') % desc)
+            ui.status(_(b'already rebased %s\n') % desc)
         elif (
             not allowdivergence
             and rev in self.obsoletewithoutsuccessorindestination
         ):
             msg = (
                 _(
-                    'note: not rebasing %s and its descendants as '
-                    'this would cause divergence\n'
+                    b'note: not rebasing %s and its descendants as '
+                    b'this would cause divergence\n'
                 )
                 % desc
             )
@@ -585,12 +589,13 @@
             succ = self.obsoletenotrebased[rev]
             if succ is None:
                 msg = (
-                    _('note: not rebasing %s, it has no ' 'successor\n') % desc
+                    _(b'note: not rebasing %s, it has no ' b'successor\n')
+                    % desc
                 )
             else:
                 succdesc = _ctxdesc(repo[succ])
                 msg = _(
-                    'note: not rebasing %s, already in ' 'destination as %s\n'
+                    b'note: not rebasing %s, already in ' b'destination as %s\n'
                 ) % (desc, succdesc)
             repo.ui.status(msg)
             # Make clearrebased aware state[rev] is not a true successor
@@ -602,7 +607,7 @@
             )
             self.state[rev] = dest
         elif self.state[rev] == revtodo:
-            ui.status(_('rebasing %s\n') % desc)
+            ui.status(_(b'rebasing %s\n') % desc)
             progressfn(ctx)
             p1, p2, base = defineparents(
                 repo,
@@ -613,10 +618,10 @@
                 self.obsoletenotrebased,
             )
             if not self.inmemory and len(repo[None].parents()) == 2:
-                repo.ui.debug('resuming interrupted rebase\n')
+                repo.ui.debug(b'resuming interrupted rebase\n')
             else:
-                overrides = {('ui', 'forcemerge'): opts.get('tool', '')}
-                with ui.configoverride(overrides, 'rebase'):
+                overrides = {(b'ui', b'forcemerge'): opts.get(b'tool', b'')}
+                with ui.configoverride(overrides, b'rebase'):
                     stats = rebasenode(
                         repo,
                         rev,
@@ -632,13 +637,13 @@
                         else:
                             raise error.InterventionRequired(
                                 _(
-                                    'unresolved conflicts (see hg '
-                                    'resolve, then hg rebase --continue)'
+                                    b'unresolved conflicts (see hg '
+                                    b'resolve, then hg rebase --continue)'
                                 )
                             )
             if not self.collapsef:
                 merging = p2 != nullrev
-                editform = cmdutil.mergeeditform(merging, 'rebase')
+                editform = cmdutil.mergeeditform(merging, b'rebase')
                 editor = cmdutil.getcommiteditor(
                     editform=editform, **pycompat.strkwargs(opts)
                 )
@@ -653,22 +658,22 @@
             # Update the state
             if newnode is not None:
                 self.state[rev] = repo[newnode].rev()
-                ui.debug('rebased as %s\n' % short(newnode))
+                ui.debug(b'rebased as %s\n' % short(newnode))
             else:
                 if not self.collapsef:
                     ui.warn(
                         _(
-                            'note: not rebasing %s, its destination already '
-                            'has all its changes\n'
+                            b'note: not rebasing %s, its destination already '
+                            b'has all its changes\n'
                         )
                         % desc
                     )
                     self.skipped.add(rev)
                 self.state[rev] = p1
-                ui.debug('next revision set to %d\n' % p1)
+                ui.debug(b'next revision set to %d\n' % p1)
         else:
             ui.status(
-                _('already rebased %s as %s\n') % (desc, repo[self.state[rev]])
+                _(b'already rebased %s as %s\n') % (desc, repo[self.state[rev]])
             )
         if not tr:
             # When not using single transaction, store state after each
@@ -679,7 +684,7 @@
 
     def _finishrebase(self):
         repo, ui, opts = self.repo, self.ui, self.opts
-        fm = ui.formatter('rebase', opts)
+        fm = ui.formatter(b'rebase', opts)
         fm.startitem()
         if self.collapsef:
             p1, p2, _base = defineparents(
@@ -690,15 +695,15 @@
                 self.skipped,
                 self.obsoletenotrebased,
             )
-            editopt = opts.get('edit')
-            editform = 'rebase.collapse'
+            editopt = opts.get(b'edit')
+            editform = b'rebase.collapse'
             if self.collapsemsg:
                 commitmsg = self.collapsemsg
             else:
-                commitmsg = 'Collapsed revision'
+                commitmsg = b'Collapsed revision'
                 for rebased in sorted(self.state):
                     if rebased not in self.skipped:
-                        commitmsg += '\n* %s' % repo[rebased].description()
+                        commitmsg += b'\n* %s' % repo[rebased].description()
                 editopt = True
             editor = cmdutil.getcommiteditor(edit=editopt, editform=editform)
             revtoreuse = max(self.state)
@@ -712,7 +717,7 @@
                 for oldrev in self.state:
                     self.state[oldrev] = newrev
 
-        if 'qtip' in repo.tags():
+        if b'qtip' in repo.tags():
             updatemq(repo, self.state, self.skipped, **pycompat.strkwargs(opts))
 
         # restore original working directory
@@ -722,7 +727,7 @@
             # original directory is a parent of rebase set root or ignored
             newwd = self.originalwd
         if newwd not in [c.rev() for c in repo[None].parents()]:
-            ui.note(_("update back to initial working directory parent\n"))
+            ui.note(_(b"update back to initial working directory parent\n"))
             hg.updaterepo(repo, newwd, overwrite=False)
 
         collapsedas = None
@@ -743,17 +748,17 @@
         clearstatus(repo)
         clearcollapsemsg(repo)
 
-        ui.note(_("rebase completed\n"))
-        util.unlinkpath(repo.sjoin('undo'), ignoremissing=True)
+        ui.note(_(b"rebase completed\n"))
+        util.unlinkpath(repo.sjoin(b'undo'), ignoremissing=True)
         if self.skipped:
             skippedlen = len(self.skipped)
-            ui.note(_("%d revisions have been skipped\n") % skippedlen)
+            ui.note(_(b"%d revisions have been skipped\n") % skippedlen)
         fm.end()
 
         if (
             self.activebookmark
             and self.activebookmark in repo._bookmarks
-            and repo['.'].node() == repo._bookmarks[self.activebookmark]
+            and repo[b'.'].node() == repo._bookmarks[self.activebookmark]
         ):
             bookmarks.activate(repo, self.activebookmark)
 
@@ -775,9 +780,9 @@
             cleanup = True
             if immutable:
                 repo.ui.warn(
-                    _("warning: can't clean up public changesets %s\n")
-                    % ', '.join(bytes(repo[r]) for r in immutable),
-                    hint=_("see 'hg help phases' for details"),
+                    _(b"warning: can't clean up public changesets %s\n")
+                    % b', '.join(bytes(repo[r]) for r in immutable),
+                    hint=_(b"see 'hg help phases' for details"),
                 )
                 cleanup = False
 
@@ -787,8 +792,8 @@
             if descendants - set(rebased):
                 repo.ui.warn(
                     _(
-                        "warning: new changesets detected on "
-                        "destination branch, can't strip\n"
+                        b"warning: new changesets detected on "
+                        b"destination branch, can't strip\n"
                     )
                 )
                 cleanup = False
@@ -797,13 +802,13 @@
                 shouldupdate = False
                 if rebased:
                     strippoints = [
-                        c.node() for c in repo.set('roots(%ld)', rebased)
+                        c.node() for c in repo.set(b'roots(%ld)', rebased)
                     ]
 
                 updateifonnodes = set(rebased)
                 updateifonnodes.update(self.destmap.values())
                 updateifonnodes.add(self.originalwd)
-                shouldupdate = repo['.'].rev() in updateifonnodes
+                shouldupdate = repo[b'.'].rev() in updateifonnodes
 
                 # Update away from the rebase if necessary
                 if shouldupdate or needupdate(repo, self.state):
@@ -822,67 +827,73 @@
             clearstatus(repo)
             clearcollapsemsg(repo)
             if not suppwarns:
-                repo.ui.warn(_('rebase aborted\n'))
+                repo.ui.warn(_(b'rebase aborted\n'))
         return 0
 
 
 @command(
-    'rebase',
+    b'rebase',
     [
         (
-            's',
-            'source',
-            '',
-            _('rebase the specified changeset and descendants'),
-            _('REV'),
+            b's',
+            b'source',
+            b'',
+            _(b'rebase the specified changeset and descendants'),
+            _(b'REV'),
         ),
         (
-            'b',
-            'base',
-            '',
-            _('rebase everything from branching point of specified changeset'),
-            _('REV'),
+            b'b',
+            b'base',
+            b'',
+            _(b'rebase everything from branching point of specified changeset'),
+            _(b'REV'),
         ),
-        ('r', 'rev', [], _('rebase these revisions'), _('REV')),
-        ('d', 'dest', '', _('rebase onto the specified changeset'), _('REV')),
-        ('', 'collapse', False, _('collapse the rebased changesets')),
+        (b'r', b'rev', [], _(b'rebase these revisions'), _(b'REV')),
         (
-            'm',
-            'message',
-            '',
-            _('use text as collapse commit message'),
-            _('TEXT'),
+            b'd',
+            b'dest',
+            b'',
+            _(b'rebase onto the specified changeset'),
+            _(b'REV'),
         ),
-        ('e', 'edit', False, _('invoke editor on commit messages')),
+        (b'', b'collapse', False, _(b'collapse the rebased changesets')),
         (
-            'l',
-            'logfile',
-            '',
-            _('read collapse commit message from file'),
-            _('FILE'),
+            b'm',
+            b'message',
+            b'',
+            _(b'use text as collapse commit message'),
+            _(b'TEXT'),
+        ),
+        (b'e', b'edit', False, _(b'invoke editor on commit messages')),
+        (
+            b'l',
+            b'logfile',
+            b'',
+            _(b'read collapse commit message from file'),
+            _(b'FILE'),
         ),
-        ('k', 'keep', False, _('keep original changesets')),
-        ('', 'keepbranches', False, _('keep original branch names')),
-        ('D', 'detach', False, _('(DEPRECATED)')),
-        ('i', 'interactive', False, _('(DEPRECATED)')),
-        ('t', 'tool', '', _('specify merge tool')),
-        ('', 'stop', False, _('stop interrupted rebase')),
-        ('c', 'continue', False, _('continue an interrupted rebase')),
-        ('a', 'abort', False, _('abort an interrupted rebase')),
+        (b'k', b'keep', False, _(b'keep original changesets')),
+        (b'', b'keepbranches', False, _(b'keep original branch names')),
+        (b'D', b'detach', False, _(b'(DEPRECATED)')),
+        (b'i', b'interactive', False, _(b'(DEPRECATED)')),
+        (b't', b'tool', b'', _(b'specify merge tool')),
+        (b'', b'stop', False, _(b'stop interrupted rebase')),
+        (b'c', b'continue', False, _(b'continue an interrupted rebase')),
+        (b'a', b'abort', False, _(b'abort an interrupted rebase')),
         (
-            '',
-            'auto-orphans',
-            '',
+            b'',
+            b'auto-orphans',
+            b'',
             _(
-                'automatically rebase orphan revisions '
-                'in the specified revset (EXPERIMENTAL)'
+                b'automatically rebase orphan revisions '
+                b'in the specified revset (EXPERIMENTAL)'
             ),
         ),
     ]
     + cmdutil.dryrunopts
     + cmdutil.formatteropts
     + cmdutil.confirmopts,
-    _('[-s REV | -b REV] [-d REV] [OPTION]'),
+    _(b'[-s REV | -b REV] [-d REV] [OPTION]'),
     helpcategory=command.CATEGORY_CHANGE_MANAGEMENT,
 )
 def rebase(ui, repo, **opts):
@@ -1009,21 +1020,21 @@
 
     """
     opts = pycompat.byteskwargs(opts)
-    inmemory = ui.configbool('rebase', 'experimental.inmemory')
-    dryrun = opts.get('dry_run')
-    confirm = opts.get('confirm')
-    selactions = [k for k in ['abort', 'stop', 'continue'] if opts.get(k)]
+    inmemory = ui.configbool(b'rebase', b'experimental.inmemory')
+    dryrun = opts.get(b'dry_run')
+    confirm = opts.get(b'confirm')
+    selactions = [k for k in [b'abort', b'stop', b'continue'] if opts.get(k)]
     if len(selactions) > 1:
         raise error.Abort(
-            _('cannot use --%s with --%s') % tuple(selactions[:2])
+            _(b'cannot use --%s with --%s') % tuple(selactions[:2])
         )
     action = selactions[0] if selactions else None
     if dryrun and action:
-        raise error.Abort(_('cannot specify both --dry-run and --%s') % action)
+        raise error.Abort(_(b'cannot specify both --dry-run and --%s') % action)
     if confirm and action:
-        raise error.Abort(_('cannot specify both --confirm and --%s') % action)
+        raise error.Abort(_(b'cannot specify both --confirm and --%s') % action)
     if dryrun and confirm:
-        raise error.Abort(_('cannot specify both --confirm and --dry-run'))
+        raise error.Abort(_(b'cannot specify both --confirm and --dry-run'))
 
     if action or repo.currenttransaction() is not None:
         # in-memory rebase is not compatible with resuming rebases.
@@ -1031,35 +1042,35 @@
         # fail the entire transaction.)
         inmemory = False
 
-    if opts.get('auto_orphans'):
+    if opts.get(b'auto_orphans'):
         for key in opts:
-            if key != 'auto_orphans' and opts.get(key):
+            if key != b'auto_orphans' and opts.get(key):
                 raise error.Abort(
-                    _('--auto-orphans is incompatible with %s') % ('--' + key)
+                    _(b'--auto-orphans is incompatible with %s') % (b'--' + key)
                 )
-        userrevs = list(repo.revs(opts.get('auto_orphans')))
-        opts['rev'] = [revsetlang.formatspec('%ld and orphan()', userrevs)]
-        opts['dest'] = '_destautoorphanrebase(SRC)'
+        userrevs = list(repo.revs(opts.get(b'auto_orphans')))
+        opts[b'rev'] = [revsetlang.formatspec(b'%ld and orphan()', userrevs)]
+        opts[b'dest'] = b'_destautoorphanrebase(SRC)'
 
     if dryrun or confirm:
         return _dryrunrebase(ui, repo, action, opts)
-    elif action == 'stop':
+    elif action == b'stop':
         rbsrt = rebaseruntime(repo, ui)
         with repo.wlock(), repo.lock():
             rbsrt.restorestatus()
             if rbsrt.collapsef:
-                raise error.Abort(_("cannot stop in --collapse session"))
+                raise error.Abort(_(b"cannot stop in --collapse session"))
             allowunstable = obsolete.isenabled(repo, obsolete.allowunstableopt)
             if not (rbsrt.keepf or allowunstable):
                 raise error.Abort(
                     _(
-                        "cannot remove original changesets with"
-                        " unrebased descendants"
+                        b"cannot remove original changesets with"
+                        b" unrebased descendants"
                     ),
                     hint=_(
-                        'either enable obsmarkers to allow unstable '
-                        'revisions or use --keep to keep original '
-                        'changesets'
+                        b'either enable obsmarkers to allow unstable '
+                        b'revisions or use --keep to keep original '
+                        b'changesets'
                     ),
                 )
             if needupdate(repo, rbsrt.state):
@@ -1072,14 +1083,14 @@
         try:
             # in-memory merge doesn't support conflicts, so if we hit any, abort
             # and re-run as an on-disk merge.
-            overrides = {('rebase', 'singletransaction'): True}
-            with ui.configoverride(overrides, 'rebase'):
+            overrides = {(b'rebase', b'singletransaction'): True}
+            with ui.configoverride(overrides, b'rebase'):
                 return _dorebase(ui, repo, action, opts, inmemory=inmemory)
         except error.InMemoryMergeConflictsError:
             ui.warn(
                 _(
-                    'hit merge conflicts; re-running rebase without in-memory'
-                    ' merge\n'
+                    b'hit merge conflicts; re-running rebase without in-memory'
+                    b' merge\n'
                 )
             )
             # TODO: Make in-memory merge not use the on-disk merge state, so
@@ -1094,18 +1105,18 @@
 
 def _dryrunrebase(ui, repo, action, opts):
     rbsrt = rebaseruntime(repo, ui, inmemory=True, opts=opts)
-    confirm = opts.get('confirm')
+    confirm = opts.get(b'confirm')
     if confirm:
-        ui.status(_('starting in-memory rebase\n'))
+        ui.status(_(b'starting in-memory rebase\n'))
     else:
         ui.status(
-            _('starting dry-run rebase; repository will not be ' 'changed\n')
+            _(b'starting dry-run rebase; repository will not be ' b'changed\n')
         )
     with repo.wlock(), repo.lock():
         needsabort = True
         try:
-            overrides = {('rebase', 'singletransaction'): True}
-            with ui.configoverride(overrides, 'rebase'):
+            overrides = {(b'rebase', b'singletransaction'): True}
+            with ui.configoverride(overrides, b'rebase'):
                 _origrebase(
                     ui,
                     repo,
@@ -1116,14 +1127,14 @@
                     leaveunfinished=True,
                 )
         except error.InMemoryMergeConflictsError:
-            ui.status(_('hit a merge conflict\n'))
+            ui.status(_(b'hit a merge conflict\n'))
             return 1
         except error.Abort:
             needsabort = False
             raise
         else:
             if confirm:
-                ui.status(_('rebase completed successfully\n'))
+                ui.status(_(b'rebase completed successfully\n'))
                 if not ui.promptchoice(
                     _(b'apply changes (yn)?' b'$$ &Yes $$ &No')
                 ):
@@ -1137,8 +1148,8 @@
             else:
                 ui.status(
                     _(
-                        'dry-run rebase completed successfully; run without'
-                        ' -n/--dry-run to perform this rebase\n'
+                        b'dry-run rebase completed successfully; run without'
+                        b' -n/--dry-run to perform this rebase\n'
                     )
                 )
             return 0
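
`ui.configoverride(overrides, b'rebase')`, used throughout these hunks, temporarily forces config values keyed by `(section, name)` tuples for the duration of a block. A stand-in with the same shape (sketch only, not the real `ui` method; restore-on-exit semantics are simplified):

import contextlib

@contextlib.contextmanager
def configoverride_sketch(config, overrides):
    # Apply (section, name) -> value overrides to a plain dict,
    # restoring the previous values on exit.
    saved = {k: config.get(k) for k in overrides}
    config.update(overrides)
    try:
        yield
    finally:
        for k, v in saved.items():
            if v is None:
                config.pop(k, None)
            else:
                config[k] = v

config = {}
with configoverride_sketch(config, {(b'rebase', b'singletransaction'): True}):
    assert config[(b'rebase', b'singletransaction')] is True
assert (b'rebase', b'singletransaction') not in config
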
@@ -1158,51 +1169,53 @@
 def _origrebase(
     ui, repo, action, opts, rbsrt, inmemory=False, leaveunfinished=False
 ):
-    assert action != 'stop'
+    assert action != b'stop'
     with repo.wlock(), repo.lock():
         # Validate input and define rebasing points
-        destf = opts.get('dest', None)
-        srcf = opts.get('source', None)
-        basef = opts.get('base', None)
-        revf = opts.get('rev', [])
+        destf = opts.get(b'dest', None)
+        srcf = opts.get(b'source', None)
+        basef = opts.get(b'base', None)
+        revf = opts.get(b'rev', [])
         # search default destination in this space
         # used in the 'hg pull --rebase' case, see issue 5214.
-        destspace = opts.get('_destspace')
-        if opts.get('interactive'):
+        destspace = opts.get(b'_destspace')
+        if opts.get(b'interactive'):
             try:
-                if extensions.find('histedit'):
-                    enablehistedit = ''
+                if extensions.find(b'histedit'):
+                    enablehistedit = b''
             except KeyError:
-                enablehistedit = " --config extensions.histedit="
-            help = "hg%s help -e histedit" % enablehistedit
+                enablehistedit = b" --config extensions.histedit="
+            help = b"hg%s help -e histedit" % enablehistedit
             msg = (
                 _(
-                    "interactive history editing is supported by the "
-                    "'histedit' extension (see \"%s\")"
+                    b"interactive history editing is supported by the "
+                    b"'histedit' extension (see \"%s\")"
                 )
                 % help
             )
             raise error.Abort(msg)
 
         if rbsrt.collapsemsg and not rbsrt.collapsef:
-            raise error.Abort(_('message can only be specified with collapse'))
+            raise error.Abort(_(b'message can only be specified with collapse'))
 
         if action:
             if rbsrt.collapsef:
                 raise error.Abort(
-                    _('cannot use collapse with continue or abort')
+                    _(b'cannot use collapse with continue or abort')
                 )
             if srcf or basef or destf:
                 raise error.Abort(
-                    _('abort and continue do not allow specifying revisions')
+                    _(b'abort and continue do not allow specifying revisions')
                 )
-            if action == 'abort' and opts.get('tool', False):
-                ui.warn(_('tool option will be ignored\n'))
-            if action == 'continue':
+            if action == b'abort' and opts.get(b'tool', False):
+                ui.warn(_(b'tool option will be ignored\n'))
+            if action == b'continue':
                 ms = mergemod.mergestate.read(repo)
                 mergeutil.checkunresolved(ms)
 
-            retcode = rbsrt._prepareabortorcontinue(isabort=(action == 'abort'))
+            retcode = rbsrt._prepareabortorcontinue(
+                isabort=(action == b'abort')
+            )
             if retcode is not None:
                 return retcode
         else:
@@ -1223,9 +1236,9 @@
 
         tr = None
 
-        singletr = ui.configbool('rebase', 'singletransaction')
+        singletr = ui.configbool(b'rebase', b'singletransaction')
         if singletr:
-            tr = repo.transaction('rebase')
+            tr = repo.transaction(b'rebase')
 
         # If `rebase.singletransaction` is enabled, wrap the entire operation in
         # one transaction here. Otherwise, transactions are obtained when
@@ -1235,7 +1248,7 @@
             # rebasing in-memory (it's not needed).
             dsguard = None
             if singletr and not inmemory:
-                dsguard = dirstateguard.dirstateguard(repo, 'rebase')
+                dsguard = dirstateguard.dirstateguard(repo, b'rebase')
             with util.acceptintervention(dsguard):
                 rbsrt._performrebase(tr)
                 if not leaveunfinished:
@@ -1259,19 +1272,20 @@
     # destspace is here to work around issues with `hg pull --rebase` see
     # issue5214 for details
     if srcf and basef:
-        raise error.Abort(_('cannot specify both a source and a base'))
+        raise error.Abort(_(b'cannot specify both a source and a base'))
     if revf and basef:
-        raise error.Abort(_('cannot specify both a revision and a base'))
+        raise error.Abort(_(b'cannot specify both a revision and a base'))
     if revf and srcf:
-        raise error.Abort(_('cannot specify both a revision and a source'))
+        raise error.Abort(_(b'cannot specify both a revision and a source'))
 
     if not inmemory:
         cmdutil.checkunfinished(repo)
         cmdutil.bailifchanged(repo)
 
-    if ui.configbool('commands', 'rebase.requiredest') and not destf:
+    if ui.configbool(b'commands', b'rebase.requiredest') and not destf:
         raise error.Abort(
-            _('you must specify a destination'), hint=_('use: hg rebase -d REV')
+            _(b'you must specify a destination'),
+            hint=_(b'use: hg rebase -d REV'),
         )
 
     dest = None
@@ -1279,20 +1293,20 @@
     if revf:
         rebaseset = scmutil.revrange(repo, revf)
         if not rebaseset:
-            ui.status(_('empty "rev" revision set - nothing to rebase\n'))
+            ui.status(_(b'empty "rev" revision set - nothing to rebase\n'))
             return None
     elif srcf:
         src = scmutil.revrange(repo, [srcf])
         if not src:
-            ui.status(_('empty "source" revision set - nothing to rebase\n'))
+            ui.status(_(b'empty "source" revision set - nothing to rebase\n'))
             return None
-        rebaseset = repo.revs('(%ld)::', src)
+        rebaseset = repo.revs(b'(%ld)::', src)
         assert rebaseset
     else:
-        base = scmutil.revrange(repo, [basef or '.'])
+        base = scmutil.revrange(repo, [basef or b'.'])
         if not base:
             ui.status(
-                _('empty "base" revision set - ' "can't compute rebase set\n")
+                _(b'empty "base" revision set - ' b"can't compute rebase set\n")
             )
             return None
         if destf:
@@ -1305,16 +1319,16 @@
         roots = []  # selected children of branching points
         bpbase = {}  # {branchingpoint: [origbase]}
         for b in base:  # group bases by branching points
-            bp = repo.revs('ancestor(%d, %d)', b, dest.rev()).first()
+            bp = repo.revs(b'ancestor(%d, %d)', b, dest.rev()).first()
             bpbase[bp] = bpbase.get(bp, []) + [b]
         if None in bpbase:
             # emulate the old behavior, showing "nothing to rebase" (a better
             # behavior may be abort with "cannot find branching point" error)
             bpbase.clear()
         for bp, bs in bpbase.iteritems():  # calculate roots
-            roots += list(repo.revs('children(%d) & ancestors(%ld)', bp, bs))
+            roots += list(repo.revs(b'children(%d) & ancestors(%ld)', bp, bs))
 
-        rebaseset = repo.revs('%ld::', roots)
+        rebaseset = repo.revs(b'%ld::', roots)
 
         if not rebaseset:
             # transform to list because smartsets are not comparable to
@@ -1324,48 +1338,48 @@
                 if basef:
                     ui.status(
                         _(
-                            'nothing to rebase - %s is both "base"'
-                            ' and destination\n'
+                            b'nothing to rebase - %s is both "base"'
+                            b' and destination\n'
                         )
                         % dest
                     )
                 else:
                     ui.status(
                         _(
-                            'nothing to rebase - working directory '
-                            'parent is also destination\n'
+                            b'nothing to rebase - working directory '
+                            b'parent is also destination\n'
                         )
                     )
-            elif not repo.revs('%ld - ::%d', base, dest.rev()):
+            elif not repo.revs(b'%ld - ::%d', base, dest.rev()):
                 if basef:
                     ui.status(
                         _(
-                            'nothing to rebase - "base" %s is '
-                            'already an ancestor of destination '
-                            '%s\n'
+                            b'nothing to rebase - "base" %s is '
+                            b'already an ancestor of destination '
+                            b'%s\n'
                         )
-                        % ('+'.join(bytes(repo[r]) for r in base), dest)
+                        % (b'+'.join(bytes(repo[r]) for r in base), dest)
                     )
                 else:
                     ui.status(
                         _(
-                            'nothing to rebase - working '
-                            'directory parent is already an '
-                            'ancestor of destination %s\n'
+                            b'nothing to rebase - working '
+                            b'directory parent is already an '
+                            b'ancestor of destination %s\n'
                         )
                         % dest
                     )
             else:  # can it happen?
                 ui.status(
-                    _('nothing to rebase from %s to %s\n')
-                    % ('+'.join(bytes(repo[r]) for r in base), dest)
+                    _(b'nothing to rebase from %s to %s\n')
+                    % (b'+'.join(bytes(repo[r]) for r in base), dest)
                 )
             return None
 
-    rebasingwcp = repo['.'].rev() in rebaseset
+    rebasingwcp = repo[b'.'].rev() in rebaseset
     ui.log(
-        "rebase",
-        "rebasing working copy parent: %r\n",
+        b"rebase",
+        b"rebasing working copy parent: %r\n",
         rebasingwcp,
         rebase_rebasing_wcp=rebasingwcp,
     )
@@ -1378,8 +1392,8 @@
         dest = repo[_destrebase(repo, rebaseset, destspace=destspace)]
         destf = bytes(dest)
 
-    allsrc = revsetlang.formatspec('%ld', rebaseset)
-    alias = {'ALLSRC': allsrc}
+    allsrc = revsetlang.formatspec(b'%ld', rebaseset)
+    alias = {b'ALLSRC': allsrc}
 
     if dest is None:
         try:
@@ -1389,7 +1403,7 @@
             # multi-dest path: resolve dest for each SRC separately
             destmap = {}
             for r in rebaseset:
-                alias['SRC'] = revsetlang.formatspec('%d', r)
+                alias[b'SRC'] = revsetlang.formatspec(b'%d', r)
                 # use repo.anyrevs instead of scmutil.revsingle because we
                 # don't want to abort if destset is empty.
                 destset = repo.anyrevs([destf], user=True, localalias=alias)
@@ -1397,10 +1411,10 @@
                 if size == 1:
                     destmap[r] = destset.first()
                 elif size == 0:
-                    ui.note(_('skipping %s - empty destination\n') % repo[r])
+                    ui.note(_(b'skipping %s - empty destination\n') % repo[r])
                 else:
                     raise error.Abort(
-                        _('rebase destination for %s is not ' 'unique')
+                        _(b'rebase destination for %s is not ' b'unique')
                         % repo[r]
                     )
 
@@ -1410,7 +1424,7 @@
         destmap = {r: destrev for r in rebaseset}  # {srcrev: destrev}
 
     if not destmap:
-        ui.status(_('nothing to rebase - empty destination\n'))
+        ui.status(_(b'nothing to rebase - empty destination\n'))
         return None
 
     return destmap
@@ -1435,10 +1449,10 @@
         return parents.pop()
     raise error.Abort(
         _(
-            'unable to collapse on top of %d, there is more '
-            'than one external parent: %s'
+            b'unable to collapse on top of %d, there is more '
+            b'than one external parent: %s'
         )
-        % (max(destancestors), ', '.join("%d" % p for p in sorted(parents)))
+        % (max(destancestors), b', '.join(b"%d" % p for p in sorted(parents)))
     )
 
 
@@ -1446,14 +1460,14 @@
     '''Commit the memory changes with parents p1 and p2.
     Return node of committed revision.'''
     # Replicates the empty check in ``repo.commit``.
-    if wctx.isempty() and not repo.ui.configbool('ui', 'allowemptycommit'):
+    if wctx.isempty() and not repo.ui.configbool(b'ui', b'allowemptycommit'):
         return None
 
     # By convention, ``extra['branch']`` (set by extrafn) clobbers
     # ``branch`` (used when passing ``--keepbranches``).
     branch = repo[p1].branch()
-    if 'branch' in extra:
-        branch = extra['branch']
+    if b'branch' in extra:
+        branch = extra[b'branch']
 
     memctx = wctx.tomemctx(
         commitmsg,
@@ -1473,8 +1487,8 @@
     '''Commit the wd changes with parents p1 and p2.
     Return node of committed revision.'''
     dsguard = util.nullcontextmanager()
-    if not repo.ui.configbool('rebase', 'singletransaction'):
-        dsguard = dirstateguard.dirstateguard(repo, 'rebase')
+    if not repo.ui.configbool(b'rebase', b'singletransaction'):
+        dsguard = dirstateguard.dirstateguard(repo, b'rebase')
     with dsguard:
         repo.setparents(repo[p1].node(), repo[p2].node())
 
@@ -1488,24 +1502,24 @@
 
 
 def rebasenode(repo, rev, p1, base, collapse, dest, wctx):
-    'Rebase a single revision rev on top of p1 using base as merge ancestor'
+    b'Rebase a single revision rev on top of p1 using base as merge ancestor'
     # Merge phase
     # Update to destination and merge it with local
     if wctx.isinmemory():
         wctx.setbase(repo[p1])
     else:
-        if repo['.'].rev() != p1:
-            repo.ui.debug(" update to %d:%s\n" % (p1, repo[p1]))
+        if repo[b'.'].rev() != p1:
+            repo.ui.debug(b" update to %d:%s\n" % (p1, repo[p1]))
             mergemod.update(repo, p1, branchmerge=False, force=True)
         else:
-            repo.ui.debug(" already in destination\n")
+            repo.ui.debug(b" already in destination\n")
         # This is, alas, necessary to invalidate workingctx's manifest cache,
         # as well as other data we litter on it in other places.
         wctx = repo[None]
         repo.dirstate.write(repo.currenttransaction())
-    repo.ui.debug(" merge against %d:%s\n" % (rev, repo[rev]))
+    repo.ui.debug(b" merge against %d:%s\n" % (rev, repo[rev]))
     if base is not None:
-        repo.ui.debug("   detach base %d:%s\n" % (base, repo[base]))
+        repo.ui.debug(b"   detach base %d:%s\n" % (base, repo[base]))
     # When collapsing in-place, the parent is the common ancestor, we
     # have to allow merging with it.
     stats = mergemod.update(
@@ -1515,7 +1529,7 @@
         force=True,
         ancestor=base,
         mergeancestor=collapse,
-        labels=['dest', 'source'],
+        labels=[b'dest', b'source'],
         wc=wctx,
     )
     if collapse:
@@ -1595,7 +1609,7 @@
     for prev in repo.changelog.parentrevs(rev):
         adjusted = dest
         if prev != nullrev:
-            candidate = repo.revs('max(%ld and (::%d))', source, prev).first()
+            candidate = repo.revs(b'max(%ld and (::%d))', source, prev).first()
             if candidate is not None:
                 adjusted = state[candidate]
         if adjusted == dest and dest in state:
@@ -1603,7 +1617,7 @@
             if adjusted == revtodo:
                 # sortsource should produce an order that makes this impossible
                 raise error.ProgrammingError(
-                    'rev %d should be rebased already at this time' % dest
+                    b'rev %d should be rebased already at this time' % dest
                 )
         result.append(adjusted)
     return result
@@ -1618,17 +1632,17 @@
     successors in destination or no non-obsolete successor.
     """
     # Obsolete node with successors not in dest leads to divergence
-    divergenceok = ui.configbool('experimental', 'evolution.allowdivergence')
+    divergenceok = ui.configbool(b'experimental', b'evolution.allowdivergence')
     divergencebasecandidates = rebaseobsrevs - rebaseobsskipped
 
     if divergencebasecandidates and not divergenceok:
         divhashes = (bytes(repo[r]) for r in divergencebasecandidates)
-        msg = _("this rebase will cause " "divergences from: %s")
+        msg = _(b"this rebase will cause " b"divergences from: %s")
         h = _(
-            "to force the rebase please set "
-            "experimental.evolution.allowdivergence=True"
+            b"to force the rebase please set "
+            b"experimental.evolution.allowdivergence=True"
         )
-        raise error.Abort(msg % (",".join(divhashes),), hint=h)
+        raise error.Abort(msg % (b",".join(divhashes),), hint=h)
 
 
 def successorrevs(unfi, rev):
@@ -1748,8 +1762,8 @@
         if set(newps) == set(oldps) and dest not in newps:
             raise error.Abort(
                 _(
-                    'cannot rebase %d:%s without '
-                    'moving at least one of its parents'
+                    b'cannot rebase %d:%s without '
+                    b'moving at least one of its parents'
                 )
                 % (rev, repo[rev])
             )
@@ -1758,7 +1772,7 @@
     # impossible. With multi-dest, the initial check does not cover complex
     # cases since we don't have abstractions to dry-run rebase cheaply.
     if any(p != nullrev and isancestor(rev, p) for p in newps):
-        raise error.Abort(_('source is ancestor of destination'))
+        raise error.Abort(_(b'source is ancestor of destination'))
 
     # "rebasenode" updates to new p1, use the corresponding merge base.
     if bases[0] != nullrev:
@@ -1789,7 +1803,7 @@
             # Revisions in the side (not chosen as merge base) branch that
             # might contain "surprising" contents
             siderevs = list(
-                repo.revs('((%ld-%d) %% (%d+%d))', bases, base, base, dest)
+                repo.revs(b'((%ld-%d) %% (%d+%d))', bases, base, base, dest)
             )
 
             # If those revisions are covered by rebaseset, the result is good.
@@ -1803,7 +1817,7 @@
                 ]
                 unwanted[i] = list(
                     repo.revs(
-                        '%ld - (::%ld) - %ld', siderevs, merges, rebaseset
+                        b'%ld - (::%ld) - %ld', siderevs, merges, rebaseset
                     )
                 )
 
@@ -1825,34 +1839,34 @@
         # The merge will include unwanted revisions. Abort now. Revisit this if
         # we have a more advanced merge algorithm that handles multiple bases.
         if l > 0:
-            unwanteddesc = _(' or ').join(
+            unwanteddesc = _(b' or ').join(
                 (
-                    ', '.join('%d:%s' % (r, repo[r]) for r in revs)
+                    b', '.join(b'%d:%s' % (r, repo[r]) for r in revs)
                     for revs in unwanted
                     if revs is not None
                 )
             )
             raise error.Abort(
-                _('rebasing %d:%s will include unwanted changes from %s')
+                _(b'rebasing %d:%s will include unwanted changes from %s')
                 % (rev, repo[rev], unwanteddesc)
             )
 
-    repo.ui.debug(" future parents are %d and %d\n" % tuple(newps))
+    repo.ui.debug(b" future parents are %d and %d\n" % tuple(newps))
 
     return newps[0], newps[1], base
 
 
 def isagitpatch(repo, patchname):
-    'Return true if the given patch is in git format'
+    b'Return true if the given patch is in git format'
     mqpatch = os.path.join(repo.mq.path, patchname)
-    for line in patch.linereader(open(mqpatch, 'rb')):
-        if line.startswith('diff --git'):
+    for line in patch.linereader(open(mqpatch, b'rb')):
+        if line.startswith(b'diff --git'):
             return True
     return False
 
 
 def updatemq(repo, state, skipped, **opts):
-    'Update rebased mq patches - finalize and then import them'
+    b'Update rebased mq patches - finalize and then import them'
     mqrebase = {}
     mq = repo.mq
     original_series = mq.fullseries[:]
@@ -1862,7 +1876,7 @@
         rev = repo[p.node].rev()
         if rev in state:
             repo.ui.debug(
-                'revision %d is an mq patch (%s), finalize it.\n'
+                b'revision %d is an mq patch (%s), finalize it.\n'
                 % (rev, p.name)
             )
             mqrebase[rev] = (p.name, isagitpatch(repo, p.name))
@@ -1878,11 +1892,15 @@
             if rev not in skipped:
                 name, isgit = mqrebase[rev]
                 repo.ui.note(
-                    _('updating mq patch %s to %d:%s\n')
+                    _(b'updating mq patch %s to %d:%s\n')
                     % (name, state[rev], repo[state[rev]])
                 )
                 mq.qimport(
-                    repo, (), patchname=name, git=isgit, rev=["%d" % state[rev]]
+                    repo,
+                    (),
+                    patchname=name,
+                    git=isgit,
+                    rev=[b"%d" % state[rev]],
                 )
             else:
                 # Rebased and skipped
@@ -1902,22 +1920,22 @@
 
 
 def storecollapsemsg(repo, collapsemsg):
-    'Store the collapse message to allow recovery'
-    collapsemsg = collapsemsg or ''
-    f = repo.vfs("last-message.txt", "w")
-    f.write("%s\n" % collapsemsg)
+    b'Store the collapse message to allow recovery'
+    collapsemsg = collapsemsg or b''
+    f = repo.vfs(b"last-message.txt", b"w")
+    f.write(b"%s\n" % collapsemsg)
     f.close()
 
 
 def clearcollapsemsg(repo):
-    'Remove collapse message file'
-    repo.vfs.unlinkpath("last-message.txt", ignoremissing=True)
+    b'Remove collapse message file'
+    repo.vfs.unlinkpath(b"last-message.txt", ignoremissing=True)
 
 
 def restorecollapsemsg(repo, isabort):
-    'Restore previously stored collapse message'
+    b'Restore previously stored collapse message'
     try:
-        f = repo.vfs("last-message.txt")
+        f = repo.vfs(b"last-message.txt")
         collapsemsg = f.readline().strip()
         f.close()
     except IOError as err:
@@ -1925,19 +1943,19 @@
             raise
         if isabort:
             # Oh well, just abort like normal
-            collapsemsg = ''
+            collapsemsg = b''
         else:
-            raise error.Abort(_('missing .hg/last-message.txt for rebase'))
+            raise error.Abort(_(b'missing .hg/last-message.txt for rebase'))
     return collapsemsg
 
 
 def clearstatus(repo):
-    'Remove the status files'
+    b'Remove the status files'
     # Make sure the active transaction won't write the state file
     tr = repo.currenttransaction()
     if tr:
-        tr.removefilegenerator('rebasestate')
-    repo.vfs.unlinkpath("rebasestate", ignoremissing=True)
+        tr.removefilegenerator(b'rebasestate')
+    repo.vfs.unlinkpath(b"rebasestate", ignoremissing=True)
 
 
 def needupdate(repo, state):
@@ -1980,7 +1998,7 @@
             if destmap[r] not in srcset:
                 result.append(r)
         if not result:
-            raise error.Abort(_('source and destination form a cycle'))
+            raise error.Abort(_(b'source and destination form a cycle'))
         srcset -= set(result)
         yield result
 
@@ -1992,28 +2010,28 @@
     destmap: {srcrev: destrev}
     '''
     rebaseset = destmap.keys()
-    originalwd = repo['.'].rev()
+    originalwd = repo[b'.'].rev()
 
     # This check isn't strictly necessary, since mq detects commits over an
     # applied patch. But it prevents messing up the working directory when
     # a partially completed rebase is blocked by mq.
-    if 'qtip' in repo.tags():
+    if b'qtip' in repo.tags():
         mqapplied = set(repo[s.node].rev() for s in repo.mq.applied)
         if set(destmap.values()) & mqapplied:
-            raise error.Abort(_('cannot rebase onto an applied mq patch'))
+            raise error.Abort(_(b'cannot rebase onto an applied mq patch'))
 
     # Get "cycle" error early by exhausting the generator.
     sortedsrc = list(sortsource(destmap))  # a list of sorted revs
     if not sortedsrc:
-        raise error.Abort(_('no matching revisions'))
+        raise error.Abort(_(b'no matching revisions'))
 
     # Only check the first batch of revisions to rebase not depending on other
     # rebaseset. This means "source is ancestor of destination" for the second
     # (and following) batches of revisions are not checked here. We rely on
     # "defineparents" to do that check.
-    roots = list(repo.set('roots(%ld)', sortedsrc[0]))
+    roots = list(repo.set(b'roots(%ld)', sortedsrc[0]))
     if not roots:
-        raise error.Abort(_('no matching revisions'))
+        raise error.Abort(_(b'no matching revisions'))
 
     def revof(r):
         return r.rev()
@@ -2025,7 +2043,7 @@
         dest = repo[destmap[root.rev()]]
         commonbase = root.ancestor(dest)
         if commonbase == root:
-            raise error.Abort(_('source is ancestor of destination'))
+            raise error.Abort(_(b'source is ancestor of destination'))
         if commonbase == dest:
             wctx = repo[None]
             if dest == wctx.p1():
@@ -2037,11 +2055,11 @@
                 # mark the revision as done by setting its new revision
                 # equal to its old (current) revisions
                 state[root.rev()] = root.rev()
-                repo.ui.debug('source is a child of destination\n')
+                repo.ui.debug(b'source is a child of destination\n')
                 continue
 
         emptyrebase = False
-        repo.ui.debug('rebase onto %s starting from %s\n' % (dest, root))
+        repo.ui.debug(b'rebase onto %s starting from %s\n' % (dest, root))
     if emptyrebase:
         return None
     for rev in sorted(state):
@@ -2104,36 +2122,36 @@
         changes = {}
         for oldns, newn in replacements.iteritems():
             for oldn in oldns:
-                changes[hf(oldn)] = fl([hf(n) for n in newn], name='node')
-        nodechanges = fd(changes, key="oldnode", value="newnodes")
+                changes[hf(oldn)] = fl([hf(n) for n in newn], name=b'node')
+        nodechanges = fd(changes, key=b"oldnode", value=b"newnodes")
         fm.data(nodechanges=nodechanges)
     if keepf:
         replacements = {}
-    scmutil.cleanupnodes(repo, replacements, 'rebase', moves, backup=backup)
+    scmutil.cleanupnodes(repo, replacements, b'rebase', moves, backup=backup)
 
 
 def pullrebase(orig, ui, repo, *args, **opts):
-    'Call rebase after pull if the latter has been invoked with --rebase'
+    b'Call rebase after pull if the latter has been invoked with --rebase'
     if opts.get(r'rebase'):
-        if ui.configbool('commands', 'rebase.requiredest'):
-            msg = _('rebase destination required by configuration')
-            hint = _('use hg pull followed by hg rebase -d DEST')
+        if ui.configbool(b'commands', b'rebase.requiredest'):
+            msg = _(b'rebase destination required by configuration')
+            hint = _(b'use hg pull followed by hg rebase -d DEST')
             raise error.Abort(msg, hint=hint)
 
         with repo.wlock(), repo.lock():
             if opts.get(r'update'):
                 del opts[r'update']
                 ui.debug(
-                    '--update and --rebase are not compatible, ignoring '
-                    'the update flag\n'
+                    b'--update and --rebase are not compatible, ignoring '
+                    b'the update flag\n'
                 )
 
             cmdutil.checkunfinished(repo, skipmerge=True)
             cmdutil.bailifchanged(
                 repo,
                 hint=_(
-                    'cannot pull with rebase: '
-                    'please commit or shelve your changes first'
+                    b'cannot pull with rebase: '
+                    b'please commit or shelve your changes first'
                 ),
             )
 
@@ -2166,16 +2184,16 @@
                 except error.NoMergeDestAbort:
                     # we can maybe update instead
                     rev, _a, _b = destutil.destupdate(repo)
-                    if rev == repo['.'].rev():
-                        ui.status(_('nothing to rebase\n'))
+                    if rev == repo[b'.'].rev():
+                        ui.status(_(b'nothing to rebase\n'))
                     else:
-                        ui.status(_('nothing to rebase - updating instead\n'))
+                        ui.status(_(b'nothing to rebase - updating instead\n'))
                         # not passing argument to get the bare update behavior
                         # with warning and trumpets
                         commands.update(ui, repo)
     else:
         if opts.get(r'tool'):
-            raise error.Abort(_('--tool can only be used with --rebase'))
+            raise error.Abort(_(b'--tool can only be used with --rebase'))
         ret = orig(ui, repo, *args, **opts)
 
     return ret
@@ -2205,7 +2223,7 @@
     assert repo.filtername is None
     cl = repo.changelog
     nodemap = cl.nodemap
-    extinctrevs = set(repo.revs('extinct()'))
+    extinctrevs = set(repo.revs(b'extinct()'))
     for srcrev in rebaseobsrevs:
         srcnode = cl.node(srcrev)
         # XXX: more advanced APIs are required to handle split correctly
@@ -2258,7 +2276,7 @@
 
 
 def summaryhook(ui, repo):
-    if not repo.vfs.exists('rebasestate'):
+    if not repo.vfs.exists(b'rebasestate'):
         return
     try:
         rbsrt = rebaseruntime(repo, ui, {})
@@ -2266,16 +2284,16 @@
         state = rbsrt.state
     except error.RepoLookupError:
         # i18n: column positioning for "hg summary"
-        msg = _('rebase: (use "hg rebase --abort" to clear broken state)\n')
+        msg = _(b'rebase: (use "hg rebase --abort" to clear broken state)\n')
         ui.write(msg)
         return
     numrebased = len([i for i in state.itervalues() if i >= 0])
     # i18n: column positioning for "hg summary"
     ui.write(
-        _('rebase: %s, %s (rebase --continue)\n')
+        _(b'rebase: %s, %s (rebase --continue)\n')
         % (
-            ui.label(_('%d rebased'), 'rebase.rebased') % numrebased,
-            ui.label(_('%d remaining'), 'rebase.remaining')
+            ui.label(_(b'%d rebased'), b'rebase.rebased') % numrebased,
+            ui.label(_(b'%d remaining'), b'rebase.remaining')
             % (len(state) - numrebased),
         )
     )
@@ -2283,15 +2301,15 @@
 
 def uisetup(ui):
     # Replace pull with a decorator to provide --rebase option
-    entry = extensions.wrapcommand(commands.table, 'pull', pullrebase)
+    entry = extensions.wrapcommand(commands.table, b'pull', pullrebase)
     entry[1].append(
-        ('', 'rebase', None, _("rebase working directory to branch head"))
+        (b'', b'rebase', None, _(b"rebase working directory to branch head"))
     )
-    entry[1].append(('t', 'tool', '', _("specify merge tool for rebase")))
-    cmdutil.summaryhooks.add('rebase', summaryhook)
+    entry[1].append((b't', b'tool', b'', _(b"specify merge tool for rebase")))
+    cmdutil.summaryhooks.add(b'rebase', summaryhook)
     statemod.addunfinished(
-        'rebase',
-        fname='rebasestate',
+        b'rebase',
+        fname=b'rebasestate',
         stopflag=True,
         continueflag=True,
         abortfunc=abortrebase,
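A note on the pattern above: the converted rebase literals rely on bytes %-interpolation (b'%d:%s' % (rev, repo[rev]), b', '.join(...)), which only works because PEP 461 restored the % operator for bytes in Python 3.5. A minimal, self-contained sketch with illustrative names, not Mercurial code:

    # Bytes %-formatting per PEP 461 (Python >= 3.5), which the
    # b'...' % (...) patterns in the hunks above depend on.
    rev = 42
    node = b'687b865b95ad'
    print(b'%d:%s' % (rev, node))                  # b'42:687b865b95ad'
    print(b', '.join(b'%d' % r for r in (1, 2)))   # b'1, 2'
    # %s on a bytes format requires a bytes-like operand; passing a
    # str raises TypeError instead of being silently encoded.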
--- a/hgext/record.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/record.py	Sun Oct 06 09:48:39 2019 -0400
@@ -27,15 +27,15 @@
 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
 # be specifying the version(s) of Mercurial they are tested with, or
 # leave the attribute unspecified.
-testedwith = 'ships-with-hg-core'
+testedwith = b'ships-with-hg-core'
 
 
 @command(
-    "record",
+    b"record",
     # same options as commit + white space diff options
-    [c for c in commands.table['commit|ci'][1][:] if c[1] != "interactive"]
+    [c for c in commands.table[b'commit|ci'][1][:] if c[1] != b"interactive"]
     + cmdutil.diffwsopts,
-    _('hg record [OPTION]... [FILE]...'),
+    _(b'hg record [OPTION]... [FILE]...'),
     helpcategory=command.CATEGORY_COMMITTING,
 )
 def record(ui, repo, *pats, **opts):
@@ -69,12 +69,12 @@
 
     if not ui.interactive():
         raise error.Abort(
-            _('running non-interactively, use %s instead') % 'commit'
+            _(b'running non-interactively, use %s instead') % b'commit'
         )
 
     opts[r"interactive"] = True
-    overrides = {('experimental', 'crecord'): False}
-    with ui.configoverride(overrides, 'record'):
+    overrides = {(b'experimental', b'crecord'): False}
+    with ui.configoverride(overrides, b'record'):
         return commands.commit(ui, repo, *pats, **opts)
 
 
@@ -82,7 +82,7 @@
     if not opts[r'interactive']:
         return origfn(ui, repo, *pats, **opts)
 
-    mq = extensions.find('mq')
+    mq = extensions.find(b'mq')
 
     def committomq(ui, repo, *pats, **opts):
         # At this point the working copy contains only changes that
@@ -99,9 +99,9 @@
 
 # This command registration is replaced during uisetup().
 @command(
-    'qrecord',
+    b'qrecord',
     [],
-    _('hg qrecord [OPTION]... PATCH [FILE]...'),
+    _(b'hg qrecord [OPTION]... PATCH [FILE]...'),
     helpcategory=command.CATEGORY_COMMITTING,
     inferrepo=True,
 )
@@ -111,14 +111,14 @@
     See :hg:`help qnew` & :hg:`help record` for more information and
     usage.
     '''
-    return _qrecord('qnew', ui, repo, patch, *pats, **opts)
+    return _qrecord(b'qnew', ui, repo, patch, *pats, **opts)
 
 
 def _qrecord(cmdsuggest, ui, repo, patch, *pats, **opts):
     try:
-        mq = extensions.find('mq')
+        mq = extensions.find(b'mq')
     except KeyError:
-        raise error.Abort(_("'mq' extension not loaded"))
+        raise error.Abort(_(b"'mq' extension not loaded"))
 
     repo.mq.checkpatchname(patch)
 
@@ -126,8 +126,8 @@
         opts[r'checkname'] = False
         mq.new(ui, repo, patch, *pats, **opts)
 
-    overrides = {('experimental', 'crecord'): False}
-    with ui.configoverride(overrides, 'record'):
+    overrides = {(b'experimental', b'crecord'): False}
+    with ui.configoverride(overrides, b'record'):
         cmdutil.checkunfinished(repo)
         cmdutil.dorecord(
             ui,
@@ -149,27 +149,27 @@
 
 def uisetup(ui):
     try:
-        mq = extensions.find('mq')
+        mq = extensions.find(b'mq')
     except KeyError:
         return
 
-    cmdtable["qrecord"] = (
+    cmdtable[b"qrecord"] = (
         qrecord,
         # same options as qnew, but copy them so we don't get
         # -i/--interactive for qrecord and add white space diff options
-        mq.cmdtable['qnew'][1][:] + cmdutil.diffwsopts,
-        _('hg qrecord [OPTION]... PATCH [FILE]...'),
+        mq.cmdtable[b'qnew'][1][:] + cmdutil.diffwsopts,
+        _(b'hg qrecord [OPTION]... PATCH [FILE]...'),
     )
 
-    _wrapcmd('qnew', mq.cmdtable, qnew, _("interactively record a new patch"))
+    _wrapcmd(b'qnew', mq.cmdtable, qnew, _(b"interactively record a new patch"))
     _wrapcmd(
-        'qrefresh',
+        b'qrefresh',
         mq.cmdtable,
         qrefresh,
-        _("interactively select changes to refresh"),
+        _(b"interactively select changes to refresh"),
     )
 
 
 def _wrapcmd(cmd, table, wrapfn, msg):
     entry = extensions.wrapcommand(table, cmd, wrapfn)
-    entry[1].append(('i', 'interactive', None, msg))
+    entry[1].append((b'i', b'interactive', None, msg))
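Note that the record hunks byteify ordinary literals but leave **opts keys with their r'' prefix (opts[r'interactive'], opts[r'checkname']): keyword-argument names are always native str on Python 3, so those keys are deliberately kept as str. A small stand-in illustration, not Mercurial code:

    # Why **opts keys keep the r'' (native str) prefix while other
    # literals gain b''.
    def cmd(**opts):
        # Python 3 delivers keyword names as str, never bytes.
        return opts

    opts = cmd(interactive=True)
    assert opts[r'interactive'] is True      # str key: present
    assert opts.get(b'interactive') is None  # bytes key: never matches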
--- a/hgext/releasenotes.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/releasenotes.py	Sun Oct 06 09:48:39 2019 -0400
@@ -44,20 +44,20 @@
 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
 # be specifying the version(s) of Mercurial they are tested with, or
 # leave the attribute unspecified.
-testedwith = 'ships-with-hg-core'
+testedwith = b'ships-with-hg-core'
 
 DEFAULT_SECTIONS = [
-    ('feature', _('New Features')),
-    ('bc', _('Backwards Compatibility Changes')),
-    ('fix', _('Bug Fixes')),
-    ('perf', _('Performance Improvements')),
-    ('api', _('API Changes')),
+    (b'feature', _(b'New Features')),
+    (b'bc', _(b'Backwards Compatibility Changes')),
+    (b'fix', _(b'Bug Fixes')),
+    (b'perf', _(b'Performance Improvements')),
+    (b'api', _(b'API Changes')),
 ]
 
 RE_DIRECTIVE = re.compile(br'^\.\. ([a-zA-Z0-9_]+)::\s*([^$]+)?$')
 RE_ISSUE = br'\bissue ?[0-9]{4,6}(?![0-9])\b'
 
-BULLET_SECTION = _('Other Changes')
+BULLET_SECTION = _(b'Other Changes')
 
 
 class parsedreleasenotes(object):
@@ -105,8 +105,8 @@
         if not fuzz:
             ui.warn(
                 _(
-                    "module 'fuzzywuzzy' not found, merging of similar "
-                    "releasenotes is disabled\n"
+                    b"module 'fuzzywuzzy' not found, merging of similar "
+                    b"releasenotes is disabled\n"
                 )
             )
 
@@ -119,13 +119,13 @@
                     # TODO prompt for resolution if different and running in
                     # interactive mode.
                     ui.write(
-                        _('%s already exists in %s section; ignoring\n')
+                        _(b'%s already exists in %s section; ignoring\n')
                         % (title, section)
                     )
                     continue
 
                 incoming_str = converttitled([(title, paragraphs)])[0]
-                if section == 'fix':
+                if section == b'fix':
                     issue = getissuenum(incoming_str)
                     if issue:
                         if findissue(ui, existingnotes, issue):
@@ -141,7 +141,7 @@
                     continue
 
                 incoming_str = convertnontitled([paragraphs])[0]
-                if section == 'fix':
+                if section == b'fix':
                     issue = getissuenum(incoming_str)
                     if issue:
                         if findissue(ui, existingnotes, issue):
@@ -187,7 +187,7 @@
         lines = []
         for para in paragraphs:
             lines.extend(para)
-        string_list.append(' '.join(lines))
+        string_list.append(b' '.join(lines))
     return string_list
 
 
@@ -200,7 +200,7 @@
         lines = []
         for para in paragraphs:
             lines.extend(para)
-        string_list.append(' '.join(lines))
+        string_list.append(b' '.join(lines))
     return string_list
 
 
@@ -219,7 +219,7 @@
     Returns true if issue number already exists in notes.
     """
     if any(issue in s for s in existing):
-        ui.write(_('"%s" already exists in notes; ignoring\n') % issue)
+        ui.write(_(b'"%s" already exists in notes; ignoring\n') % issue)
         return True
     else:
         return False
@@ -233,7 +233,7 @@
         merge = similaritycheck(incoming_str, existing)
         if not merge:
             ui.write(
-                _('"%s" already exists in notes file; ignoring\n')
+                _(b'"%s" already exists in notes file; ignoring\n')
                 % incoming_str
             )
             return True
@@ -261,7 +261,7 @@
 
 
 def getcustomadmonitions(repo):
-    ctx = repo['.']
+    ctx = repo[b'.']
     p = config.config()
 
     def read(f, sections=None, remap=None):
@@ -270,12 +270,12 @@
             p.parse(f, data, sections, remap, read)
         else:
             raise error.Abort(
-                _(".hgreleasenotes file \'%s\' not found") % repo.pathto(f)
+                _(b".hgreleasenotes file \'%s\' not found") % repo.pathto(f)
             )
 
-    if '.hgreleasenotes' in ctx:
-        read('.hgreleasenotes')
-    return p['sections']
+    if b'.hgreleasenotes' in ctx:
+        read(b'.hgreleasenotes')
+    return p[b'sections']
 
 
 def checkadmonitions(ui, repo, directives, revs):
@@ -299,7 +299,7 @@
                 continue
             else:
                 ui.write(
-                    _("Invalid admonition '%s' present in changeset %s" "\n")
+                    _(b"Invalid admonition '%s' present in changeset %s" b"\n")
                     % (admonition.group(1), ctx.hex()[:12])
                 )
                 sim = lambda x: difflib.SequenceMatcher(
@@ -308,15 +308,15 @@
 
                 similar = [s for s in directives if sim(s) > 0.6]
                 if len(similar) == 1:
-                    ui.write(_("(did you mean %s?)\n") % similar[0])
+                    ui.write(_(b"(did you mean %s?)\n") % similar[0])
                 elif similar:
-                    ss = ", ".join(sorted(similar))
-                    ui.write(_("(did you mean one of %s?)\n") % ss)
+                    ss = b", ".join(sorted(similar))
+                    ui.write(_(b"(did you mean one of %s?)\n") % ss)
 
 
 def _getadmonitionlist(ui, sections):
     for section in sections:
-        ui.write("%s: %s\n" % (section[0], section[1]))
+        ui.write(b"%s: %s\n" % (section[0], section[1]))
 
 
 def parsenotesfromrevisions(repo, directives, revs):
@@ -330,17 +330,17 @@
         )
 
         for i, block in enumerate(blocks):
-            if block['type'] != 'admonition':
+            if block[b'type'] != b'admonition':
                 continue
 
-            directive = block['admonitiontitle']
-            title = block['lines'][0].strip() if block['lines'] else None
+            directive = block[b'admonitiontitle']
+            title = block[b'lines'][0].strip() if block[b'lines'] else None
 
             if i + 1 == len(blocks):
                 raise error.Abort(
                     _(
-                        'changeset %s: release notes directive %s '
-                        'lacks content'
+                        b'changeset %s: release notes directive %s '
+                        b'lacks content'
                     )
                     % (ctx, directive)
                 )
@@ -352,30 +352,30 @@
                 pblock = blocks[j]
 
                 # Margin blocks may appear between paragraphs. Ignore them.
-                if pblock['type'] == 'margin':
+                if pblock[b'type'] == b'margin':
                     continue
 
-                if pblock['type'] == 'admonition':
+                if pblock[b'type'] == b'admonition':
                     break
 
-                if pblock['type'] != 'paragraph':
+                if pblock[b'type'] != b'paragraph':
                     repo.ui.warn(
                         _(
-                            'changeset %s: unexpected block in release '
-                            'notes directive %s\n'
+                            b'changeset %s: unexpected block in release '
+                            b'notes directive %s\n'
                         )
                         % (ctx, directive)
                     )
 
-                if pblock['indent'] > 0:
-                    paragraphs.append(pblock['lines'])
+                if pblock[b'indent'] > 0:
+                    paragraphs.append(pblock[b'lines'])
                 else:
                     break
 
             # TODO consider using title as paragraph for more concise notes.
             if not paragraphs:
                 repo.ui.warn(
-                    _("error parsing releasenotes for revision: " "'%s'\n")
+                    _(b"error parsing releasenotes for revision: " b"'%s'\n")
                     % node.hex(ctx.node())
                 )
             if title:
@@ -398,51 +398,51 @@
         for i in range(offset + 1, len(blocks)):
             block = blocks[i]
 
-            if block['type'] == 'margin':
+            if block[b'type'] == b'margin':
                 continue
-            elif block['type'] == 'section':
+            elif block[b'type'] == b'section':
                 break
-            elif block['type'] == 'bullet':
-                if block['indent'] != 0:
-                    raise error.Abort(_('indented bullet lists not supported'))
+            elif block[b'type'] == b'bullet':
+                if block[b'indent'] != 0:
+                    raise error.Abort(_(b'indented bullet lists not supported'))
                 if title:
-                    lines = [l[1:].strip() for l in block['lines']]
+                    lines = [l[1:].strip() for l in block[b'lines']]
                     notefragment.append(lines)
                     continue
                 else:
-                    lines = [[l[1:].strip() for l in block['lines']]]
+                    lines = [[l[1:].strip() for l in block[b'lines']]]
 
                     for block in blocks[i + 1 :]:
-                        if block['type'] in ('bullet', 'section'):
+                        if block[b'type'] in (b'bullet', b'section'):
                             break
-                        if block['type'] == 'paragraph':
-                            lines.append(block['lines'])
+                        if block[b'type'] == b'paragraph':
+                            lines.append(block[b'lines'])
                     notefragment.append(lines)
                     continue
-            elif block['type'] != 'paragraph':
+            elif block[b'type'] != b'paragraph':
                 raise error.Abort(
-                    _('unexpected block type in release notes: ' '%s')
-                    % block['type']
+                    _(b'unexpected block type in release notes: ' b'%s')
+                    % block[b'type']
                 )
             if title:
-                notefragment.append(block['lines'])
+                notefragment.append(block[b'lines'])
 
         return notefragment
 
     currentsection = None
     for i, block in enumerate(blocks):
-        if block['type'] != 'section':
+        if block[b'type'] != b'section':
             continue
 
-        title = block['lines'][0]
+        title = block[b'lines'][0]
 
         # TODO the parsing around paragraphs and bullet points needs some
         # work.
-        if block['underline'] == '=':  # main section
+        if block[b'underline'] == b'=':  # main section
             name = sections.sectionfromtitle(title)
             if not name:
                 raise error.Abort(
-                    _('unknown release notes section: %s') % title
+                    _(b'unknown release notes section: %s') % title
                 )
 
             currentsection = name
@@ -451,7 +451,7 @@
                 for para in bullet_points:
                     notes.addnontitleditem(currentsection, para)
 
-        elif block['underline'] == '-':  # sub-section
+        elif block[b'underline'] == b'-':  # sub-section
             if title == BULLET_SECTION:
                 bullet_points = gatherparagraphsbullets(i)
                 for para in bullet_points:
@@ -460,7 +460,7 @@
                 paragraphs = gatherparagraphsbullets(i, True)
                 notes.addtitleditem(currentsection, title, paragraphs)
         else:
-            raise error.Abort(_('unsupported section type for %s') % title)
+            raise error.Abort(_(b'unsupported section type for %s') % title)
 
     return notes
 
@@ -478,23 +478,23 @@
             continue
 
         lines.append(sectiontitle)
-        lines.append('=' * len(sectiontitle))
-        lines.append('')
+        lines.append(b'=' * len(sectiontitle))
+        lines.append(b'')
 
         # First pass to emit sub-sections.
         for title, paragraphs in notes.titledforsection(sectionname):
             lines.append(title)
-            lines.append('-' * len(title))
-            lines.append('')
+            lines.append(b'-' * len(title))
+            lines.append(b'')
 
             for i, para in enumerate(paragraphs):
                 if i:
-                    lines.append('')
+                    lines.append(b'')
                 lines.extend(
-                    stringutil.wrap(' '.join(para), width=78).splitlines()
+                    stringutil.wrap(b' '.join(para), width=78).splitlines()
                 )
 
-            lines.append('')
+            lines.append(b'')
 
         # Second pass to emit bullet list items.
 
@@ -506,58 +506,64 @@
         if notes.titledforsection(sectionname) and nontitled:
             # TODO make configurable.
             lines.append(BULLET_SECTION)
-            lines.append('-' * len(BULLET_SECTION))
-            lines.append('')
+            lines.append(b'-' * len(BULLET_SECTION))
+            lines.append(b'')
 
         for paragraphs in nontitled:
             lines.extend(
                 stringutil.wrap(
-                    ' '.join(paragraphs[0]),
+                    b' '.join(paragraphs[0]),
                     width=78,
-                    initindent='* ',
-                    hangindent='  ',
+                    initindent=b'* ',
+                    hangindent=b'  ',
                 ).splitlines()
             )
 
             for para in paragraphs[1:]:
-                lines.append('')
+                lines.append(b'')
                 lines.extend(
                     stringutil.wrap(
-                        ' '.join(para),
+                        b' '.join(para),
                         width=78,
-                        initindent='  ',
-                        hangindent='  ',
+                        initindent=b'  ',
+                        hangindent=b'  ',
                     ).splitlines()
                 )
 
-            lines.append('')
+            lines.append(b'')
 
     if lines and lines[-1]:
-        lines.append('')
+        lines.append(b'')
 
-    return '\n'.join(lines)
+    return b'\n'.join(lines)
 
 
 @command(
-    'releasenotes',
+    b'releasenotes',
     [
-        ('r', 'rev', '', _('revisions to process for release notes'), _('REV')),
         (
-            'c',
-            'check',
-            False,
-            _('checks for validity of admonitions (if any)'),
-            _('REV'),
+            b'r',
+            b'rev',
+            b'',
+            _(b'revisions to process for release notes'),
+            _(b'REV'),
         ),
         (
-            'l',
-            'list',
+            b'c',
+            b'check',
             False,
-            _('list the available admonitions with their title'),
+            _(b'checks for validity of admonitions (if any)'),
+            _(b'REV'),
+        ),
+        (
+            b'l',
+            b'list',
+            False,
+            _(b'list the available admonitions with their title'),
             None,
         ),
     ],
-    _('hg releasenotes [-r REV] [-c] FILE'),
+    _(b'hg releasenotes [-r REV] [-c] FILE'),
     helpcategory=command.CATEGORY_CHANGE_NAVIGATION,
 )
 def releasenotes(ui, repo, file_=None, **opts):
@@ -646,29 +652,29 @@
     opts = pycompat.byteskwargs(opts)
     sections = releasenotessections(ui, repo)
 
-    listflag = opts.get('list')
+    listflag = opts.get(b'list')
 
-    if listflag and opts.get('rev'):
-        raise error.Abort(_('cannot use both \'--list\' and \'--rev\''))
-    if listflag and opts.get('check'):
-        raise error.Abort(_('cannot use both \'--list\' and \'--check\''))
+    if listflag and opts.get(b'rev'):
+        raise error.Abort(_(b'cannot use both \'--list\' and \'--rev\''))
+    if listflag and opts.get(b'check'):
+        raise error.Abort(_(b'cannot use both \'--list\' and \'--check\''))
 
     if listflag:
         return _getadmonitionlist(ui, sections)
 
-    rev = opts.get('rev')
-    revs = scmutil.revrange(repo, [rev or 'not public()'])
-    if opts.get('check'):
+    rev = opts.get(b'rev')
+    revs = scmutil.revrange(repo, [rev or b'not public()'])
+    if opts.get(b'check'):
         return checkadmonitions(ui, repo, sections.names(), revs)
 
     incoming = parsenotesfromrevisions(repo, sections.names(), revs)
 
     if file_ is None:
-        ui.pager('releasenotes')
+        ui.pager(b'releasenotes')
         return ui.write(serializenotes(sections, incoming))
 
     try:
-        with open(file_, 'rb') as fh:
+        with open(file_, b'rb') as fh:
             notes = parsereleasenotesfile(sections, fh.read())
     except IOError as e:
         if e.errno != errno.ENOENT:
@@ -678,17 +684,17 @@
 
     notes.merge(ui, incoming)
 
-    with open(file_, 'wb') as fh:
+    with open(file_, b'wb') as fh:
         fh.write(serializenotes(sections, notes))
 
 
-@command('debugparsereleasenotes', norepo=True)
+@command(b'debugparsereleasenotes', norepo=True)
 def debugparsereleasenotes(ui, path, repo=None):
     """parse release notes and print resulting data structure"""
-    if path == '-':
+    if path == b'-':
         text = pycompat.stdin.read()
     else:
-        with open(path, 'rb') as fh:
+        with open(path, b'rb') as fh:
             text = fh.read()
 
     sections = releasenotessections(ui, repo)
@@ -696,13 +702,13 @@
     notes = parsereleasenotesfile(sections, text)
 
     for section in notes:
-        ui.write(_('section: %s\n') % section)
+        ui.write(_(b'section: %s\n') % section)
         for title, paragraphs in notes.titledforsection(section):
-            ui.write(_('  subsection: %s\n') % title)
+            ui.write(_(b'  subsection: %s\n') % title)
             for para in paragraphs:
-                ui.write(_('    paragraph: %s\n') % ' '.join(para))
+                ui.write(_(b'    paragraph: %s\n') % b' '.join(para))
 
         for paragraphs in notes.nontitledforsection(section):
-            ui.write(_('  bullet point:\n'))
+            ui.write(_(b'  bullet point:\n'))
             for para in paragraphs:
-                ui.write(_('    paragraph: %s\n') % ' '.join(para))
+                ui.write(_(b'    paragraph: %s\n') % b' '.join(para))
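The releasenotes hunks show the other half of that kwargs story: opts = pycompat.byteskwargs(opts) re-keys the native-str kwargs to bytes up front, which is what makes the later opts.get(b'list') / opts.get(b'rev') lookups valid. A simplified stand-in mirroring that bridge (assumed behavior of pycompat.byteskwargs, not its verbatim source):

    # Simplified stand-in for mercurial.pycompat.byteskwargs.
    def byteskwargs(opts):
        # Re-key str-keyed kwargs to bytes so bytes-first code can
        # use b'...' lookups afterwards.
        return {k.encode('latin-1'): v for k, v in opts.items()}

    def releasenotes_like(**opts):
        opts = byteskwargs(opts)
        return opts.get(b'list')

    print(releasenotes_like(list=True))   # True: bytes key now matches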
--- a/hgext/relink.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/relink.py	Sun Oct 06 09:48:39 2019 -0400
@@ -26,10 +26,12 @@
 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
 # be specifying the version(s) of Mercurial they are tested with, or
 # leave the attribute unspecified.
-testedwith = 'ships-with-hg-core'
+testedwith = b'ships-with-hg-core'
 
 
-@command('relink', [], _('[ORIGIN]'), helpcategory=command.CATEGORY_MAINTENANCE)
+@command(
+    b'relink', [], _(b'[ORIGIN]'), helpcategory=command.CATEGORY_MAINTENANCE
+)
 def relink(ui, repo, origin=None, **opts):
     """recreate hardlinks between two repositories
 
@@ -55,22 +57,22 @@
     command is running. (Both repositories will be locked against
     writes.)
     """
-    if not util.safehasattr(util, 'samefile') or not util.safehasattr(
-        util, 'samedevice'
+    if not util.safehasattr(util, b'samefile') or not util.safehasattr(
+        util, b'samedevice'
     ):
-        raise error.Abort(_('hardlinks are not supported on this system'))
+        raise error.Abort(_(b'hardlinks are not supported on this system'))
     src = hg.repository(
         repo.baseui,
-        ui.expandpath(origin or 'default-relink', origin or 'default'),
+        ui.expandpath(origin or b'default-relink', origin or b'default'),
     )
-    ui.status(_('relinking %s to %s\n') % (src.store.path, repo.store.path))
+    ui.status(_(b'relinking %s to %s\n') % (src.store.path, repo.store.path))
     if repo.root == src.root:
-        ui.status(_('there is nothing to relink\n'))
+        ui.status(_(b'there is nothing to relink\n'))
         return
 
     if not util.samedevice(src.store.path, repo.store.path):
         # No point in continuing
-        raise error.Abort(_('source and destination are on different devices'))
+        raise error.Abort(_(b'source and destination are on different devices'))
 
     with repo.lock(), src.lock():
         candidates = sorted(collect(src, ui))
@@ -81,7 +83,7 @@
 def collect(src, ui):
     seplen = len(os.path.sep)
     candidates = []
-    live = len(src['tip'].manifest())
+    live = len(src[b'tip'].manifest())
     # Your average repository has some files which were deleted before
     # the tip revision. We account for that by assuming that there are
     # 3 tracked files for every 2 live files as of the tip version of
@@ -90,17 +92,17 @@
     # mozilla-central as of 2010-06-10 had a ratio of just over 7:5.
     total = live * 3 // 2
     src = src.store.path
-    progress = ui.makeprogress(_('collecting'), unit=_('files'), total=total)
+    progress = ui.makeprogress(_(b'collecting'), unit=_(b'files'), total=total)
     pos = 0
     ui.status(
-        _("tip has %d files, estimated total number of files: %d\n")
+        _(b"tip has %d files, estimated total number of files: %d\n")
         % (live, total)
     )
     for dirpath, dirnames, filenames in os.walk(src):
         dirnames.sort()
         relpath = dirpath[len(src) + seplen :]
         for filename in sorted(filenames):
-            if filename[-2:] not in ('.d', '.i'):
+            if filename[-2:] not in (b'.d', b'.i'):
                 continue
             st = os.stat(os.path.join(dirpath, filename))
             if not stat.S_ISREG(st.st_mode):
@@ -110,7 +112,7 @@
             progress.update(pos, item=filename)
 
     progress.complete()
-    ui.status(_('collected %d candidate storage files\n') % len(candidates))
+    ui.status(_(b'collected %d candidate storage files\n') % len(candidates))
     return candidates
 
 
@@ -126,7 +128,7 @@
         if not util.samedevice(src, dst):
             # No point in continuing
             raise error.Abort(
-                _('source and destination are on different devices')
+                _(b'source and destination are on different devices')
             )
         if st.st_size != ts.st_size:
             return False
@@ -134,7 +136,7 @@
 
     targets = []
     progress = ui.makeprogress(
-        _('pruning'), unit=_('files'), total=len(candidates)
+        _(b'pruning'), unit=_(b'files'), total=len(candidates)
     )
     pos = 0
     for fn, st in candidates:
@@ -143,19 +145,21 @@
         tgt = os.path.join(dst, fn)
         ts = linkfilter(srcpath, tgt, st)
         if not ts:
-            ui.debug('not linkable: %s\n' % fn)
+            ui.debug(b'not linkable: %s\n' % fn)
             continue
         targets.append((fn, ts.st_size))
         progress.update(pos, item=fn)
 
     progress.complete()
-    ui.status(_('pruned down to %d probably relinkable files\n') % len(targets))
+    ui.status(
+        _(b'pruned down to %d probably relinkable files\n') % len(targets)
+    )
     return targets
 
 
 def do_relink(src, dst, files, ui):
     def relinkfile(src, dst):
-        bak = dst + '.bak'
+        bak = dst + b'.bak'
         os.rename(dst, bak)
         try:
             util.oslink(src, dst)
@@ -169,7 +173,7 @@
     savedbytes = 0
 
     progress = ui.makeprogress(
-        _('relinking'), unit=_('files'), total=len(files)
+        _(b'relinking'), unit=_(b'files'), total=len(files)
     )
     pos = 0
     for f, sz in files:
@@ -177,8 +181,8 @@
         source = os.path.join(src, f)
         tgt = os.path.join(dst, f)
         # Binary mode, so that read() works correctly, especially on Windows
-        sfp = open(source, 'rb')
-        dfp = open(tgt, 'rb')
+        sfp = open(source, b'rb')
+        dfp = open(tgt, b'rb')
         sin = sfp.read(CHUNKLEN)
         while sin:
             din = dfp.read(CHUNKLEN)
@@ -188,7 +192,7 @@
         sfp.close()
         dfp.close()
         if sin:
-            ui.debug('not linkable: %s\n' % f)
+            ui.debug(b'not linkable: %s\n' % f)
             continue
         try:
             relinkfile(source, tgt)
@@ -196,11 +200,11 @@
             relinked += 1
             savedbytes += sz
         except OSError as inst:
-            ui.warn('%s: %s\n' % (tgt, stringutil.forcebytestr(inst)))
+            ui.warn(b'%s: %s\n' % (tgt, stringutil.forcebytestr(inst)))
 
     progress.complete()
 
     ui.status(
-        _('relinked %d files (%s reclaimed)\n')
+        _(b'relinked %d files (%s reclaimed)\n')
         % (relinked, util.bytecount(savedbytes))
     )
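One observation about the mechanical pass, not a change to the patch: mode strings handed to the builtin open() were byteified along with everything else (open(source, b'rb') above). CPython's open() accepts a bytes path but requires a native-str mode, so call sites in this form are latent Python 3 issues. A quick illustration:

    # Builtin open() on Python 3: bytes *path* is accepted, bytes
    # *mode* is not.
    import os
    import tempfile

    fd, path = tempfile.mkstemp()
    os.close(fd)
    with open(path.encode(), 'rb'):   # bytes path + str mode: fine
        pass
    try:
        open(path, b'rb')             # bytes mode: TypeError
    except TypeError as exc:
        print(exc)
    os.unlink(path)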
--- a/hgext/remotefilelog/__init__.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/remotefilelog/__init__.py	Sun Oct 06 09:48:39 2019 -0400
@@ -185,72 +185,74 @@
 configtable = {}
 configitem = registrar.configitem(configtable)
 
-configitem('remotefilelog', 'debug', default=False)
+configitem(b'remotefilelog', b'debug', default=False)
 
-configitem('remotefilelog', 'reponame', default='')
-configitem('remotefilelog', 'cachepath', default=None)
-configitem('remotefilelog', 'cachegroup', default=None)
-configitem('remotefilelog', 'cacheprocess', default=None)
-configitem('remotefilelog', 'cacheprocess.includepath', default=None)
-configitem("remotefilelog", "cachelimit", default="1000 GB")
+configitem(b'remotefilelog', b'reponame', default=b'')
+configitem(b'remotefilelog', b'cachepath', default=None)
+configitem(b'remotefilelog', b'cachegroup', default=None)
+configitem(b'remotefilelog', b'cacheprocess', default=None)
+configitem(b'remotefilelog', b'cacheprocess.includepath', default=None)
+configitem(b"remotefilelog", b"cachelimit", default=b"1000 GB")
 
 configitem(
-    'remotefilelog',
-    'fallbackpath',
+    b'remotefilelog',
+    b'fallbackpath',
     default=configitems.dynamicdefault,
-    alias=[('remotefilelog', 'fallbackrepo')],
+    alias=[(b'remotefilelog', b'fallbackrepo')],
 )
 
-configitem('remotefilelog', 'validatecachelog', default=None)
-configitem('remotefilelog', 'validatecache', default='on')
-configitem('remotefilelog', 'server', default=None)
-configitem('remotefilelog', 'servercachepath', default=None)
-configitem("remotefilelog", "serverexpiration", default=30)
-configitem('remotefilelog', 'backgroundrepack', default=False)
-configitem('remotefilelog', 'bgprefetchrevs', default=None)
-configitem('remotefilelog', 'pullprefetch', default=None)
-configitem('remotefilelog', 'backgroundprefetch', default=False)
-configitem('remotefilelog', 'prefetchdelay', default=120)
-configitem('remotefilelog', 'prefetchdays', default=14)
+configitem(b'remotefilelog', b'validatecachelog', default=None)
+configitem(b'remotefilelog', b'validatecache', default=b'on')
+configitem(b'remotefilelog', b'server', default=None)
+configitem(b'remotefilelog', b'servercachepath', default=None)
+configitem(b"remotefilelog", b"serverexpiration", default=30)
+configitem(b'remotefilelog', b'backgroundrepack', default=False)
+configitem(b'remotefilelog', b'bgprefetchrevs', default=None)
+configitem(b'remotefilelog', b'pullprefetch', default=None)
+configitem(b'remotefilelog', b'backgroundprefetch', default=False)
+configitem(b'remotefilelog', b'prefetchdelay', default=120)
+configitem(b'remotefilelog', b'prefetchdays', default=14)
 
-configitem('remotefilelog', 'getfilesstep', default=10000)
-configitem('remotefilelog', 'getfilestype', default='optimistic')
-configitem('remotefilelog', 'batchsize', configitems.dynamicdefault)
-configitem('remotefilelog', 'fetchwarning', default='')
+configitem(b'remotefilelog', b'getfilesstep', default=10000)
+configitem(b'remotefilelog', b'getfilestype', default=b'optimistic')
+configitem(b'remotefilelog', b'batchsize', configitems.dynamicdefault)
+configitem(b'remotefilelog', b'fetchwarning', default=b'')
 
-configitem('remotefilelog', 'includepattern', default=None)
-configitem('remotefilelog', 'excludepattern', default=None)
+configitem(b'remotefilelog', b'includepattern', default=None)
+configitem(b'remotefilelog', b'excludepattern', default=None)
 
-configitem('remotefilelog', 'gcrepack', default=False)
-configitem('remotefilelog', 'repackonhggc', default=False)
-configitem('repack', 'chainorphansbysize', default=True, experimental=True)
+configitem(b'remotefilelog', b'gcrepack', default=False)
+configitem(b'remotefilelog', b'repackonhggc', default=False)
+configitem(b'repack', b'chainorphansbysize', default=True, experimental=True)
 
-configitem('packs', 'maxpacksize', default=0)
-configitem('packs', 'maxchainlen', default=1000)
+configitem(b'packs', b'maxpacksize', default=0)
+configitem(b'packs', b'maxchainlen', default=1000)
 
-configitem('devel', 'remotefilelog.ensurestart', default=False)
+configitem(b'devel', b'remotefilelog.ensurestart', default=False)
 
 #  default TTL limit is 30 days
 _defaultlimit = 60 * 60 * 24 * 30
-configitem('remotefilelog', 'nodettl', default=_defaultlimit)
+configitem(b'remotefilelog', b'nodettl', default=_defaultlimit)
 
-configitem('remotefilelog', 'data.gencountlimit', default=2),
-configitem('remotefilelog', 'data.generations', default=['1GB', '100MB', '1MB'])
-configitem('remotefilelog', 'data.maxrepackpacks', default=50)
-configitem('remotefilelog', 'data.repackmaxpacksize', default='4GB')
-configitem('remotefilelog', 'data.repacksizelimit', default='100MB')
+configitem(b'remotefilelog', b'data.gencountlimit', default=2),
+configitem(
+    b'remotefilelog', b'data.generations', default=[b'1GB', b'100MB', b'1MB']
+)
+configitem(b'remotefilelog', b'data.maxrepackpacks', default=50)
+configitem(b'remotefilelog', b'data.repackmaxpacksize', default=b'4GB')
+configitem(b'remotefilelog', b'data.repacksizelimit', default=b'100MB')
 
-configitem('remotefilelog', 'history.gencountlimit', default=2),
-configitem('remotefilelog', 'history.generations', default=['100MB'])
-configitem('remotefilelog', 'history.maxrepackpacks', default=50)
-configitem('remotefilelog', 'history.repackmaxpacksize', default='400MB')
-configitem('remotefilelog', 'history.repacksizelimit', default='100MB')
+configitem(b'remotefilelog', b'history.gencountlimit', default=2),
+configitem(b'remotefilelog', b'history.generations', default=[b'100MB'])
+configitem(b'remotefilelog', b'history.maxrepackpacks', default=50)
+configitem(b'remotefilelog', b'history.repackmaxpacksize', default=b'400MB')
+configitem(b'remotefilelog', b'history.repacksizelimit', default=b'100MB')
 
 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
 # be specifying the version(s) of Mercurial they are tested with, or
 # leave the attribute unspecified.
-testedwith = 'ships-with-hg-core'
+testedwith = b'ships-with-hg-core'
 
 repoclass = localrepo.localrepository
 repoclass._basesupported.add(constants.SHALLOWREPO_REQUIREMENT)
@@ -264,80 +266,80 @@
     """
     hg.wirepeersetupfuncs.append(fileserverclient.peersetup)
 
-    entry = extensions.wrapcommand(commands.table, 'clone', cloneshallow)
+    entry = extensions.wrapcommand(commands.table, b'clone', cloneshallow)
     entry[1].append(
         (
-            '',
-            'shallow',
+            b'',
+            b'shallow',
             None,
-            _("create a shallow clone which uses remote file " "history"),
+            _(b"create a shallow clone which uses remote file " b"history"),
         )
     )
 
     extensions.wrapcommand(
-        commands.table, 'debugindex', debugcommands.debugindex
+        commands.table, b'debugindex', debugcommands.debugindex
     )
     extensions.wrapcommand(
-        commands.table, 'debugindexdot', debugcommands.debugindexdot
+        commands.table, b'debugindexdot', debugcommands.debugindexdot
     )
-    extensions.wrapcommand(commands.table, 'log', log)
-    extensions.wrapcommand(commands.table, 'pull', pull)
+    extensions.wrapcommand(commands.table, b'log', log)
+    extensions.wrapcommand(commands.table, b'pull', pull)
 
     # Prevent 'hg manifest --all'
     def _manifest(orig, ui, repo, *args, **opts):
         if isenabled(repo) and opts.get(r'all'):
-            raise error.Abort(_("--all is not supported in a shallow repo"))
+            raise error.Abort(_(b"--all is not supported in a shallow repo"))
 
         return orig(ui, repo, *args, **opts)
 
-    extensions.wrapcommand(commands.table, "manifest", _manifest)
+    extensions.wrapcommand(commands.table, b"manifest", _manifest)
 
     # Wrap remotefilelog with lfs code
     def _lfsloaded(loaded=False):
         lfsmod = None
         try:
-            lfsmod = extensions.find('lfs')
+            lfsmod = extensions.find(b'lfs')
         except KeyError:
             pass
         if lfsmod:
             lfsmod.wrapfilelog(remotefilelog.remotefilelog)
             fileserverclient._lfsmod = lfsmod
 
-    extensions.afterloaded('lfs', _lfsloaded)
+    extensions.afterloaded(b'lfs', _lfsloaded)
 
     # debugdata needs remotefilelog.len to work
-    extensions.wrapcommand(commands.table, 'debugdata', debugdatashallow)
+    extensions.wrapcommand(commands.table, b'debugdata', debugdatashallow)
 
     changegroup.cgpacker = shallowbundle.shallowcg1packer
 
     extensions.wrapfunction(
-        changegroup, '_addchangegroupfiles', shallowbundle.addchangegroupfiles
+        changegroup, b'_addchangegroupfiles', shallowbundle.addchangegroupfiles
     )
     extensions.wrapfunction(
-        changegroup, 'makechangegroup', shallowbundle.makechangegroup
+        changegroup, b'makechangegroup', shallowbundle.makechangegroup
     )
-    extensions.wrapfunction(localrepo, 'makestore', storewrapper)
-    extensions.wrapfunction(exchange, 'pull', exchangepull)
-    extensions.wrapfunction(merge, 'applyupdates', applyupdates)
-    extensions.wrapfunction(merge, '_checkunknownfiles', checkunknownfiles)
-    extensions.wrapfunction(context.workingctx, '_checklookup', checklookup)
-    extensions.wrapfunction(scmutil, '_findrenames', findrenames)
+    extensions.wrapfunction(localrepo, b'makestore', storewrapper)
+    extensions.wrapfunction(exchange, b'pull', exchangepull)
+    extensions.wrapfunction(merge, b'applyupdates', applyupdates)
+    extensions.wrapfunction(merge, b'_checkunknownfiles', checkunknownfiles)
+    extensions.wrapfunction(context.workingctx, b'_checklookup', checklookup)
+    extensions.wrapfunction(scmutil, b'_findrenames', findrenames)
     extensions.wrapfunction(
-        copies, '_computeforwardmissing', computeforwardmissing
+        copies, b'_computeforwardmissing', computeforwardmissing
     )
-    extensions.wrapfunction(dispatch, 'runcommand', runcommand)
-    extensions.wrapfunction(repair, '_collectbrokencsets', _collectbrokencsets)
-    extensions.wrapfunction(context.changectx, 'filectx', filectx)
-    extensions.wrapfunction(context.workingctx, 'filectx', workingfilectx)
-    extensions.wrapfunction(patch, 'trydiff', trydiff)
-    extensions.wrapfunction(hg, 'verify', _verify)
-    scmutil.fileprefetchhooks.add('remotefilelog', _fileprefetchhook)
+    extensions.wrapfunction(dispatch, b'runcommand', runcommand)
+    extensions.wrapfunction(repair, b'_collectbrokencsets', _collectbrokencsets)
+    extensions.wrapfunction(context.changectx, b'filectx', filectx)
+    extensions.wrapfunction(context.workingctx, b'filectx', workingfilectx)
+    extensions.wrapfunction(patch, b'trydiff', trydiff)
+    extensions.wrapfunction(hg, b'verify', _verify)
+    scmutil.fileprefetchhooks.add(b'remotefilelog', _fileprefetchhook)
 
     # disappointing hacks below
-    extensions.wrapfunction(scmutil, 'getrenamedfn', getrenamedfn)
-    extensions.wrapfunction(revset, 'filelog', filelogrevset)
-    revset.symbols['filelog'] = revset.filelog
-    extensions.wrapfunction(cmdutil, 'walkfilerevs', walkfilerevs)
+    extensions.wrapfunction(scmutil, b'getrenamedfn', getrenamedfn)
+    extensions.wrapfunction(revset, b'filelog', filelogrevset)
+    revset.symbols[b'filelog'] = revset.filelog
+    extensions.wrapfunction(cmdutil, b'walkfilerevs', walkfilerevs)
 
 
 def cloneshallow(orig, ui, repo, *args, **opts):
@@ -366,7 +368,7 @@
             else:
                 return orig(self, *args, **kwargs)
 
-        extensions.wrapfunction(exchange, 'pull', pull_shallow)
+        extensions.wrapfunction(exchange, b'pull', pull_shallow)
 
         # Wrap the stream logic to add requirements and to pass include/exclude
         # patterns around.
@@ -378,30 +380,34 @@
                 if constants.NETWORK_CAP_LEGACY_SSH_GETFILES in caps:
                     opts = {}
                     if repo.includepattern:
-                        opts[r'includepattern'] = '\0'.join(repo.includepattern)
+                        opts[r'includepattern'] = b'\0'.join(
+                            repo.includepattern
+                        )
                     if repo.excludepattern:
-                        opts[r'excludepattern'] = '\0'.join(repo.excludepattern)
-                    return remote._callstream('stream_out_shallow', **opts)
+                        opts[r'excludepattern'] = b'\0'.join(
+                            repo.excludepattern
+                        )
+                    return remote._callstream(b'stream_out_shallow', **opts)
                 else:
                     return orig()
 
-            extensions.wrapfunction(remote, 'stream_out', stream_out_shallow)
+            extensions.wrapfunction(remote, b'stream_out', stream_out_shallow)
 
         def stream_wrap(orig, op):
             setup_streamout(op.repo, op.remote)
             return orig(op)
 
         extensions.wrapfunction(
-            streamclone, 'maybeperformlegacystreamclone', stream_wrap
+            streamclone, b'maybeperformlegacystreamclone', stream_wrap
         )
 
         def canperformstreamclone(orig, pullop, bundle2=False):
             # remotefilelog is currently incompatible with the
             # bundle2 flavor of streamclones, so force us to use
             # v1 instead.
-            if 'v2' in pullop.remotebundle2caps.get('stream', []):
-                pullop.remotebundle2caps['stream'] = [
-                    c for c in pullop.remotebundle2caps['stream'] if c != 'v2'
+            if b'v2' in pullop.remotebundle2caps.get(b'stream', []):
+                pullop.remotebundle2caps[b'stream'] = [
+                    c for c in pullop.remotebundle2caps[b'stream'] if c != b'v2'
                 ]
             if bundle2:
                 return False, None
@@ -411,7 +417,7 @@
             return supported, requirements
 
         extensions.wrapfunction(
-            streamclone, 'canperformstreamclone', canperformstreamclone
+            streamclone, b'canperformstreamclone', canperformstreamclone
         )
 
     try:
@@ -419,7 +425,7 @@
     finally:
         if opts.get(r'shallow'):
             for r in repos:
-                if util.safehasattr(r, 'fileservice'):
+                if util.safehasattr(r, b'fileservice'):
                     r.fileservice.close()
 
 
@@ -437,14 +443,14 @@
         return
 
     # put here intentionally bc doesnt work in uisetup
-    ui.setconfig('hooks', 'update.prefetch', wcpprefetch)
-    ui.setconfig('hooks', 'commit.prefetch', wcpprefetch)
+    ui.setconfig(b'hooks', b'update.prefetch', wcpprefetch)
+    ui.setconfig(b'hooks', b'commit.prefetch', wcpprefetch)
 
-    isserverenabled = ui.configbool('remotefilelog', 'server')
+    isserverenabled = ui.configbool(b'remotefilelog', b'server')
     isshallowclient = isenabled(repo)
 
     if isserverenabled and isshallowclient:
-        raise RuntimeError("Cannot be both a server and shallow client.")
+        raise RuntimeError(b"Cannot be both a server and shallow client.")
 
     if isshallowclient:
         setupclient(ui, repo)
@@ -481,7 +487,7 @@
     if isenabled(repo):
         manifest = mctx.manifest()
         files = []
-        for f, args, msg in actions['g']:
+        for f, args, msg in actions[b'g']:
             files.append((f, hex(manifest[f])))
         # batch fetch the needed files from the server
         repo.fileservice.prefetch(files)
@@ -498,9 +504,9 @@
         for f, (m, actionargs, msg) in actions.iteritems():
             if sparsematch and not sparsematch(f):
                 continue
-            if m in ('c', 'dc', 'cm'):
+            if m in (b'c', b'dc', b'cm'):
                 files.append((f, hex(mctx.filenode(f))))
-            elif m == 'dg':
+            elif m == b'dg':
                 f2 = actionargs[0]
                 files.append((f2, hex(mctx.filenode(f2))))
         # batch fetch the needed files from the server
@@ -526,7 +532,7 @@
 def findrenames(orig, repo, matcher, added, removed, *args, **kwargs):
     if isenabled(repo):
         files = []
-        pmf = repo['.'].manifest()
+        pmf = repo[b'.'].manifest()
         for f in removed:
             if f in pmf:
                 files.append((f, hex(pmf[f])))
@@ -713,7 +719,7 @@
             )
 
     extensions.wrapfunction(
-        remotefilelog.remotefilelog, 'addrawrevision', addrawrevision
+        remotefilelog.remotefilelog, b'addrawrevision', addrawrevision
     )
 
     def changelogadd(orig, self, *args):
@@ -728,7 +734,7 @@
                     log.addrawrevision(rt, tr, linknode, p1, p2, n, fl, c, m)
                 else:
                     raise error.ProgrammingError(
-                        'pending multiple integer revisions are not supported'
+                        b'pending multiple integer revisions are not supported'
                     )
         else:
             # "link" is actually wrong here (it is set to len(changelog))
@@ -736,12 +742,12 @@
             # but still do a sanity check about pending multiple revisions
             if len(set(x[3] for x in pendingfilecommits)) > 1:
                 raise error.ProgrammingError(
-                    'pending multiple integer revisions are not supported'
+                    b'pending multiple integer revisions are not supported'
                 )
         del pendingfilecommits[:]
         return node
 
-    extensions.wrapfunction(changelog.changelog, 'add', changelogadd)
+    extensions.wrapfunction(changelog.changelog, b'add', changelogadd)
 
 
 def getrenamedfn(orig, repo, endrev=None):
@@ -780,16 +786,16 @@
     # remotefilelog's can't be walked in rev order, so throw.
     # The caller will see the exception and walk the commit tree instead.
     if not follow:
-        raise cmdutil.FileWalkError("Cannot walk via filelog")
+        raise cmdutil.FileWalkError(b"Cannot walk via filelog")
 
     wanted = set()
     minrev, maxrev = min(revs), max(revs)
 
-    pctx = repo['.']
+    pctx = repo[b'.']
     for filename in match.files():
         if filename not in pctx:
             raise error.Abort(
-                _('cannot follow file not in parent ' 'revision: "%s"')
+                _(b'cannot follow file not in parent ' b'revision: "%s"')
                 % filename
             )
         fctx = pctx[filename]
@@ -821,9 +827,9 @@
         return orig(repo, subset, x)
 
     # i18n: "filelog" is a keyword
-    pat = revset.getstring(x, _("filelog requires a pattern"))
+    pat = revset.getstring(x, _(b"filelog requires a pattern"))
     m = match.match(
-        repo.root, repo.getcwd(), [pat], default='relpath', ctx=repo[None]
+        repo.root, repo.getcwd(), [pat], default=b'relpath', ctx=repo[None]
     )
     s = set()
 
@@ -848,7 +854,7 @@
     return smartset.baseset([r for r in subset if r in s])
 
 
-@command('gc', [], _('hg gc [REPO...]'), norepo=True)
+@command(b'gc', [], _(b'hg gc [REPO...]'), norepo=True)
 def gc(ui, *args, **opts):
     '''garbage collect the client and server filelog caches
     '''
@@ -861,7 +867,7 @@
 
     # get repo client and server cache
     repopaths = []
-    pwd = ui.environ.get('PWD')
+    pwd = ui.environ.get(b'PWD')
     if pwd:
         repopaths.append(pwd)
 
@@ -889,12 +895,12 @@
 
 def gcclient(ui, cachepath):
     # get list of repos that use this cache
-    repospath = os.path.join(cachepath, 'repos')
+    repospath = os.path.join(cachepath, b'repos')
     if not os.path.exists(repospath):
-        ui.warn(_("no known cache at %s\n") % cachepath)
+        ui.warn(_(b"no known cache at %s\n") % cachepath)
         return
 
-    reposfile = open(repospath, 'rb')
+    reposfile = open(repospath, b'rb')
     repos = {r[:-1] for r in reposfile.readlines()}
     reposfile.close()
 
@@ -907,7 +913,7 @@
 
     count = 0
     progress = ui.makeprogress(
-        _("analyzing repositories"), unit="repos", total=len(repos)
+        _(b"analyzing repositories"), unit=b"repos", total=len(repos)
     )
     for path in repos:
         progress.update(count)
@@ -915,7 +921,7 @@
         try:
             path = ui.expandpath(os.path.normpath(path))
         except TypeError as e:
-            ui.warn(_("warning: malformed path: %r:%s\n") % (path, e))
+            ui.warn(_(b"warning: malformed path: %r:%s\n") % (path, e))
             traceback.print_exc()
             continue
         try:
@@ -932,15 +938,17 @@
         if not isenabled(repo):
             continue
 
-        if not util.safehasattr(repo, 'name'):
-            ui.warn(_("repo %s is a misconfigured remotefilelog repo\n") % path)
+        if not util.safehasattr(repo, b'name'):
+            ui.warn(
+                _(b"repo %s is a misconfigured remotefilelog repo\n") % path
+            )
             continue
 
         # If garbage collection on repack and repack on hg gc are enabled
         # then loose files are repacked and garbage collected.
         # Otherwise regular garbage collection is performed.
-        repackonhggc = repo.ui.configbool('remotefilelog', 'repackonhggc')
-        gcrepack = repo.ui.configbool('remotefilelog', 'gcrepack')
+        repackonhggc = repo.ui.configbool(b'remotefilelog', b'repackonhggc')
+        gcrepack = repo.ui.configbool(b'remotefilelog', b'gcrepack')
         if repackonhggc and gcrepack:
             try:
                 repackmod.incrementalrepack(repo)
@@ -966,8 +974,8 @@
     # write list of valid repos back
     oldumask = os.umask(0o002)
     try:
-        reposfile = open(repospath, 'wb')
-        reposfile.writelines([("%s\n" % r) for r in validrepos])
+        reposfile = open(repospath, b'wb')
+        reposfile.writelines([(b"%s\n" % r) for r in validrepos])
         reposfile.close()
     finally:
         os.umask(oldumask)
@@ -976,7 +984,7 @@
     if sharedcache is not None:
         sharedcache.gc(keepkeys)
     elif not filesrepacked:
-        ui.warn(_("warning: no valid repos in repofile\n"))
+        ui.warn(_(b"warning: no valid repos in repofile\n"))
 
 
 def log(orig, ui, repo, *pats, **opts):
@@ -995,7 +1003,7 @@
         # If this is a non-follow log without any revs specified, recommend that
         # the user add -f to speed it up.
         if not follow and not revs:
-            match = scmutil.match(repo['.'], pats, pycompat.byteskwargs(opts))
+            match = scmutil.match(repo[b'.'], pats, pycompat.byteskwargs(opts))
             isfile = not match.anypats()
             if isfile:
                 for file in match.files():
@@ -1006,8 +1014,8 @@
             if isfile:
                 ui.warn(
                     _(
-                        "warning: file log can be slow on large repos - "
-                        + "use -f to speed it up\n"
+                        b"warning: file log can be slow on large repos - "
+                        + b"use -f to speed it up\n"
                     )
                 )
 
@@ -1019,9 +1027,9 @@
     are included. The default value is set to 14 days. If 'prefetchdays' is set
     to zero or a negative value, the date restriction is not applied.
     """
-    days = ui.configint('remotefilelog', 'prefetchdays')
+    days = ui.configint(b'remotefilelog', b'prefetchdays')
     if days > 0:
-        revset = '(%s) & date(-%s)' % (revset, days)
+        revset = b'(%s) & date(-%s)' % (revset, days)
     return revset
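
For reference, a standalone sketch of the revset composition performed by
revdatelimit() above (a stripped-down copy, not the real function; it uses %d
for the integer because bytes %-interpolation on Python 3 accepts ints only
via numeric codes such as %d, not %s):

    def revdatelimit(revset, days):
        # wrap the revset in a date(-N) clause limiting it to the last
        # N days; zero or negative N leaves the revset unrestricted
        if days > 0:
            revset = b'(%s) & date(-%d)' % (revset, days)
        return revset

    assert revdatelimit(b'master', 14) == b'(master) & date(-14)'
    assert revdatelimit(b'master', 0) == b'master'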
 
 
@@ -1030,11 +1038,11 @@
     This only relates to prefetches after operations that change the working
     copy parent. The default delay between background prefetches is 2 minutes.
     """
-    timeout = repo.ui.configint('remotefilelog', 'prefetchdelay')
-    fname = repo.vfs.join('lastprefetch')
+    timeout = repo.ui.configint(b'remotefilelog', b'prefetchdelay')
+    fname = repo.vfs.join(b'lastprefetch')
 
     ready = False
-    with open(fname, 'a'):
+    with open(fname, b'a'):
         # the with construct above is used to avoid race conditions
         modtime = os.path.getmtime(fname)
         if (time.time() - modtime) > timeout:
@@ -1049,18 +1057,18 @@
     Does background repack if backgroundrepack flag is set in config.
     """
     shallow = isenabled(repo)
-    bgprefetchrevs = ui.config('remotefilelog', 'bgprefetchrevs')
+    bgprefetchrevs = ui.config(b'remotefilelog', b'bgprefetchrevs')
     isready = readytofetch(repo)
 
     if not (shallow and bgprefetchrevs and isready):
         return
 
-    bgrepack = repo.ui.configbool('remotefilelog', 'backgroundrepack')
+    bgrepack = repo.ui.configbool(b'remotefilelog', b'backgroundrepack')
     # update a revset with a date limit
     bgprefetchrevs = revdatelimit(ui, bgprefetchrevs)
 
     def anon():
-        if util.safehasattr(repo, 'ranprefetch') and repo.ranprefetch:
+        if util.safehasattr(repo, b'ranprefetch') and repo.ranprefetch:
             return
         repo.ranprefetch = True
         repo.backgroundprefetch(bgprefetchrevs, repack=bgrepack)
@@ -1073,15 +1081,15 @@
 
     if isenabled(repo):
         # prefetch if it's configured
-        prefetchrevset = ui.config('remotefilelog', 'pullprefetch')
-        bgrepack = repo.ui.configbool('remotefilelog', 'backgroundrepack')
-        bgprefetch = repo.ui.configbool('remotefilelog', 'backgroundprefetch')
-        ensurestart = repo.ui.configbool('devel', 'remotefilelog.ensurestart')
+        prefetchrevset = ui.config(b'remotefilelog', b'pullprefetch')
+        bgrepack = repo.ui.configbool(b'remotefilelog', b'backgroundrepack')
+        bgprefetch = repo.ui.configbool(b'remotefilelog', b'backgroundprefetch')
+        ensurestart = repo.ui.configbool(b'devel', b'remotefilelog.ensurestart')
 
         if prefetchrevset:
-            ui.status(_("prefetching file contents\n"))
+            ui.status(_(b"prefetching file contents\n"))
             revs = scmutil.revrange(repo, [prefetchrevset])
-            base = repo['.'].rev()
+            base = repo[b'.'].rev()
             if bgprefetch:
                 repo.backgroundprefetch(
                     prefetchrevset, repack=bgrepack, ensurestart=ensurestart
@@ -1113,10 +1121,10 @@
             source, heads=heads, common=common, bundlecaps=bundlecaps, **kwargs
         )
 
-    if util.safehasattr(remote, '_callstream'):
+    if util.safehasattr(remote, b'_callstream'):
         remote._localrepo = repo
-    elif util.safehasattr(remote, 'getbundle'):
-        extensions.wrapfunction(remote, 'getbundle', localgetbundle)
+    elif util.safehasattr(remote, b'getbundle'):
+        extensions.wrapfunction(remote, b'getbundle', localgetbundle)
 
     return orig(repo, remote, *args, **kwargs)
 
@@ -1137,9 +1145,9 @@
 
 
 @command(
-    'debugremotefilelog',
-    [('d', 'decompress', None, _('decompress the filelog first')),],
-    _('hg debugremotefilelog <path>'),
+    b'debugremotefilelog',
+    [(b'd', b'decompress', None, _(b'decompress the filelog first')),],
+    _(b'hg debugremotefilelog <path>'),
     norepo=True,
 )
 def debugremotefilelog(ui, path, **opts):
@@ -1147,9 +1155,9 @@
 
 
 @command(
-    'verifyremotefilelog',
-    [('d', 'decompress', None, _('decompress the filelogs first')),],
-    _('hg verifyremotefilelogs <directory>'),
+    b'verifyremotefilelog',
+    [(b'd', b'decompress', None, _(b'decompress the filelogs first')),],
+    _(b'hg verifyremotefilelogs <directory>'),
     norepo=True,
 )
 def verifyremotefilelog(ui, path, **opts):
@@ -1157,24 +1165,24 @@
 
 
 @command(
-    'debugdatapack',
+    b'debugdatapack',
     [
-        ('', 'long', None, _('print the long hashes')),
-        ('', 'node', '', _('dump the contents of node'), 'NODE'),
+        (b'', b'long', None, _(b'print the long hashes')),
+        (b'', b'node', b'', _(b'dump the contents of node'), b'NODE'),
     ],
-    _('hg debugdatapack <paths>'),
+    _(b'hg debugdatapack <paths>'),
     norepo=True,
 )
 def debugdatapack(ui, *paths, **opts):
     return debugcommands.debugdatapack(ui, *paths, **opts)
 
 
-@command('debughistorypack', [], _('hg debughistorypack <path>'), norepo=True)
+@command(b'debughistorypack', [], _(b'hg debughistorypack <path>'), norepo=True)
 def debughistorypack(ui, path, **opts):
     return debugcommands.debughistorypack(ui, path)
 
 
-@command('debugkeepset', [], _('hg debugkeepset'))
+@command(b'debugkeepset', [], _(b'hg debugkeepset'))
 def debugkeepset(ui, repo, **opts):
     # The command is used to measure keepset computation time
     def keyfn(fname, fnode):
@@ -1184,48 +1192,48 @@
     return
 
 
-@command('debugwaitonrepack', [], _('hg debugwaitonrepack'))
+@command(b'debugwaitonrepack', [], _(b'hg debugwaitonrepack'))
 def debugwaitonrepack(ui, repo, **opts):
     return debugcommands.debugwaitonrepack(repo)
 
 
-@command('debugwaitonprefetch', [], _('hg debugwaitonprefetch'))
+@command(b'debugwaitonprefetch', [], _(b'hg debugwaitonprefetch'))
 def debugwaitonprefetch(ui, repo, **opts):
     return debugcommands.debugwaitonprefetch(repo)
 
 
 def resolveprefetchopts(ui, opts):
-    if not opts.get('rev'):
-        revset = ['.', 'draft()']
+    if not opts.get(b'rev'):
+        revset = [b'.', b'draft()']
 
-        prefetchrevset = ui.config('remotefilelog', 'pullprefetch', None)
+        prefetchrevset = ui.config(b'remotefilelog', b'pullprefetch', None)
         if prefetchrevset:
-            revset.append('(%s)' % prefetchrevset)
-        bgprefetchrevs = ui.config('remotefilelog', 'bgprefetchrevs', None)
+            revset.append(b'(%s)' % prefetchrevset)
+        bgprefetchrevs = ui.config(b'remotefilelog', b'bgprefetchrevs', None)
         if bgprefetchrevs:
-            revset.append('(%s)' % bgprefetchrevs)
-        revset = '+'.join(revset)
+            revset.append(b'(%s)' % bgprefetchrevs)
+        revset = b'+'.join(revset)
 
         # update a revset with a date limit
         revset = revdatelimit(ui, revset)
 
-        opts['rev'] = [revset]
+        opts[b'rev'] = [revset]
 
-    if not opts.get('base'):
-        opts['base'] = None
+    if not opts.get(b'base'):
+        opts[b'base'] = None
 
     return opts
 
 
 @command(
-    'prefetch',
+    b'prefetch',
     [
-        ('r', 'rev', [], _('prefetch the specified revisions'), _('REV')),
-        ('', 'repack', False, _('run repack after prefetch')),
-        ('b', 'base', '', _("rev that is assumed to already be local")),
+        (b'r', b'rev', [], _(b'prefetch the specified revisions'), _(b'REV')),
+        (b'', b'repack', False, _(b'run repack after prefetch')),
+        (b'b', b'base', b'', _(b"rev that is assumed to already be local")),
     ]
     + commands.walkopts,
-    _('hg prefetch [OPTIONS] [FILE...]'),
+    _(b'hg prefetch [OPTIONS] [FILE...]'),
 )
 def prefetch(ui, repo, *pats, **opts):
     """prefetch file revisions from the server
@@ -1239,39 +1247,39 @@
     """
     opts = pycompat.byteskwargs(opts)
     if not isenabled(repo):
-        raise error.Abort(_("repo is not shallow"))
+        raise error.Abort(_(b"repo is not shallow"))
 
     opts = resolveprefetchopts(ui, opts)
-    revs = scmutil.revrange(repo, opts.get('rev'))
-    repo.prefetch(revs, opts.get('base'), pats, opts)
+    revs = scmutil.revrange(repo, opts.get(b'rev'))
+    repo.prefetch(revs, opts.get(b'base'), pats, opts)
 
-    ensurestart = repo.ui.configbool('devel', 'remotefilelog.ensurestart')
+    ensurestart = repo.ui.configbool(b'devel', b'remotefilelog.ensurestart')
 
     # Run repack in background
-    if opts.get('repack'):
+    if opts.get(b'repack'):
         repackmod.backgroundrepack(
             repo, incremental=True, ensurestart=ensurestart
         )
 
 
 @command(
-    'repack',
+    b'repack',
     [
-        ('', 'background', None, _('run in a background process'), None),
-        ('', 'incremental', None, _('do an incremental repack'), None),
+        (b'', b'background', None, _(b'run in a background process'), None),
+        (b'', b'incremental', None, _(b'do an incremental repack'), None),
         (
-            '',
-            'packsonly',
+            b'',
+            b'packsonly',
             None,
-            _('only repack packs (skip loose objects)'),
+            _(b'only repack packs (skip loose objects)'),
             None,
         ),
     ],
-    _('hg repack [OPTIONS]'),
+    _(b'hg repack [OPTIONS]'),
 )
 def repack_(ui, repo, *pats, **opts):
     if opts.get(r'background'):
-        ensurestart = repo.ui.configbool('devel', 'remotefilelog.ensurestart')
+        ensurestart = repo.ui.configbool(b'devel', b'remotefilelog.ensurestart')
         repackmod.backgroundrepack(
             repo,
             incremental=opts.get(r'incremental'),
@@ -1280,7 +1288,7 @@
         )
         return
 
-    options = {'packsonly': opts.get(r'packsonly')}
+    options = {b'packsonly': opts.get(r'packsonly')}
 
     try:
         if opts.get(r'incremental'):
@@ -1290,4 +1298,4 @@
     except repackmod.RepackAlreadyRunning as ex:
         # Don't propagate the exception if the repack is already in
         # progress, since we want the command to exit 0.
-        repo.ui.warn('%s\n' % ex)
+        repo.ui.warn(b'%s\n' % ex)
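
A caveat on the hunks above: several of them byteify attribute names passed to
util.safehasattr() and extensions.wrapfunction() (e.g. b'fileservice',
b'cleanup', b'add'). A plain-Python sketch (the Repo class is hypothetical) of
why that is not behavior-preserving on Python 3, where getattr() accepts only
str attribute names:

    class Repo(object):
        fileservice = object()

    repo = Repo()
    # str attribute name: accepted on both Python 2 and Python 3
    assert getattr(repo, 'fileservice', None) is not None
    try:
        # bytes attribute name: TypeError on Python 3 (fine on Python 2,
        # where bytes is the native str type)
        getattr(repo, b'fileservice', None)
    except TypeError:
        print('bytes attribute names are rejected on Python 3')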
--- a/hgext/remotefilelog/basepack.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/remotefilelog/basepack.py	Sun Oct 06 09:48:39 2019 -0400
@@ -56,9 +56,9 @@
     # With glibc 2.7+ the 'e' flag uses O_CLOEXEC when opening.
     # The 'e' flag will be ignored on older versions of glibc.
     # Python 3 can't handle the 'e' flag.
-    PACKOPENMODE = 'rbe'
+    PACKOPENMODE = b'rbe'
 else:
-    PACKOPENMODE = 'rb'
+    PACKOPENMODE = b'rb'
 
 
 class _cachebackedpacks(object):
@@ -132,7 +132,7 @@
                 # Someone could have removed the file since we retrieved the
                 # list of paths.
                 if getattr(ex, 'errno', None) != errno.ENOENT:
-                    ui.warn(_('unable to load pack %s: %s\n') % (filepath, ex))
+                    ui.warn(_(b'unable to load pack %s: %s\n') % (filepath, ex))
                 continue
             packs.append(pack)
 
@@ -210,8 +210,8 @@
         """Returns metrics on the state of this store."""
         size, count = self.gettotalsizeandcount()
         return {
-            'numpacks': count,
-            'totalpacksize': size,
+            b'numpacks': count,
+            b'totalpacksize': size,
         }
 
     def getpack(self, path):
@@ -276,9 +276,9 @@
                 # only affect this instance
                 self.VERSION = version
             elif self.VERSION != version:
-                raise RuntimeError('inconsistent version: %d' % version)
+                raise RuntimeError(b'inconsistent version: %d' % version)
         else:
-            raise RuntimeError('unsupported version: %d' % version)
+            raise RuntimeError(b'unsupported version: %d' % version)
 
 
 class basepack(versionmixin):
@@ -300,10 +300,10 @@
         self._data = None
         self.freememory()  # initialize the mmap
 
-        version = struct.unpack('!B', self._data[:PACKVERSIONSIZE])[0]
+        version = struct.unpack(b'!B', self._data[:PACKVERSIONSIZE])[0]
         self._checkversion(version)
 
-        version, config = struct.unpack('!BB', self._index[:INDEXVERSIONSIZE])
+        version, config = struct.unpack(b'!BB', self._index[:INDEXVERSIONSIZE])
         self._checkversion(version)
 
         if 0b10000000 & config:
@@ -318,14 +318,14 @@
         fanouttable = []
         for i in pycompat.xrange(0, params.fanoutcount):
             loc = i * 4
-            fanoutentry = struct.unpack('!I', rawfanout[loc : loc + 4])[0]
+            fanoutentry = struct.unpack(b'!I', rawfanout[loc : loc + 4])[0]
             fanouttable.append(fanoutentry)
         return fanouttable
 
     @util.propertycache
     def _indexend(self):
         nodecount = struct.unpack_from(
-            '!Q', self._index, self.params.indexstart - 8
+            b'!Q', self._index, self.params.indexstart - 8
         )[0]
         return self.params.indexstart + nodecount * self.INDEXENTRYLENGTH
 
@@ -372,7 +372,7 @@
     def __init__(self, ui, packdir, version=2):
         self._checkversion(version)
         # TODO(augie): make this configurable
-        self._compressor = 'GZ'
+        self._compressor = b'GZ'
         opener = vfsmod.vfs(packdir)
         opener.createmode = 0o444
         self.opener = opener
@@ -381,10 +381,10 @@
 
         shallowutil.mkstickygroupdir(ui, packdir)
         self.packfp, self.packpath = opener.mkstemp(
-            suffix=self.PACKSUFFIX + '-tmp'
+            suffix=self.PACKSUFFIX + b'-tmp'
         )
         self.idxfp, self.idxpath = opener.mkstemp(
-            suffix=self.INDEXSUFFIX + '-tmp'
+            suffix=self.INDEXSUFFIX + b'-tmp'
         )
         self.packfp = os.fdopen(self.packfp, r'wb+')
         self.idxfp = os.fdopen(self.idxfp, r'wb+')
@@ -400,7 +400,7 @@
         # Write header
         # TODO: make it extensible (ex: allow specifying compression algorithm,
         # a flexible key/value header, delta algorithm, fanout size, etc)
-        versionbuf = struct.pack('!B', self.VERSION)  # unsigned 1 byte int
+        versionbuf = struct.pack(b'!B', self.VERSION)  # unsigned 1 byte int
         self.writeraw(versionbuf)
 
     def __enter__(self):
@@ -491,14 +491,14 @@
             if fanouttable[fanoutkey] == EMPTYFANOUT:
                 fanouttable[fanoutkey] = location
 
-        rawfanouttable = ''
+        rawfanouttable = b''
         last = 0
         for offset in fanouttable:
             offset = offset if offset != EMPTYFANOUT else last
             last = offset
-            rawfanouttable += struct.pack('!I', offset)
+            rawfanouttable += struct.pack(b'!I', offset)
 
-        rawentrieslength = struct.pack('!Q', len(self.entries))
+        rawentrieslength = struct.pack(b'!Q', len(self.entries))
 
         # The index offset is its location in the file, i.e. right after the
         # 2 byte header and the fanouttable.
@@ -521,7 +521,7 @@
         config = 0
         if indexparams.fanoutprefix == LARGEFANOUTPREFIX:
             config = 0b10000000
-        self.idxfp.write(struct.pack('!BB', self.VERSION, config))
+        self.idxfp.write(struct.pack(b'!BB', self.VERSION, config))
 
 
 class indexparams(object):
@@ -540,11 +540,11 @@
         # converts the node prefix into an integer location in the fanout
         # table).
         if prefixsize == SMALLFANOUTPREFIX:
-            self.fanoutstruct = '!B'
+            self.fanoutstruct = b'!B'
         elif prefixsize == LARGEFANOUTPREFIX:
-            self.fanoutstruct = '!H'
+            self.fanoutstruct = b'!H'
         else:
-            raise ValueError("invalid fanout prefix size: %s" % prefixsize)
+            raise ValueError(b"invalid fanout prefix size: %s" % prefixsize)
 
         # The number of fanout table entries
         self.fanoutcount = 2 ** (prefixsize * 8)
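
By contrast, byteifying the struct format strings in basepack.py above is
behavior-preserving: the struct module accepts both str and bytes formats on
Python 2 and Python 3 alike. A stdlib-only sketch of the two-byte index header
and one fanout slot those hunks pack and unpack:

    import struct

    VERSION = 2
    LARGEFANOUTFLAG = 0b10000000

    # index header: a version byte followed by a config byte
    header = struct.pack(b'!BB', VERSION, LARGEFANOUTFLAG)
    version, config = struct.unpack(b'!BB', header)
    assert version == VERSION
    assert config & 0b10000000        # large fanout table in use

    # each fanout slot is a 4-byte big-endian offset
    assert struct.unpack(b'!I', struct.pack(b'!I', 4096))[0] == 4096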
--- a/hgext/remotefilelog/basestore.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/remotefilelog/basestore.py	Sun Oct 06 09:48:39 2019 -0400
@@ -39,14 +39,14 @@
         self._uid = os.getuid() if not pycompat.iswindows else None
 
         self._validatecachelog = self.ui.config(
-            "remotefilelog", "validatecachelog"
+            b"remotefilelog", b"validatecachelog"
         )
         self._validatecache = self.ui.config(
-            "remotefilelog", "validatecache", 'on'
+            b"remotefilelog", b"validatecache", b'on'
         )
-        if self._validatecache not in ('on', 'strict', 'off'):
-            self._validatecache = 'on'
-        if self._validatecache == 'off':
+        if self._validatecache not in (b'on', b'strict', b'off'):
+            self._validatecache = b'on'
+        if self._validatecache == b'off':
             self._validatecache = False
 
         if shared:
@@ -59,8 +59,8 @@
             exists = os.path.exists(filepath)
             if (
                 exists
-                and self._validatecache == 'strict'
-                and not self._validatekey(filepath, 'contains')
+                and self._validatecache == b'strict'
+                and not self._validatekey(filepath, b'contains')
             ):
                 exists = False
             if not exists:
@@ -84,7 +84,7 @@
         entries = ledger.sources.get(self, [])
         count = 0
         progress = ui.makeprogress(
-            _("cleaning up"), unit="files", total=len(entries)
+            _(b"cleaning up"), unit=b"files", total=len(entries)
         )
         for entry in entries:
             if entry.gced or (entry.datarepacked and entry.historyrepacked):
@@ -121,7 +121,7 @@
                     pass
 
             elif stat.S_ISREG(mode):
-                if name.endswith('_old'):
+                if name.endswith(b'_old'):
                     oldfiles.add(name[:-4])
                 else:
                     otherfiles.add(name)
@@ -130,7 +130,7 @@
         # corresponding file without the suffix '_old'. See the
         # addremotefilelognode method for the generation/purpose of files
         # with the '_old' suffix.
         for filename in oldfiles - otherfiles:
-            filepath = os.path.join(rootdir, filename + '_old')
+            filepath = os.path.join(rootdir, filename + b'_old')
             util.tryunlink(filepath)
 
     def _getfiles(self):
@@ -164,7 +164,7 @@
         missingfilename = set(hashes)
 
         # Start with a full manifest, since it'll cover the majority of files
-        for filename in self.repo['tip'].manifest():
+        for filename in self.repo[b'tip'].manifest():
             sha = hashlib.sha1(filename).digest()
             if sha in missingfilename:
                 filenames[filename] = sha
@@ -224,13 +224,13 @@
             data = shallowutil.readfile(filepath)
             if self._validatecache and not self._validatedata(data, filepath):
                 if self._validatecachelog:
-                    with open(self._validatecachelog, 'a+') as f:
-                        f.write("corrupt %s during read\n" % filepath)
-                os.rename(filepath, filepath + ".corrupt")
-                raise KeyError("corrupt local cache file %s" % filepath)
+                    with open(self._validatecachelog, b'a+') as f:
+                        f.write(b"corrupt %s during read\n" % filepath)
+                os.rename(filepath, filepath + b".corrupt")
+                raise KeyError(b"corrupt local cache file %s" % filepath)
         except IOError:
             raise KeyError(
-                "no file found at %s for %s:%s" % (filepath, name, hex(node))
+                b"no file found at %s for %s:%s" % (filepath, name, hex(node))
             )
 
         return data
@@ -243,7 +243,7 @@
             # if this node already exists, save the old version for
             # recovery/debugging purposes.
             if os.path.exists(filepath):
-                newfilename = filepath + '_old'
+                newfilename = filepath + b'_old'
                 # newfilename can be read-only and shutil.copy will fail.
                 # Delete newfilename to avoid it
                 if os.path.exists(newfilename):
@@ -254,9 +254,9 @@
             shallowutil.writefile(filepath, data, readonly=True)
 
             if self._validatecache:
-                if not self._validatekey(filepath, 'write'):
+                if not self._validatekey(filepath, b'write'):
                     raise error.Abort(
-                        _("local cache write was corrupted %s") % filepath
+                        _(b"local cache write was corrupted %s") % filepath
                     )
         finally:
             os.umask(oldumask)
@@ -267,26 +267,26 @@
         collection, since it allows us to inspect the repos to see what nodes
         they want to be kept alive in the store.
         """
-        repospath = os.path.join(self._path, "repos")
-        with open(repospath, 'ab') as reposfile:
-            reposfile.write(os.path.dirname(path) + "\n")
+        repospath = os.path.join(self._path, b"repos")
+        with open(repospath, b'ab') as reposfile:
+            reposfile.write(os.path.dirname(path) + b"\n")
 
         repospathstat = os.stat(repospath)
         if repospathstat.st_uid == self._uid:
             os.chmod(repospath, 0o0664)
 
     def _validatekey(self, path, action):
-        with open(path, 'rb') as f:
+        with open(path, b'rb') as f:
             data = f.read()
 
         if self._validatedata(data, path):
             return True
 
         if self._validatecachelog:
-            with open(self._validatecachelog, 'ab+') as f:
-                f.write("corrupt %s during %s\n" % (path, action))
+            with open(self._validatecachelog, b'ab+') as f:
+                f.write(b"corrupt %s during %s\n" % (path, action))
 
-        os.rename(path, path + ".corrupt")
+        os.rename(path, path + b".corrupt")
         return False
 
     def _validatedata(self, data, path):
@@ -327,16 +327,16 @@
         limit = time.time() - (60 * 60 * 24)
 
         progress = ui.makeprogress(
-            _("removing unnecessary files"), unit="files"
+            _(b"removing unnecessary files"), unit=b"files"
         )
         progress.update(0)
         for root, dirs, files in os.walk(cachepath):
             for file in files:
-                if file == 'repos':
+                if file == b'repos':
                     continue
 
                 # Don't delete pack files
-                if '/packs/' in root:
+                if b'/packs/' in root:
                     continue
 
                 progress.update(count)
@@ -349,7 +349,9 @@
                     # errno.ENOENT = no such file or directory
                     if e.errno != errno.ENOENT:
                         raise
-                    msg = _("warning: file %s was removed by another process\n")
+                    msg = _(
+                        b"warning: file %s was removed by another process\n"
+                    )
                     ui.warn(msg % path)
                     continue
 
@@ -366,8 +368,8 @@
                         if e.errno != errno.ENOENT:
                             raise
                         msg = _(
-                            "warning: file %s was removed by another "
-                            "process\n"
+                            b"warning: file %s was removed by another "
+                            b"process\n"
                         )
                         ui.warn(msg % path)
                         continue
@@ -375,11 +377,11 @@
         progress.complete()
 
         # remove oldest files until under limit
-        limit = ui.configbytes("remotefilelog", "cachelimit")
+        limit = ui.configbytes(b"remotefilelog", b"cachelimit")
         if size > limit:
             excess = size - limit
             progress = ui.makeprogress(
-                _("enforcing cache limit"), unit="bytes", total=excess
+                _(b"enforcing cache limit"), unit=b"bytes", total=excess
             )
             removedexcess = 0
             while queue and size > limit and size > 0:
@@ -391,7 +393,9 @@
                     # errno.ENOENT = no such file or directory
                     if e.errno != errno.ENOENT:
                         raise
-                    msg = _("warning: file %s was removed by another process\n")
+                    msg = _(
+                        b"warning: file %s was removed by another process\n"
+                    )
                     ui.warn(msg % oldpath)
                 size -= oldpathstat.st_size
                 removed += 1
@@ -399,7 +403,7 @@
             progress.complete()
 
         ui.status(
-            _("finished: removed %d of %d files (%0.2f GB to %0.2f GB)\n")
+            _(b"finished: removed %d of %d files (%0.2f GB to %0.2f GB)\n")
             % (
                 removed,
                 count,
@@ -422,7 +426,7 @@
 
     def markforrefresh(self):
         for store in self.stores:
-            if util.safehasattr(store, 'markforrefresh'):
+            if util.safehasattr(store, b'markforrefresh'):
                 store.markforrefresh()
 
     @staticmethod
@@ -436,7 +440,7 @@
             i = 0
             while i < self.numattempts:
                 if i > 0:
-                    retrylog('re-attempting (n=%d) %s\n' % (i, funcname))
+                    retrylog(b're-attempting (n=%d) %s\n' % (i, funcname))
                     self.markforrefresh()
                 i += 1
                 try:
@@ -445,7 +449,7 @@
                     if i == self.numattempts:
                         # retries exhausted
                         retrylog(
-                            'retries exhausted in %s, raising KeyError\n'
+                            b'retries exhausted in %s, raising KeyError\n'
                             % pycompat.sysbytes(funcname)
                         )
                         raise
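
The open() modes byteified in basestore.py above (b'rb', b'a+', b'ab') carry
the opposite caveat: they are equivalent only on Python 2, where bytes is the
native str type, since Python 3's open() requires a str mode. A quick
stdlib-only illustration:

    import os
    import tempfile

    fd, path = tempfile.mkstemp()
    os.close(fd)
    with open(path, 'wb') as f:       # str mode: accepted everywhere
        f.write(b'data')
    try:
        open(path, b'rb')             # TypeError on Python 3
    except TypeError:
        print('bytes open() modes are rejected on Python 3')
    os.unlink(path)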
--- a/hgext/remotefilelog/connectionpool.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/remotefilelog/connectionpool.py	Sun Oct 06 09:48:39 2019 -0400
@@ -46,13 +46,13 @@
                 # close pipee first so peer.cleanup reading it won't deadlock,
                 # if there are other processes with pipeo open (i.e. us).
                 peer = orig.im_self
-                if util.safehasattr(peer, 'pipee'):
+                if util.safehasattr(peer, b'pipee'):
                     peer.pipee.close()
                 return orig()
 
             peer = hg.peer(self._repo.ui, {}, path)
-            if util.safehasattr(peer, 'cleanup'):
-                extensions.wrapfunction(peer, 'cleanup', _cleanup)
+            if util.safehasattr(peer, b'cleanup'):
+                extensions.wrapfunction(peer, b'cleanup', _cleanup)
 
             conn = connection(pathpool, peer)
 
@@ -83,5 +83,5 @@
             self.close()
 
     def close(self):
-        if util.safehasattr(self.peer, 'cleanup'):
+        if util.safehasattr(self.peer, b'cleanup'):
             self.peer.cleanup()
--- a/hgext/remotefilelog/constants.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/remotefilelog/constants.py	Sun Oct 06 09:48:39 2019 -0400
@@ -4,40 +4,40 @@
 
 from mercurial.i18n import _
 
-NETWORK_CAP_LEGACY_SSH_GETFILES = 'exp-remotefilelog-ssh-getfiles-1'
+NETWORK_CAP_LEGACY_SSH_GETFILES = b'exp-remotefilelog-ssh-getfiles-1'
 
-SHALLOWREPO_REQUIREMENT = "exp-remotefilelog-repo-req-1"
+SHALLOWREPO_REQUIREMENT = b"exp-remotefilelog-repo-req-1"
 
-BUNDLE2_CAPABLITY = "exp-remotefilelog-b2cap-1"
+BUNDLE2_CAPABLITY = b"exp-remotefilelog-b2cap-1"
 
-FILENAMESTRUCT = '!H'
+FILENAMESTRUCT = b'!H'
 FILENAMESIZE = struct.calcsize(FILENAMESTRUCT)
 
 NODESIZE = 20
-PACKREQUESTCOUNTSTRUCT = '!I'
+PACKREQUESTCOUNTSTRUCT = b'!I'
 
-NODECOUNTSTRUCT = '!I'
+NODECOUNTSTRUCT = b'!I'
 NODECOUNTSIZE = struct.calcsize(NODECOUNTSTRUCT)
 
-PATHCOUNTSTRUCT = '!I'
+PATHCOUNTSTRUCT = b'!I'
 PATHCOUNTSIZE = struct.calcsize(PATHCOUNTSTRUCT)
 
-FILEPACK_CATEGORY = ""
-TREEPACK_CATEGORY = "manifests"
+FILEPACK_CATEGORY = b""
+TREEPACK_CATEGORY = b"manifests"
 
 ALL_CATEGORIES = [FILEPACK_CATEGORY, TREEPACK_CATEGORY]
 
 # revision metadata keys. must be a single character.
-METAKEYFLAG = 'f'  # revlog flag
-METAKEYSIZE = 's'  # full rawtext size
+METAKEYFLAG = b'f'  # revlog flag
+METAKEYSIZE = b's'  # full rawtext size
 
 
 def getunits(category):
     if category == FILEPACK_CATEGORY:
-        return _("files")
+        return _(b"files")
     if category == TREEPACK_CATEGORY:
-        return _("trees")
+        return _(b"trees")
 
 
 # Repack options passed to ``markledger``.
-OPTION_PACKSONLY = 'packsonly'
+OPTION_PACKSONLY = b'packsonly'
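
For orientation, the *SIZE constants above are derived from fixed-width,
network-order struct formats, so they are the same on every platform:

    import struct

    assert struct.calcsize(b'!H') == 2   # FILENAMESIZE
    assert struct.calcsize(b'!I') == 4   # NODECOUNTSIZE, PATHCOUNTSIZE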
--- a/hgext/remotefilelog/contentstore.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/remotefilelog/contentstore.py	Sun Oct 06 09:48:39 2019 -0400
@@ -136,7 +136,7 @@
 
     def add(self, name, node, data):
         raise RuntimeError(
-            "cannot add content only to remotefilelog " "contentstore"
+            b"cannot add content only to remotefilelog " b"contentstore"
         )
 
     def getmissing(self, keys):
@@ -150,7 +150,7 @@
         if self.writestore:
             self.writestore.addremotefilelognode(name, node, data)
         else:
-            raise RuntimeError("no writable store configured")
+            raise RuntimeError(b"no writable store configured")
 
     def markledger(self, ledger, options=None):
         for store in self.stores:
@@ -208,7 +208,7 @@
 
     def add(self, name, node, data):
         raise RuntimeError(
-            "cannot add content only to remotefilelog " "contentstore"
+            b"cannot add content only to remotefilelog " b"contentstore"
         )
 
     def _sanitizemetacache(self):
@@ -255,7 +255,7 @@
         return self._shared.getmeta(name, node)
 
     def add(self, name, node, data):
-        raise RuntimeError("cannot add to a remote store")
+        raise RuntimeError(b"cannot add to a remote store")
 
     def getmissing(self, keys):
         return keys
@@ -269,7 +269,7 @@
         self._store = repo.store
         self._svfs = repo.svfs
         self._revlogs = dict()
-        self._cl = revlog.revlog(self._svfs, '00changelog.i')
+        self._cl = revlog.revlog(self._svfs, b'00changelog.i')
         self._repackstartlinkrev = 0
 
     def get(self, name, node):
@@ -311,7 +311,7 @@
                 missing.add(p2)
 
             linknode = self._cl.node(rl.linkrev(ancrev))
-            ancestors[rl.node(ancrev)] = (p1, p2, linknode, '')
+            ancestors[rl.node(ancrev)] = (p1, p2, linknode, b'')
             if not missing:
                 break
         return ancestors
@@ -324,14 +324,14 @@
         return (parents[0], parents[1], cl.node(linkrev), None)
 
     def add(self, *args):
-        raise RuntimeError("cannot add to a revlog store")
+        raise RuntimeError(b"cannot add to a revlog store")
 
     def _revlog(self, name):
         rl = self._revlogs.get(name)
         if rl is None:
-            revlogname = '00manifesttree.i'
-            if name != '':
-                revlogname = 'meta/%s/00manifest.i' % name
+            revlogname = b'00manifesttree.i'
+            if name != b'':
+                revlogname = b'meta/%s/00manifest.i' % name
             rl = revlog.revlog(self._svfs, revlogname)
             self._revlogs[name] = rl
         return rl
@@ -352,8 +352,8 @@
     def markledger(self, ledger, options=None):
         if options and options.get(constants.OPTION_PACKSONLY):
             return
-        treename = ''
-        rl = revlog.revlog(self._svfs, '00manifesttree.i')
+        treename = b''
+        rl = revlog.revlog(self._svfs, b'00manifesttree.i')
         startlinkrev = self._repackstartlinkrev
         endlinkrev = self._repackendlinkrev
         for rev in pycompat.xrange(len(rl) - 1, -1, -1):
@@ -367,10 +367,10 @@
             ledger.markhistoryentry(self, treename, node)
 
         for path, encoded, size in self._store.datafiles():
-            if path[:5] != 'meta/' or path[-2:] != '.i':
+            if path[:5] != b'meta/' or path[-2:] != b'.i':
                 continue
 
-            treename = path[5 : -len('/00manifest.i')]
+            treename = path[5 : -len(b'/00manifest.i')]
 
             rl = revlog.revlog(self._svfs, path)
             for rev in pycompat.xrange(len(rl) - 1, -1, -1):
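
The _revlog() hunk above encodes the tree-manifest naming convention: the root
tree lives in 00manifesttree.i and each subtree under
meta/<path>/00manifest.i. A standalone helper mirroring that mapping (the
function name here is hypothetical):

    def treerevlogpath(name):
        # root tree (empty name) versus a subdirectory tree
        if name == b'':
            return b'00manifesttree.i'
        return b'meta/%s/00manifest.i' % name

    assert treerevlogpath(b'') == b'00manifesttree.i'
    assert treerevlogpath(b'foo/bar') == b'meta/foo/bar/00manifest.i'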
--- a/hgext/remotefilelog/datapack.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/remotefilelog/datapack.py	Sun Oct 06 09:48:39 2019 -0400
@@ -21,8 +21,8 @@
 FULLTEXTINDEXMARK = -1
 NOBASEINDEXMARK = -2
 
-INDEXSUFFIX = '.dataidx'
-PACKSUFFIX = '.datapack'
+INDEXSUFFIX = b'.dataidx'
+PACKSUFFIX = b'.datapack'
 
 
 class datapackstore(basepack.basepackstore):
@@ -36,7 +36,7 @@
         return datapack(path)
 
     def get(self, name, node):
-        raise RuntimeError("must use getdeltachain with datapackstore")
+        raise RuntimeError(b"must use getdeltachain with datapackstore")
 
     def getmeta(self, name, node):
         for pack in self.packs:
@@ -84,7 +84,7 @@
         raise KeyError((name, hex(node)))
 
     def add(self, name, node, data):
-        raise RuntimeError("cannot add to datapackstore")
+        raise RuntimeError(b"cannot add to datapackstore")
 
 
 class datapack(basepack.basepack):
@@ -93,7 +93,7 @@
 
     # Format is <node><delta offset><pack data offset><pack data size>
     # See the mutabledatapack doccomment for more details.
-    INDEXFORMAT = '!20siQQ'
+    INDEXFORMAT = b'!20siQQ'
     INDEXENTRYLENGTH = 40
 
     SUPPORTED_VERSIONS = [2]
@@ -109,7 +109,7 @@
 
     def get(self, name, node):
         raise RuntimeError(
-            "must use getdeltachain with datapack (%s:%s)" % (name, hex(node))
+            b"must use getdeltachain with datapack (%s:%s)" % (name, hex(node))
         )
 
     def getmeta(self, name, node):
@@ -122,11 +122,11 @@
 
         # see docstring of mutabledatapack for the format
         offset = 0
-        offset += struct.unpack_from('!H', rawentry, offset)[0] + 2  # filename
+        offset += struct.unpack_from(b'!H', rawentry, offset)[0] + 2  # filename
         offset += 40  # node, deltabase node
-        offset += struct.unpack_from('!Q', rawentry, offset)[0] + 8  # delta
+        offset += struct.unpack_from(b'!Q', rawentry, offset)[0] + 8  # delta
 
-        metalen = struct.unpack_from('!I', rawentry, offset)[0]
+        metalen = struct.unpack_from(b'!I', rawentry, offset)[0]
         offset += 4
 
         meta = shallowutil.parsepackmeta(rawentry[offset : offset + metalen])
@@ -186,7 +186,7 @@
 
         # <2 byte len> + <filename>
         lengthsize = 2
-        filenamelen = struct.unpack('!H', rawentry[:2])[0]
+        filenamelen = struct.unpack(b'!H', rawentry[:2])[0]
         filename = rawentry[lengthsize : lengthsize + filenamelen]
 
         # <20 byte node> + <20 byte deltabase>
@@ -198,14 +198,14 @@
         # <8 byte len> + <delta>
         deltastart = deltabasestart + NODELENGTH
         rawdeltalen = rawentry[deltastart : deltastart + 8]
-        deltalen = struct.unpack('!Q', rawdeltalen)[0]
+        deltalen = struct.unpack(b'!Q', rawdeltalen)[0]
 
         delta = rawentry[deltastart + 8 : deltastart + 8 + deltalen]
         delta = self._decompress(delta)
 
         if getmeta:
             metastart = deltastart + 8 + deltalen
-            metalen = struct.unpack_from('!I', rawentry, metastart)[0]
+            metalen = struct.unpack_from(b'!I', rawentry, metastart)[0]
 
             rawmeta = rawentry[metastart + 4 : metastart + 4 + metalen]
             meta = shallowutil.parsepackmeta(rawmeta)
@@ -217,7 +217,7 @@
         return zlib.decompress(data)
 
     def add(self, name, node, data):
-        raise RuntimeError("cannot add to datapack (%s:%s)" % (name, node))
+        raise RuntimeError(b"cannot add to datapack (%s:%s)" % (name, node))
 
     def _find(self, node):
         params = self.params
@@ -292,7 +292,7 @@
             oldoffset = offset
 
             # <2 byte len> + <filename>
-            filenamelen = struct.unpack('!H', data[offset : offset + 2])[0]
+            filenamelen = struct.unpack(b'!H', data[offset : offset + 2])[0]
             offset += 2
             filename = data[offset : offset + filenamelen]
             offset += filenamelen
@@ -306,7 +306,7 @@
 
             # <8 byte len> + <delta>
             rawdeltalen = data[offset : offset + 8]
-            deltalen = struct.unpack('!Q', rawdeltalen)[0]
+            deltalen = struct.unpack(b'!Q', rawdeltalen)[0]
             offset += 8
 
             # TODO(augie): we should store a header that is the
@@ -317,7 +317,7 @@
             offset += deltalen
 
             # <4 byte len> + <metadata-list>
-            metalen = struct.unpack_from('!I', data, offset)[0]
+            metalen = struct.unpack_from(b'!I', data, offset)[0]
             offset += 4 + metalen
 
             yield (filename, node, deltabase, uncompressedlen)
@@ -415,9 +415,9 @@
     def add(self, name, node, deltabasenode, delta, metadata=None):
         # metadata is a dict, ex. {METAKEYFLAG: flag}
         if len(name) > 2 ** 16:
-            raise RuntimeError(_("name too long %s") % name)
+            raise RuntimeError(_(b"name too long %s") % name)
         if len(node) != 20:
-            raise RuntimeError(_("node should be 20 bytes %s") % node)
+            raise RuntimeError(_(b"node should be 20 bytes %s") % node)
 
         if node in self.entries:
             # The revision has already been added
@@ -426,20 +426,20 @@
         # TODO: allow configurable compression
         delta = self._compress(delta)
 
-        rawdata = ''.join(
+        rawdata = b''.join(
             (
-                struct.pack('!H', len(name)),  # unsigned 2 byte int
+                struct.pack(b'!H', len(name)),  # unsigned 2 byte int
                 name,
                 node,
                 deltabasenode,
-                struct.pack('!Q', len(delta)),  # unsigned 8 byte int
+                struct.pack(b'!Q', len(delta)),  # unsigned 8 byte int
                 delta,
             )
         )
 
         # v1 support metadata
         rawmeta = shallowutil.buildpackmeta(metadata)
-        rawdata += struct.pack('!I', len(rawmeta))  # unsigned 4 byte
+        rawdata += struct.pack(b'!I', len(rawmeta))  # unsigned 4 byte
         rawdata += rawmeta
 
         offset = self.packfp.tell()
@@ -455,7 +455,7 @@
             (n, db, o, s) for n, (db, o, s) in self.entries.iteritems()
         )
 
-        rawindex = ''
+        rawindex = b''
         fmt = self.INDEXFORMAT
         for node, deltabase, offset, size in entries:
             if deltabase == nullid:
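
Taken together, the datapack hunks above describe each pack entry as
<2-byte name length><name><20-byte node><20-byte deltabase>
<8-byte delta length><delta><4-byte meta length><meta>. A stdlib-only sketch
that builds one uncompressed entry and re-parses the name, mirroring the
offsets that getmeta() and the iterator walk (compression and the real
metadata encoding are omitted; buildentry is a hypothetical name):

    import struct

    def buildentry(name, node, deltabase, delta, rawmeta=b''):
        return b''.join((
            struct.pack(b'!H', len(name)), name,        # <2 byte len><name>
            node, deltabase,                            # two 20-byte hashes
            struct.pack(b'!Q', len(delta)), delta,      # <8 byte len><delta>
            struct.pack(b'!I', len(rawmeta)), rawmeta,  # <4 byte len><meta>
        ))

    entry = buildentry(b'foo.txt', b'N' * 20, b'\0' * 20, b'delta-bytes')
    namelen = struct.unpack_from(b'!H', entry, 0)[0]
    assert entry[2:2 + namelen] == b'foo.txt'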
--- a/hgext/remotefilelog/debugcommands.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/remotefilelog/debugcommands.py	Sun Oct 06 09:48:39 2019 -0400
@@ -35,13 +35,13 @@
 
     size, firstnode, mapping = parsefileblob(path, decompress)
 
-    ui.status(_("size: %d bytes\n") % size)
-    ui.status(_("path: %s \n") % path)
-    ui.status(_("key: %s \n") % (short(firstnode)))
-    ui.status(_("\n"))
+    ui.status(_(b"size: %d bytes\n") % size)
+    ui.status(_(b"path: %s \n") % path)
+    ui.status(_(b"key: %s \n") % (short(firstnode)))
+    ui.status(_(b"\n"))
     ui.status(
-        _("%12s => %12s %13s %13s %12s\n")
-        % ("node", "p1", "p2", "linknode", "copyfrom")
+        _(b"%12s => %12s %13s %13s %12s\n")
+        % (b"node", b"p1", b"p2", b"linknode", b"copyfrom")
     )
 
     queue = [firstnode]
@@ -49,7 +49,7 @@
         node = queue.pop(0)
         p1, p2, linknode, copyfrom = mapping[node]
         ui.status(
-            _("%s => %s  %s  %s  %s\n")
+            _(b"%s => %s  %s  %s  %s\n")
             % (short(node), short(p1), short(p2), short(linknode), copyfrom)
         )
         if p1 != nullid:
@@ -61,21 +61,21 @@
 def buildtemprevlog(repo, file):
     # get filename key
     filekey = nodemod.hex(hashlib.sha1(file).digest())
-    filedir = os.path.join(repo.path, 'store/data', filekey)
+    filedir = os.path.join(repo.path, b'store/data', filekey)
 
     # sort all entries based on linkrev
     fctxs = []
     for filenode in os.listdir(filedir):
-        if '_old' not in filenode:
+        if b'_old' not in filenode:
             fctxs.append(repo.filectx(file, fileid=bin(filenode)))
 
     fctxs = sorted(fctxs, key=lambda x: x.linkrev())
 
     # add to revlog
-    temppath = repo.sjoin('data/temprevlog.i')
+    temppath = repo.sjoin(b'data/temprevlog.i')
     if os.path.exists(temppath):
         os.remove(temppath)
-    r = filelog.filelog(repo.svfs, 'temprevlog')
+    r = filelog.filelog(repo.svfs, b'temprevlog')
 
     class faket(object):
         def add(self, a, b, c):
@@ -89,8 +89,8 @@
         p = fctx.filelog().parents(fctx.filenode())
         meta = {}
         if fctx.renamed():
-            meta['copy'] = fctx.renamed()[0]
-            meta['copyrev'] = hex(fctx.renamed()[1])
+            meta[b'copy'] = fctx.renamed()[0]
+            meta[b'copyrev'] = hex(fctx.renamed()[1])
 
         r.add(fctx.data(), meta, t, fctx.linkrev(), p[0], p[1])
 
@@ -111,29 +111,29 @@
     r = buildtemprevlog(repo, file_)
 
     # debugindex like normal
-    format = opts.get('format', 0)
+    format = opts.get(b'format', 0)
     if format not in (0, 1):
-        raise error.Abort(_("unknown format %d") % format)
+        raise error.Abort(_(b"unknown format %d") % format)
 
     generaldelta = r.version & revlog.FLAG_GENERALDELTA
     if generaldelta:
-        basehdr = ' delta'
+        basehdr = b' delta'
     else:
-        basehdr = '  base'
+        basehdr = b'  base'
 
     if format == 0:
         ui.write(
             (
-                "   rev    offset  length " + basehdr + " linkrev"
-                " nodeid       p1           p2\n"
+                b"   rev    offset  length " + basehdr + b" linkrev"
+                b" nodeid       p1           p2\n"
             )
         )
     elif format == 1:
         ui.write(
             (
-                "   rev flag   offset   length"
-                "     size " + basehdr + "   link     p1     p2"
-                "       nodeid\n"
+                b"   rev flag   offset   length"
+                b"     size " + basehdr + b"   link     p1     p2"
+                b"       nodeid\n"
             )
         )
 
@@ -149,7 +149,7 @@
             except Exception:
                 pp = [nullid, nullid]
             ui.write(
-                "% 6d % 9d % 7d % 6d % 7d %s %s %s\n"
+                b"% 6d % 9d % 7d % 6d % 7d %s %s %s\n"
                 % (
                     i,
                     r.start(i),
@@ -164,7 +164,7 @@
         elif format == 1:
             pr = r.parentrevs(i)
             ui.write(
-                "% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n"
+                b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n"
                 % (
                     i,
                     r.flags(i),
@@ -187,14 +187,14 @@
 
     r = buildtemprevlog(repo, os.path.basename(file_)[:-2])
 
-    ui.write("digraph G {\n")
+    ui.write(b"digraph G {\n")
     for i in r:
         node = r.node(i)
         pp = r.parents(node)
-        ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
+        ui.write(b"\t%d -> %d\n" % (r.rev(pp[0]), i))
         if pp[1] != nullid:
-            ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
-    ui.write("}\n")
+            ui.write(b"\t%d -> %d\n" % (r.rev(pp[1]), i))
+    ui.write(b"}\n")
 
 
 def verifyremotefilelog(ui, path, **opts):
@@ -202,7 +202,7 @@
 
     for root, dirs, files in os.walk(path):
         for file in files:
-            if file == "repos":
+            if file == b"repos":
                 continue
             filepath = os.path.join(root, file)
             size, firstnode, mapping = parsefileblob(filepath, decompress)
@@ -210,10 +210,10 @@
                 if linknode == nullid:
                     actualpath = os.path.relpath(root, path)
                     key = fileserverclient.getcachekey(
-                        "reponame", actualpath, file
+                        b"reponame", actualpath, file
                     )
                     ui.status(
-                        "%s %s\n" % (key, os.path.relpath(filepath, path))
+                        b"%s %s\n" % (key, os.path.relpath(filepath, path))
                     )
 
 
@@ -222,7 +222,7 @@
 
 
 def parsefileblob(path, decompress):
-    f = open(path, "rb")
+    f = open(path, b"rb")
     try:
         raw = f.read()
     finally:
@@ -238,7 +238,7 @@
 
     mapping = {}
     while start < len(raw):
-        divider = raw.index('\0', start + 80)
+        divider = raw.index(b'\0', start + 80)
 
         currentnode = raw[start : (start + 20)]
         if not firstnode:
@@ -257,13 +257,13 @@
 
 def debugdatapack(ui, *paths, **opts):
     for path in paths:
-        if '.data' in path:
-            path = path[: path.index('.data')]
-        ui.write("%s:\n" % path)
+        if b'.data' in path:
+            path = path[: path.index(b'.data')]
+        ui.write(b"%s:\n" % path)
         dpack = datapack.datapack(path)
         node = opts.get(r'node')
         if node:
-            deltachain = dpack.getdeltachain('', bin(node))
+            deltachain = dpack.getdeltachain(b'', bin(node))
             dumpdeltachain(ui, deltachain, **opts)
             return
 
@@ -280,21 +280,21 @@
 
         def printtotals():
             if lastfilename is not None:
-                ui.write("\n")
+                ui.write(b"\n")
             if not totaldeltasize or not totalblobsize:
                 return
             difference = totalblobsize - totaldeltasize
-            deltastr = "%0.1f%% %s" % (
+            deltastr = b"%0.1f%% %s" % (
                 (100.0 * abs(difference) / totalblobsize),
-                ("smaller" if difference > 0 else "bigger"),
+                (b"smaller" if difference > 0 else b"bigger"),
             )
 
             ui.write(
-                "Total:%s%s  %s (%s)\n"
+                b"Total:%s%s  %s (%s)\n"
                 % (
-                    "".ljust(2 * hashlen - len("Total:")),
-                    ('%d' % totaldeltasize).ljust(12),
-                    ('%d' % totalblobsize).ljust(9),
+                    b"".ljust(2 * hashlen - len(b"Total:")),
+                    (b'%d' % totaldeltasize).ljust(12),
+                    (b'%d' % totalblobsize).ljust(9),
                     deltastr,
                 )
             )
@@ -305,20 +305,20 @@
         for filename, node, deltabase, deltalen in dpack.iterentries():
             bases[node] = deltabase
             if node in nodes:
-                ui.write(("Bad entry: %s appears twice\n" % short(node)))
+                ui.write((b"Bad entry: %s appears twice\n" % short(node)))
                 failures += 1
             nodes.add(node)
             if filename != lastfilename:
                 printtotals()
-                name = '(empty name)' if filename == '' else filename
-                ui.write("%s:\n" % name)
+                name = b'(empty name)' if filename == b'' else filename
+                ui.write(b"%s:\n" % name)
                 ui.write(
-                    "%s%s%s%s\n"
+                    b"%s%s%s%s\n"
                     % (
-                        "Node".ljust(hashlen),
-                        "Delta Base".ljust(hashlen),
-                        "Delta Length".ljust(14),
-                        "Blob Size".ljust(9),
+                        b"Node".ljust(hashlen),
+                        b"Delta Base".ljust(hashlen),
+                        b"Delta Length".ljust(14),
+                        b"Blob Size".ljust(9),
                     )
                 )
                 lastfilename = filename
@@ -332,13 +332,13 @@
                 totaldeltasize += deltalen
                 totalblobsize += blobsize
             else:
-                blobsize = "(missing)"
+                blobsize = b"(missing)"
             ui.write(
-                "%s  %s  %s%s\n"
+                b"%s  %s  %s%s\n"
                 % (
                     hashformatter(node),
                     hashformatter(deltabase),
-                    ('%d' % deltalen).ljust(14),
+                    (b'%d' % deltalen).ljust(14),
                     pycompat.bytestr(blobsize),
                 )
             )
@@ -348,7 +348,7 @@
 
         failures += _sanitycheck(ui, set(nodes), bases)
         if failures > 1:
-            ui.warn(("%d failures\n" % failures))
+            ui.warn((b"%d failures\n" % failures))
             return 1
 
 
@@ -370,7 +370,7 @@
             if deltabase not in nodes:
                 ui.warn(
                     (
-                        "Bad entry: %s has an unknown deltabase (%s)\n"
+                        b"Bad entry: %s has an unknown deltabase (%s)\n"
                         % (short(node), short(deltabase))
                     )
                 )
@@ -380,7 +380,7 @@
             if deltabase in seen:
                 ui.warn(
                     (
-                        "Bad entry: %s has a cycle (at %s)\n"
+                        b"Bad entry: %s has a cycle (at %s)\n"
                         % (short(node), short(deltabase))
                     )
                 )
@@ -403,20 +403,20 @@
     lastfilename = None
     for filename, node, filename, deltabasenode, delta in deltachain:
         if filename != lastfilename:
-            ui.write("\n%s\n" % filename)
+            ui.write(b"\n%s\n" % filename)
             lastfilename = filename
         ui.write(
-            "%s  %s  %s  %s\n"
+            b"%s  %s  %s  %s\n"
             % (
-                "Node".ljust(hashlen),
-                "Delta Base".ljust(hashlen),
-                "Delta SHA1".ljust(hashlen),
-                "Delta Length".ljust(6),
+                b"Node".ljust(hashlen),
+                b"Delta Base".ljust(hashlen),
+                b"Delta SHA1".ljust(hashlen),
+                b"Delta Length".ljust(6),
             )
         )
 
         ui.write(
-            "%s  %s  %s  %d\n"
+            b"%s  %s  %s  %d\n"
             % (
                 hashformatter(node),
                 hashformatter(deltabasenode),
@@ -427,28 +427,28 @@
 
 
 def debughistorypack(ui, path):
-    if '.hist' in path:
-        path = path[: path.index('.hist')]
+    if b'.hist' in path:
+        path = path[: path.index(b'.hist')]
     hpack = historypack.historypack(path)
 
     lastfilename = None
     for entry in hpack.iterentries():
         filename, node, p1node, p2node, linknode, copyfrom = entry
         if filename != lastfilename:
-            ui.write("\n%s\n" % filename)
+            ui.write(b"\n%s\n" % filename)
             ui.write(
-                "%s%s%s%s%s\n"
+                b"%s%s%s%s%s\n"
                 % (
-                    "Node".ljust(14),
-                    "P1 Node".ljust(14),
-                    "P2 Node".ljust(14),
-                    "Link Node".ljust(14),
-                    "Copy From",
+                    b"Node".ljust(14),
+                    b"P1 Node".ljust(14),
+                    b"P2 Node".ljust(14),
+                    b"Link Node".ljust(14),
+                    b"Copy From",
                 )
             )
             lastfilename = filename
         ui.write(
-            "%s  %s  %s  %s  %s\n"
+            b"%s  %s  %s  %s  %s\n"
             % (
                 short(node),
                 short(p1node),
@@ -460,17 +460,17 @@
 
 
 def debugwaitonrepack(repo):
-    with lockmod.lock(repack.repacklockvfs(repo), "repacklock", timeout=-1):
+    with lockmod.lock(repack.repacklockvfs(repo), b"repacklock", timeout=-1):
         return
 
 
 def debugwaitonprefetch(repo):
     with repo._lock(
         repo.svfs,
-        "prefetchlock",
+        b"prefetchlock",
         True,
         None,
         None,
-        _('prefetching in %s') % repo.origroot,
+        _(b'prefetching in %s') % repo.origroot,
     ):
         pass
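
Note on the debugcommands.py hunks above: the table output keeps working after byteification because Python 3.5+ restored %-formatting for bytes (PEP 461), and bytes.ljust() pads with ASCII spaces just like its str counterpart. A minimal standalone sketch, plain Python 3 with nothing Mercurial-specific assumed:

    # bytes %-formatting and padding, as used for the pack table columns
    deltalen = 12345
    column = (b'%d' % deltalen).ljust(14)
    assert column.startswith(b'12345') and len(column) == 14
    row = b'%s  %s\n' % (b'Node'.ljust(14), column)
    assert row.startswith(b'Node ') and len(row) == 31
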
--- a/hgext/remotefilelog/fileserverclient.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/remotefilelog/fileserverclient.py	Sun Oct 06 09:48:39 2019 -0400
@@ -58,38 +58,38 @@
     class remotefilepeer(peer.__class__):
         @wireprotov1peer.batchable
         def x_rfl_getfile(self, file, node):
-            if not self.capable('x_rfl_getfile'):
+            if not self.capable(b'x_rfl_getfile'):
                 raise error.Abort(
-                    'configured remotefile server does not support getfile'
+                    b'configured remotefile server does not support getfile'
                 )
             f = wireprotov1peer.future()
-            yield {'file': file, 'node': node}, f
-            code, data = f.value.split('\0', 1)
+            yield {b'file': file, b'node': node}, f
+            code, data = f.value.split(b'\0', 1)
             if int(code):
                 raise error.LookupError(file, node, data)
             yield data
 
         @wireprotov1peer.batchable
         def x_rfl_getflogheads(self, path):
-            if not self.capable('x_rfl_getflogheads'):
+            if not self.capable(b'x_rfl_getflogheads'):
                 raise error.Abort(
-                    'configured remotefile server does not '
-                    'support getflogheads'
+                    b'configured remotefile server does not '
+                    b'support getflogheads'
                 )
             f = wireprotov1peer.future()
-            yield {'path': path}, f
-            heads = f.value.split('\n') if f.value else []
+            yield {b'path': path}, f
+            heads = f.value.split(b'\n') if f.value else []
             yield heads
 
         def _updatecallstreamopts(self, command, opts):
-            if command != 'getbundle':
+            if command != b'getbundle':
                 return
             if (
                 constants.NETWORK_CAP_LEGACY_SSH_GETFILES
                 not in self.capabilities()
             ):
                 return
-            if not util.safehasattr(self, '_localrepo'):
+            if not util.safehasattr(self, b'_localrepo'):
                 return
             if (
                 constants.SHALLOWREPO_REQUIREMENT
@@ -97,7 +97,7 @@
             ):
                 return
 
-            bundlecaps = opts.get('bundlecaps')
+            bundlecaps = opts.get(b'bundlecaps')
             if bundlecaps:
                 bundlecaps = [bundlecaps]
             else:
@@ -112,14 +112,14 @@
             # do this more cleanly.
             bundlecaps.append(constants.BUNDLE2_CAPABLITY)
             if self._localrepo.includepattern:
-                patterns = '\0'.join(self._localrepo.includepattern)
-                includecap = "includepattern=" + patterns
+                patterns = b'\0'.join(self._localrepo.includepattern)
+                includecap = b"includepattern=" + patterns
                 bundlecaps.append(includecap)
             if self._localrepo.excludepattern:
-                patterns = '\0'.join(self._localrepo.excludepattern)
-                excludecap = "excludepattern=" + patterns
+                patterns = b'\0'.join(self._localrepo.excludepattern)
+                excludecap = b"excludepattern=" + patterns
                 bundlecaps.append(excludecap)
-            opts['bundlecaps'] = ','.join(bundlecaps)
+            opts[b'bundlecaps'] = b','.join(bundlecaps)
 
         def _sendrequest(self, command, args, **opts):
             self._updatecallstreamopts(command, args)
@@ -129,7 +129,7 @@
 
         def _callstream(self, command, **opts):
             supertype = super(remotefilepeer, self)
-            if not util.safehasattr(supertype, '_sendrequest'):
+            if not util.safehasattr(supertype, b'_sendrequest'):
                 self._updatecallstreamopts(command, pycompat.byteskwargs(opts))
             return super(remotefilepeer, self)._callstream(command, **opts)
 
@@ -149,7 +149,7 @@
 
     def connect(self, cachecommand):
         if self.pipeo:
-            raise error.Abort(_("cache connection already open"))
+            raise error.Abort(_(b"cache connection already open"))
         self.pipei, self.pipeo, self.pipee, self.subprocess = procutil.popen4(
             cachecommand
         )
@@ -164,7 +164,7 @@
 
         if self.connected:
             try:
-                self.pipei.write("exit\n")
+                self.pipei.write(b"exit\n")
             except Exception:
                 pass
             tryclose(self.pipei)
@@ -223,7 +223,7 @@
         for m in missed:
             futures.append(
                 e.callcommand(
-                    'x_rfl_getfile', {'file': idmap[m], 'node': m[-40:]}
+                    b'x_rfl_getfile', {b'file': idmap[m], b'node': m[-40:]}
                 )
             )
 
@@ -232,14 +232,14 @@
             futures[i] = None  # release memory
             file_ = idmap[m]
             node = m[-40:]
-            receivemissing(io.BytesIO('%d\n%s' % (len(r), r)), file_, node)
+            receivemissing(io.BytesIO(b'%d\n%s' % (len(r), r)), file_, node)
             progresstick()
 
 
 def _getfiles_optimistic(
     remote, receivemissing, progresstick, missed, idmap, step
 ):
-    remote._callstream("x_rfl_getfiles")
+    remote._callstream(b"x_rfl_getfiles")
     i = 0
     pipeo = remote._pipeo
     pipei = remote._pipei
@@ -252,7 +252,7 @@
             # issue new request
             versionid = missingid[-40:]
             file = idmap[missingid]
-            sshrequest = "%s%s\n" % (versionid, file)
+            sshrequest = b"%s%s\n" % (versionid, file)
             pipeo.write(sshrequest)
         pipeo.flush()
 
@@ -264,14 +264,14 @@
             progresstick()
 
     # End the command
-    pipeo.write('\n')
+    pipeo.write(b'\n')
     pipeo.flush()
 
 
 def _getfiles_threaded(
     remote, receivemissing, progresstick, missed, idmap, step
 ):
-    remote._callstream("getfiles")
+    remote._callstream(b"getfiles")
     pipeo = remote._pipeo
     pipei = remote._pipei
 
@@ -279,7 +279,7 @@
         for missingid in missed:
             versionid = missingid[-40:]
             file = idmap[missingid]
-            sshrequest = "%s%s\n" % (versionid, file)
+            sshrequest = b"%s%s\n" % (versionid, file)
             pipeo.write(sshrequest)
         pipeo.flush()
 
@@ -295,7 +295,7 @@
 
     writerthread.join()
     # End the command
-    pipeo.write('\n')
+    pipeo.write(b'\n')
     pipeo.flush()
 
 
@@ -307,17 +307,17 @@
         ui = repo.ui
         self.repo = repo
         self.ui = ui
-        self.cacheprocess = ui.config("remotefilelog", "cacheprocess")
+        self.cacheprocess = ui.config(b"remotefilelog", b"cacheprocess")
         if self.cacheprocess:
             self.cacheprocess = util.expandpath(self.cacheprocess)
 
         # This option causes remotefilelog to pass the full file path to the
         # cacheprocess instead of a hashed key.
         self.cacheprocesspasspath = ui.configbool(
-            "remotefilelog", "cacheprocess.includepath"
+            b"remotefilelog", b"cacheprocess.includepath"
         )
 
-        self.debugoutput = ui.configbool("remotefilelog", "debug")
+        self.debugoutput = ui.configbool(b"remotefilelog", b"debug")
 
         self.remotecache = cacheconnection()
 
@@ -343,19 +343,19 @@
 
         repo = self.repo
         total = len(fileids)
-        request = "get\n%d\n" % total
+        request = b"get\n%d\n" % total
         idmap = {}
         reponame = repo.name
         for file, id in fileids:
             fullid = getcachekey(reponame, file, id)
             if self.cacheprocesspasspath:
-                request += file + '\0'
-            request += fullid + "\n"
+                request += file + b'\0'
+            request += fullid + b"\n"
             idmap[fullid] = file
 
         cache.request(request)
 
-        progress = self.ui.makeprogress(_('downloading'), total=total)
+        progress = self.ui.makeprogress(_(b'downloading'), total=total)
         progress.update(0)
 
         missed = []
@@ -368,16 +368,16 @@
                         missed.append(missingid)
                 self.ui.warn(
                     _(
-                        "warning: cache connection closed early - "
-                        + "falling back to server\n"
+                        b"warning: cache connection closed early - "
+                        + b"falling back to server\n"
                     )
                 )
                 break
-            if missingid == "0":
+            if missingid == b"0":
                 break
-            if missingid.startswith("_hits_"):
+            if missingid.startswith(b"_hits_"):
                 # receive progress reports
-                parts = missingid.split("_")
+                parts = missingid.split(b"_")
                 progress.increment(int(parts[2]))
                 continue
 
@@ -389,8 +389,8 @@
         fromcache = total - len(missed)
         progress.update(fromcache, total=total)
         self.ui.log(
-            "remotefilelog",
-            "remote cache hit rate is %r of %r\n",
+            b"remotefilelog",
+            b"remote cache hit rate is %r of %r\n",
             fromcache,
             total,
             hit=fromcache,
@@ -414,15 +414,15 @@
                         ):
                             if not isinstance(remote, _sshv1peer):
                                 raise error.Abort(
-                                    'remotefilelog requires ssh ' 'servers'
+                                    b'remotefilelog requires ssh ' b'servers'
                                 )
                             step = self.ui.configint(
-                                'remotefilelog', 'getfilesstep'
+                                b'remotefilelog', b'getfilesstep'
                             )
                             getfilestype = self.ui.config(
-                                'remotefilelog', 'getfilestype'
+                                b'remotefilelog', b'getfilestype'
                             )
-                            if getfilestype == 'threaded':
+                            if getfilestype == b'threaded':
                                 _getfiles = _getfiles_threaded
                             else:
                                 _getfiles = _getfiles_optimistic
@@ -434,13 +434,13 @@
                                 idmap,
                                 step,
                             )
-                        elif remote.capable("x_rfl_getfile"):
-                            if remote.capable('batch'):
+                        elif remote.capable(b"x_rfl_getfile"):
+                            if remote.capable(b'batch'):
                                 batchdefault = 100
                             else:
                                 batchdefault = 10
                             batchsize = self.ui.configint(
-                                'remotefilelog', 'batchsize', batchdefault
+                                b'remotefilelog', b'batchsize', batchdefault
                             )
                             self.ui.debug(
                                 b'requesting %d files from '
@@ -456,20 +456,20 @@
                             )
                         else:
                             raise error.Abort(
-                                "configured remotefilelog server"
-                                " does not support remotefilelog"
+                                b"configured remotefilelog server"
+                                b" does not support remotefilelog"
                             )
 
                     self.ui.log(
-                        "remotefilefetchlog",
-                        "Success\n",
+                        b"remotefilefetchlog",
+                        b"Success\n",
                         fetched_files=progress.pos - fromcache,
                         total_to_fetch=total - fromcache,
                     )
                 except Exception:
                     self.ui.log(
-                        "remotefilefetchlog",
-                        "Fail\n",
+                        b"remotefilefetchlog",
+                        b"Fail\n",
                         fetched_files=progress.pos - fromcache,
                         total_to_fetch=total - fromcache,
                     )
@@ -477,7 +477,7 @@
                 finally:
                     self.ui.verbose = verbose
                 # send to memcache
-                request = "set\n%d\n%s\n" % (len(missed), "\n".join(missed))
+                request = b"set\n%d\n%s\n" % (len(missed), b"\n".join(missed))
                 cache.request(request)
 
             progress.complete()
@@ -491,15 +491,15 @@
         line = pipe.readline()[:-1]
         if not line:
             raise error.ResponseError(
-                _("error downloading file contents:"),
-                _("connection closed early"),
+                _(b"error downloading file contents:"),
+                _(b"connection closed early"),
             )
         size = int(line)
         data = pipe.read(size)
         if len(data) != size:
             raise error.ResponseError(
-                _("error downloading file contents:"),
-                _("only received %s of %s bytes") % (len(data), size),
+                _(b"error downloading file contents:"),
+                _(b"only received %s of %s bytes") % (len(data), size),
             )
 
         self.writedata.addremotefilelognode(
@@ -508,7 +508,7 @@
 
     def connect(self):
         if self.cacheprocess:
-            cmd = "%s %s" % (self.cacheprocess, self.writedata._path)
+            cmd = b"%s %s" % (self.cacheprocess, self.writedata._path)
             self.remotecache.connect(cmd)
         else:
             # If no cache process is specified, we fake one that always
@@ -524,11 +524,11 @@
                     pass
 
                 def request(self, value, flush=True):
-                    lines = value.split("\n")
-                    if lines[0] != "get":
+                    lines = value.split(b"\n")
+                    if lines[0] != b"get":
                         return
                     self.missingids = lines[2:-1]
-                    self.missingids.append('0')
+                    self.missingids.append(b'0')
 
                 def receiveline(self):
                     if len(self.missingids) > 0:
@@ -540,8 +540,8 @@
     def close(self):
         if fetches:
             msg = (
-                "%d files fetched over %d fetches - "
-                + "(%d misses, %0.2f%% hit ratio) over %0.2fs\n"
+                b"%d files fetched over %d fetches - "
+                + b"(%d misses, %0.2f%% hit ratio) over %0.2fs\n"
             ) % (
                 fetched,
                 fetches,
@@ -552,8 +552,8 @@
             if self.debugoutput:
                 self.ui.warn(msg)
             self.ui.log(
-                "remotefilelog.prefetch",
-                msg.replace("%", "%%"),
+                b"remotefilelog.prefetch",
+                msg.replace(b"%", b"%%"),
                 remotefilelogfetched=fetched,
                 remotefilelogfetches=fetches,
                 remotefilelogfetchmisses=fetchmisses,
@@ -576,7 +576,7 @@
             # - workingctx produces ids with length 42,
             #   which we skip since they aren't in any cache
             if (
-                file == '.hgtags'
+                file == b'.hgtags'
                 or len(id) == 42
                 or not repo.shallowmatch(file)
             ):
@@ -605,10 +605,10 @@
             missingids = [(f, id) for f, id in missingids if id != nullid]
             repo.ui.develwarn(
                 (
-                    'remotefilelog not fetching %d null revs'
-                    ' - this is likely hiding bugs' % nullids
+                    b'remotefilelog not fetching %d null revs'
+                    b' - this is likely hiding bugs' % nullids
                 ),
-                config='remotefilelog-ext',
+                config=b'remotefilelog-ext',
             )
         if missingids:
             global fetches, fetched, fetchcost
@@ -619,10 +619,10 @@
             if fetches >= 15 and fetches < 18:
                 if fetches == 15:
                     fetchwarning = self.ui.config(
-                        'remotefilelog', 'fetchwarning'
+                        b'remotefilelog', b'fetchwarning'
                     )
                     if fetchwarning:
-                        self.ui.warn(fetchwarning + '\n')
+                        self.ui.warn(fetchwarning + b'\n')
                 self.logstacktrace()
             missingids = [(file, hex(id)) for file, id in sorted(missingids)]
             fetched += len(missingids)
@@ -630,14 +630,14 @@
             missingids = self.request(missingids)
             if missingids:
                 raise error.Abort(
-                    _("unable to download %d files") % len(missingids)
+                    _(b"unable to download %d files") % len(missingids)
                 )
             fetchcost += time.time() - start
             self._lfsprefetch(fileids)
 
     def _lfsprefetch(self, fileids):
         if not _lfsmod or not util.safehasattr(
-            self.repo.svfs, 'lfslocalblobstore'
+            self.repo.svfs, b'lfslocalblobstore'
         ):
             return
         if not _lfsmod.wrapper.candownload(self.repo):
@@ -661,7 +661,7 @@
         import traceback
 
         self.ui.log(
-            'remotefilelog',
-            'excess remotefilelog fetching:\n%s\n',
-            ''.join(traceback.format_stack()),
+            b'remotefilelog',
+            b'excess remotefilelog fetching:\n%s\n',
+            b''.join(traceback.format_stack()),
         )
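
The fileserverclient.py hunks byteify both sides of every mapping access ({b'file': ..., b'node': ...}, opts[b'bundlecaps']) for a reason: on Python 3, bytes and str never compare equal, so a half-converted key would silently miss. A toy illustration, not Mercurial code:

    opts = {b'bundlecaps': b'HG20'}
    assert opts.get('bundlecaps') is None      # str key silently misses
    assert opts.get(b'bundlecaps') == b'HG20'  # bytes key matches
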
--- a/hgext/remotefilelog/historypack.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/remotefilelog/historypack.py	Sun Oct 06 09:48:39 2019 -0400
@@ -15,21 +15,21 @@
 )
 
 # (filename hash, offset, size)
-INDEXFORMAT2 = '!20sQQII'
+INDEXFORMAT2 = b'!20sQQII'
 INDEXENTRYLENGTH2 = struct.calcsize(INDEXFORMAT2)
 NODELENGTH = 20
 
-NODEINDEXFORMAT = '!20sQ'
+NODEINDEXFORMAT = b'!20sQ'
 NODEINDEXENTRYLENGTH = struct.calcsize(NODEINDEXFORMAT)
 
 # (node, p1, p2, linknode)
-PACKFORMAT = "!20s20s20s20sH"
+PACKFORMAT = b"!20s20s20s20sH"
 PACKENTRYLENGTH = 82
 
 ENTRYCOUNTSIZE = 4
 
-INDEXSUFFIX = '.histidx'
-PACKSUFFIX = '.histpack'
+INDEXSUFFIX = b'.histidx'
+PACKSUFFIX = b'.histpack'
 
 ANC_NODE = 0
 ANC_P1NODE = 1
@@ -77,7 +77,7 @@
 
     def add(self, filename, node, p1, p2, linknode, copyfrom):
         raise RuntimeError(
-            "cannot add to historypackstore (%s:%s)" % (filename, hex(node))
+            b"cannot add to historypackstore (%s:%s)" % (filename, hex(node))
         )
 
 
@@ -168,7 +168,7 @@
 
     def add(self, filename, node, p1, p2, linknode, copyfrom):
         raise RuntimeError(
-            "cannot add to historypack (%s:%s)" % (filename, hex(node))
+            b"cannot add to historypack (%s:%s)" % (filename, hex(node))
         )
 
     def _findnode(self, name, node):
@@ -193,7 +193,7 @@
                 # with the copyfrom string.
                 return entry[:4] + (copyfrom,)
 
-        raise KeyError("unable to find history for %s:%s" % (name, hex(node)))
+        raise KeyError(b"unable to find history for %s:%s" % (name, hex(node)))
 
     def _findsection(self, name):
         params = self.params
@@ -222,19 +222,19 @@
         rawnamelen = self._index[
             nodeindexoffset : nodeindexoffset + constants.FILENAMESIZE
         ]
-        actualnamelen = struct.unpack('!H', rawnamelen)[0]
+        actualnamelen = struct.unpack(b'!H', rawnamelen)[0]
         nodeindexoffset += constants.FILENAMESIZE
         actualname = self._index[
             nodeindexoffset : nodeindexoffset + actualnamelen
         ]
         if actualname != name:
             raise KeyError(
-                "found file name %s when looking for %s" % (actualname, name)
+                b"found file name %s when looking for %s" % (actualname, name)
             )
         nodeindexoffset += actualnamelen
 
         filenamelength = struct.unpack(
-            '!H', self._data[offset : offset + constants.FILENAMESIZE]
+            b'!H', self._data[offset : offset + constants.FILENAMESIZE]
         )[0]
         offset += constants.FILENAMESIZE
 
@@ -243,7 +243,7 @@
 
         if name != actualname:
             raise KeyError(
-                "found file name %s when looking for %s" % (actualname, name)
+                b"found file name %s when looking for %s" % (actualname, name)
             )
 
         # Skip entry list size
@@ -311,14 +311,14 @@
             data = self._data
             # <2 byte len> + <filename>
             filenamelen = struct.unpack(
-                '!H', data[offset : offset + constants.FILENAMESIZE]
+                b'!H', data[offset : offset + constants.FILENAMESIZE]
             )[0]
             offset += constants.FILENAMESIZE
             filename = data[offset : offset + filenamelen]
             offset += filenamelen
 
             revcount = struct.unpack(
-                '!I', data[offset : offset + ENTRYCOUNTSIZE]
+                b'!I', data[offset : offset + ENTRYCOUNTSIZE]
             )[0]
             offset += ENTRYCOUNTSIZE
 
@@ -440,8 +440,8 @@
         self.NODEINDEXENTRYLENGTH = NODEINDEXENTRYLENGTH
 
     def add(self, filename, node, p1, p2, linknode, copyfrom):
-        copyfrom = copyfrom or ''
-        copyfromlen = struct.pack('!H', len(copyfrom))
+        copyfrom = copyfrom or b''
+        copyfromlen = struct.pack(b'!H', len(copyfrom))
         self.fileentries.setdefault(filename, []).append(
             (node, p1, p2, linknode, copyfromlen, copyfrom)
         )
@@ -471,11 +471,11 @@
 
             # Write the file section header
             self.writeraw(
-                "%s%s%s"
+                b"%s%s%s"
                 % (
-                    struct.pack('!H', len(filename)),
+                    struct.pack(b'!H', len(filename)),
                     filename,
-                    struct.pack('!I', len(sortednodes)),
+                    struct.pack(b'!I', len(sortednodes)),
                 )
             )
 
@@ -488,11 +488,11 @@
             offset = sectionstart + sectionlen
             for node in sortednodes:
                 locations[node] = offset
-                raw = '%s%s%s%s%s%s' % entrymap[node]
+                raw = b'%s%s%s%s%s%s' % entrymap[node]
                 rawstrings.append(raw)
                 offset += len(raw)
 
-            rawdata = ''.join(rawstrings)
+            rawdata = b''.join(rawstrings)
             sectionlen += len(rawdata)
 
             self.writeraw(rawdata)
@@ -523,7 +523,7 @@
         files = sorted(files)
 
         # node index is after file index size, file index, and node index size
-        indexlensize = struct.calcsize('!Q')
+        indexlensize = struct.calcsize(b'!Q')
         nodeindexoffset = (
             indexoffset
             + indexlensize
@@ -564,7 +564,9 @@
 
             fileindexentries.append(rawentry)
 
-        nodecountraw = struct.pack('!Q', nodecount)
+        nodecountraw = struct.pack(b'!Q', nodecount)
         return (
-            ''.join(fileindexentries) + nodecountraw + ''.join(nodeindexentries)
+            b''.join(fileindexentries)
+            + nodecountraw
+            + b''.join(nodeindexentries)
         )
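
The struct format constants in historypack.py (INDEXFORMAT2, NODEINDEXFORMAT, PACKFORMAT) are byteified as well; that is safe because Python 3's struct module accepts bytes format strings. A stdlib-only sketch reusing the NODEINDEXFORMAT layout from above:

    import struct

    NODEINDEXFORMAT = b'!20sQ'  # 20-byte node hash + 64-bit offset
    entry = struct.pack(NODEINDEXFORMAT, b'\x00' * 20, 42)
    assert struct.calcsize(NODEINDEXFORMAT) == len(entry) == 28
    node, offset = struct.unpack(NODEINDEXFORMAT, entry)
    assert node == b'\x00' * 20 and offset == 42
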
--- a/hgext/remotefilelog/metadatastore.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/remotefilelog/metadatastore.py	Sun Oct 06 09:48:39 2019 -0400
@@ -99,7 +99,7 @@
 
     def add(self, name, node, data):
         raise RuntimeError(
-            "cannot add content only to remotefilelog " "contentstore"
+            b"cannot add content only to remotefilelog " b"contentstore"
         )
 
     def getmissing(self, keys):
@@ -136,7 +136,7 @@
 
     def add(self, name, node, parents, linknode):
         raise RuntimeError(
-            "cannot add metadata only to remotefilelog " "metadatastore"
+            b"cannot add metadata only to remotefilelog " b"metadatastore"
         )
 
 
@@ -155,7 +155,7 @@
         return self.getancestors(name, node)[node]
 
     def add(self, name, node, data):
-        raise RuntimeError("cannot add to a remote store")
+        raise RuntimeError(b"cannot add to a remote store")
 
     def getmissing(self, keys):
         return keys
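
The split messages in metadatastore.py (b"cannot add content only to remotefilelog " b"contentstore") rely on compile-time concatenation of adjacent literals, and both halves must be byteified together: mixing a bytes literal with a str literal in such a concatenation is a SyntaxError. For example:

    msg = b"cannot add metadata only to remotefilelog " b"metadatastore"
    assert msg == b"cannot add metadata only to remotefilelog metadatastore"
    # b"cannot ..." "metadatastore" would not even compile:
    # SyntaxError: cannot mix bytes and nonbytes literals
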
--- a/hgext/remotefilelog/remotefilectx.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/remotefilelog/remotefilectx.py	Sun Oct 06 09:48:39 2019 -0400
@@ -246,14 +246,14 @@
             return linknode
 
         commonlogkwargs = {
-            r'revs': ' '.join([hex(cl.node(rev)) for rev in revs]),
+            r'revs': b' '.join([hex(cl.node(rev)) for rev in revs]),
             r'fnode': hex(fnode),
             r'filepath': path,
             r'user': shallowutil.getusername(repo.ui),
             r'reponame': shallowutil.getreponame(repo.ui),
         }
 
-        repo.ui.log('linkrevfixup', 'adjusting linknode\n', **commonlogkwargs)
+        repo.ui.log(b'linkrevfixup', b'adjusting linknode\n', **commonlogkwargs)
 
         pc = repo._phasecache
         seenpublic = False
@@ -322,7 +322,7 @@
         # the slow path is used too much. One promising possibility is using
         # obsolescence markers to find a more-likely-correct linkrev.
 
-        logmsg = ''
+        logmsg = b''
         start = time.time()
         try:
             repo.fileservice.prefetch([(path, hex(fnode))], force=True)
@@ -333,18 +333,18 @@
             self._ancestormap = None
             linknode = self.ancestormap()[fnode][2]  # 2 is linknode
             if self._verifylinknode(revs, linknode):
-                logmsg = 'remotefilelog prefetching succeeded'
+                logmsg = b'remotefilelog prefetching succeeded'
                 return linknode
-            logmsg = 'remotefilelog prefetching not found'
+            logmsg = b'remotefilelog prefetching not found'
             return None
         except Exception as e:
-            logmsg = 'remotefilelog prefetching failed (%s)' % e
+            logmsg = b'remotefilelog prefetching failed (%s)' % e
             return None
         finally:
             elapsed = time.time() - start
             repo.ui.log(
-                'linkrevfixup',
-                logmsg + '\n',
+                b'linkrevfixup',
+                logmsg + b'\n',
                 elapsed=elapsed * 1000,
                 **commonlogkwargs
             )
@@ -473,7 +473,8 @@
                     queue.append(parent)
 
         self._repo.ui.debug(
-            'remotefilelog: prefetching %d files ' 'for annotate\n' % len(fetch)
+            b'remotefilelog: prefetching %d files '
+            b'for annotate\n' % len(fetch)
         )
         if fetch:
             self._repo.fileservice.prefetch(fetch)
@@ -518,7 +519,7 @@
                 p2ctx = self._repo.filectx(p2[0], fileid=p2[1])
                 m.update(p2ctx.filelog().ancestormap(p2[1]))
 
-            copyfrom = ''
+            copyfrom = b''
             if renamed:
                 copyfrom = renamed[0]
             m[None] = (p1[1], p2[1], nullid, copyfrom)
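
In remotefilectx.py the commonlogkwargs keys keep their r'' prefixes instead of becoming bytes: the dict is expanded with **, and Python 3 requires keyword names to be native str. A sketch with a hypothetical stand-in for ui.log():

    def log(event, msg, **kwargs):  # hypothetical stand-in for ui.log()
        return sorted(kwargs)

    kw = {r'fnode': b'abc', r'filepath': b'foo/bar'}
    keys = log(b'linkrevfixup', b'adjusting linknode\n', **kw)
    assert keys == ['filepath', 'fnode']
    # log(b'e', b'm', **{b'fnode': b'x'}) would raise
    # TypeError: keywords must be strings
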
--- a/hgext/remotefilelog/remotefilelog.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/remotefilelog/remotefilelog.py	Sun Oct 06 09:48:39 2019 -0400
@@ -67,15 +67,15 @@
     def read(self, node):
         """returns the file contents at this node"""
         t = self.revision(node)
-        if not t.startswith('\1\n'):
+        if not t.startswith(b'\1\n'):
             return t
-        s = t.index('\1\n', 2)
+        s = t.index(b'\1\n', 2)
         return t[s + 2 :]
 
     def add(self, text, meta, transaction, linknode, p1=None, p2=None):
         # hash with the metadata, like in vanilla filelogs
         hashtext = shallowutil.createrevlogtext(
-            text, meta.get('copy'), meta.get('copyrev')
+            text, meta.get(b'copy'), meta.get(b'copyrev')
         )
         node = storageutil.hashrevisionsha1(hashtext, p1, p2)
         return self.addrevision(
@@ -85,15 +85,15 @@
     def _createfileblob(self, text, meta, flags, p1, p2, node, linknode):
         # text passed to "_createfileblob" does not include filelog metadata
         header = shallowutil.buildfileblobheader(len(text), flags)
-        data = "%s\0%s" % (header, text)
+        data = b"%s\0%s" % (header, text)
 
         realp1 = p1
-        copyfrom = ""
-        if meta and 'copy' in meta:
-            copyfrom = meta['copy']
-            realp1 = bin(meta['copyrev'])
+        copyfrom = b""
+        if meta and b'copy' in meta:
+            copyfrom = meta[b'copy']
+            realp1 = bin(meta[b'copyrev'])
 
-        data += "%s%s%s%s%s\0" % (node, realp1, p2, linknode, copyfrom)
+        data += b"%s%s%s%s%s\0" % (node, realp1, p2, linknode, copyfrom)
 
         visited = set()
 
@@ -112,15 +112,15 @@
             queue.append(p2)
             visited.add(p2)
 
-        ancestortext = ""
+        ancestortext = b""
 
         # add the ancestors in topological order
         while queue:
             c = queue.pop(0)
             pa1, pa2, ancestorlinknode, pacopyfrom = pancestors[c]
 
-            pacopyfrom = pacopyfrom or ''
-            ancestortext += "%s%s%s%s%s\0" % (
+            pacopyfrom = pacopyfrom or b''
+            ancestortext += b"%s%s%s%s%s\0" % (
                 c,
                 pa1,
                 pa2,
@@ -249,14 +249,14 @@
     __bool__ = __nonzero__
 
     def __len__(self):
-        if self.filename == '.hgtags':
+        if self.filename == b'.hgtags':
             # The length of .hgtags is used to fast path tag checking.
             # remotefilelog doesn't support .hgtags since the entire .hgtags
             # history is needed.  Use the excludepattern setting to make
             # .hgtags a normal filelog.
             return 0
 
-        raise RuntimeError("len not supported")
+        raise RuntimeError(b"len not supported")
 
     def empty(self):
         return False
@@ -264,7 +264,7 @@
     def flags(self, node):
         if isinstance(node, int):
             raise error.ProgrammingError(
-                'remotefilelog does not accept integer rev for flags'
+                b'remotefilelog does not accept integer rev for flags'
             )
         store = self.repo.contentstore
         return store.getmeta(self.filename, node).get(constants.METAKEYFLAG, 0)
@@ -338,7 +338,7 @@
             node = bin(node)
         if len(node) != 20:
             raise error.LookupError(
-                node, self.filename, _('invalid lookup input')
+                node, self.filename, _(b'invalid lookup input')
             )
 
         return node
@@ -351,17 +351,17 @@
         # This is a hack.
         if isinstance(rev, int):
             raise error.ProgrammingError(
-                'remotefilelog does not convert integer rev to node'
+                b'remotefilelog does not convert integer rev to node'
             )
         return rev
 
     def _processflags(self, text, flags, operation, raw=False):
         """deprecated entry point to access flag processors"""
-        msg = '_processflag(...) use the specialized variant'
-        util.nouideprecwarn(msg, '5.2', stacklevel=2)
+        msg = b'_processflag(...) use the specialized variant'
+        util.nouideprecwarn(msg, b'5.2', stacklevel=2)
         if raw:
             return text, flagutil.processflagsraw(self, text, flags)
-        elif operation == 'read':
+        elif operation == b'read':
             return flagutil.processflagsread(self, text, flags)
         else:  # write operation
             return flagutil.processflagswrite(self, text, flags)
@@ -373,10 +373,10 @@
         hg clients.
         """
         if node == nullid:
-            return ""
+            return b""
         if len(node) != 20:
             raise error.LookupError(
-                node, self.filename, _('invalid revision input')
+                node, self.filename, _(b'invalid revision input')
             )
         if node == wdirid or node in wdirfilenodeids:
             raise error.WdirUnsupported
@@ -418,7 +418,7 @@
         except KeyError:
             pass
 
-        raise error.LookupError(id, self.filename, _('no node'))
+        raise error.LookupError(id, self.filename, _(b'no node'))
 
     def ancestormap(self, node):
         return self.repo.metadatastore.getancestors(self.filename, node)
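
remotefilelog.read() above strips the classic filelog metadata framing: a revision text starting with b'\1\n' carries a metadata block terminated by a second b'\1\n', and the escape sequences denote the same two bytes before and after byteification. A hedged sketch of that framing, with made-up metadata values:

    t = b'\1\ncopy: a\ncopyrev: ' + b'f' * 40 + b'\1\nreal contents'
    if t.startswith(b'\1\n'):
        s = t.index(b'\1\n', 2)  # find the closing marker
        t = t[s + 2:]
    assert t == b'real contents'
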
--- a/hgext/remotefilelog/remotefilelogserver.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/remotefilelog/remotefilelogserver.py	Sun Oct 06 09:48:39 2019 -0400
@@ -51,15 +51,15 @@
             includepattern = None
             excludepattern = None
             for cap in self._bundlecaps or []:
-                if cap.startswith("includepattern="):
-                    includepattern = cap[len("includepattern=") :].split('\0')
-                elif cap.startswith("excludepattern="):
-                    excludepattern = cap[len("excludepattern=") :].split('\0')
+                if cap.startswith(b"includepattern="):
+                    includepattern = cap[len(b"includepattern=") :].split(b'\0')
+                elif cap.startswith(b"excludepattern="):
+                    excludepattern = cap[len(b"excludepattern=") :].split(b'\0')
 
             m = match.always()
             if includepattern or excludepattern:
                 m = match.match(
-                    repo.root, '', None, includepattern, excludepattern
+                    repo.root, b'', None, includepattern, excludepattern
                 )
 
             changedfiles = list([f for f in changedfiles if not m(f)])
@@ -68,7 +68,7 @@
         )
 
     extensions.wrapfunction(
-        changegroup.cgpacker, 'generatefiles', generatefiles
+        changegroup.cgpacker, b'generatefiles', generatefiles
     )
 
 
@@ -85,13 +85,13 @@
 
     # support file content requests
     wireprotov1server.wireprotocommand(
-        'x_rfl_getflogheads', 'path', permission='pull'
+        b'x_rfl_getflogheads', b'path', permission=b'pull'
     )(getflogheads)
-    wireprotov1server.wireprotocommand('x_rfl_getfiles', '', permission='pull')(
-        getfiles
-    )
     wireprotov1server.wireprotocommand(
-        'x_rfl_getfile', 'file node', permission='pull'
+        b'x_rfl_getfiles', b'', permission=b'pull'
+    )(getfiles)
+    wireprotov1server.wireprotocommand(
+        b'x_rfl_getfile', b'file node', permission=b'pull'
     )(getfile)
 
     class streamstate(object):
@@ -104,12 +104,12 @@
     def stream_out_shallow(repo, proto, other):
         includepattern = None
         excludepattern = None
-        raw = other.get('includepattern')
+        raw = other.get(b'includepattern')
         if raw:
-            includepattern = raw.split('\0')
-        raw = other.get('excludepattern')
+            includepattern = raw.split(b'\0')
+        raw = other.get(b'excludepattern')
         if raw:
-            excludepattern = raw.split('\0')
+            excludepattern = raw.split(b'\0')
 
         oldshallow = state.shallowremote
         oldmatch = state.match
@@ -117,10 +117,10 @@
         try:
             state.shallowremote = True
             state.match = match.always()
-            state.noflatmf = other.get('noflatmanifest') == 'True'
+            state.noflatmf = other.get(b'noflatmanifest') == b'True'
             if includepattern or excludepattern:
                 state.match = match.match(
-                    repo.root, '', None, includepattern, excludepattern
+                    repo.root, b'', None, includepattern, excludepattern
                 )
             streamres = wireprotov1server.stream(repo, proto)
 
@@ -141,7 +141,10 @@
             state.match = oldmatch
             state.noflatmf = oldnoflatmf
 
-    wireprotov1server.commands['stream_out_shallow'] = (stream_out_shallow, '*')
+    wireprotov1server.commands[b'stream_out_shallow'] = (
+        stream_out_shallow,
+        b'*',
+    )
 
     # don't clone filelogs to shallow clients
     def _walkstreamfiles(orig, repo, matcher=None):
@@ -150,22 +153,24 @@
             if shallowutil.isenabled(repo):
                 striplen = len(repo.store.path) + 1
                 readdir = repo.store.rawvfs.readdir
-                visit = [os.path.join(repo.store.path, 'data')]
+                visit = [os.path.join(repo.store.path, b'data')]
                 while visit:
                     p = visit.pop()
                     for f, kind, st in readdir(p, stat=True):
-                        fp = p + '/' + f
+                        fp = p + b'/' + f
                         if kind == stat.S_IFREG:
-                            if not fp.endswith('.i') and not fp.endswith('.d'):
+                            if not fp.endswith(b'.i') and not fp.endswith(
+                                b'.d'
+                            ):
                                 n = util.pconvert(fp[striplen:])
                                 yield (store.decodedir(n), n, st.st_size)
                         if kind == stat.S_IFDIR:
                             visit.append(fp)
 
-            if 'treemanifest' in repo.requirements:
+            if b'treemanifest' in repo.requirements:
                 for (u, e, s) in repo.store.datafiles():
-                    if u.startswith('meta/') and (
-                        u.endswith('.i') or u.endswith('.d')
+                    if u.startswith(b'meta/') and (
+                        u.endswith(b'.i') or u.endswith(b'.d')
                     ):
                         yield (u, e, s)
 
@@ -178,7 +183,7 @@
                         yield (u, e, s)
 
             for x in repo.store.topfiles():
-                if state.noflatmf and x[0][:11] == '00manifest.':
+                if state.noflatmf and x[0][:11] == b'00manifest.':
                     continue
                 yield x
 
@@ -187,47 +192,47 @@
             # since it would require fetching every version of every
             # file in order to create the revlogs.
             raise error.Abort(
-                _("Cannot clone from a shallow repo " "to a full repo.")
+                _(b"Cannot clone from a shallow repo " b"to a full repo.")
             )
         else:
             for x in orig(repo, matcher):
                 yield x
 
-    extensions.wrapfunction(streamclone, '_walkstreamfiles', _walkstreamfiles)
+    extensions.wrapfunction(streamclone, b'_walkstreamfiles', _walkstreamfiles)
 
     # expose remotefilelog capabilities
     def _capabilities(orig, repo, proto):
         caps = orig(repo, proto)
         if shallowutil.isenabled(repo) or ui.configbool(
-            'remotefilelog', 'server'
+            b'remotefilelog', b'server'
         ):
             if isinstance(proto, _sshv1server):
                 # legacy getfiles method which only works over ssh
                 caps.append(constants.NETWORK_CAP_LEGACY_SSH_GETFILES)
-            caps.append('x_rfl_getflogheads')
-            caps.append('x_rfl_getfile')
+            caps.append(b'x_rfl_getflogheads')
+            caps.append(b'x_rfl_getfile')
         return caps
 
-    extensions.wrapfunction(wireprotov1server, '_capabilities', _capabilities)
+    extensions.wrapfunction(wireprotov1server, b'_capabilities', _capabilities)
 
     def _adjustlinkrev(orig, self, *args, **kwargs):
         # When generating file blobs, taking the real path is too slow on large
         # repos, so force it to just return the linkrev directly.
         repo = self._repo
-        if util.safehasattr(repo, 'forcelinkrev') and repo.forcelinkrev:
+        if util.safehasattr(repo, b'forcelinkrev') and repo.forcelinkrev:
             return self._filelog.linkrev(self._filelog.rev(self._filenode))
         return orig(self, *args, **kwargs)
 
     extensions.wrapfunction(
-        context.basefilectx, '_adjustlinkrev', _adjustlinkrev
+        context.basefilectx, b'_adjustlinkrev', _adjustlinkrev
     )
 
     def _iscmd(orig, cmd):
-        if cmd == 'x_rfl_getfiles':
+        if cmd == b'x_rfl_getfiles':
             return False
         return orig(cmd)
 
-    extensions.wrapfunction(wireprotoserver, 'iscmd', _iscmd)
+    extensions.wrapfunction(wireprotoserver, b'iscmd', _iscmd)
 
 
 def _loadfileblob(repo, cachepath, path, node):
@@ -255,7 +260,7 @@
 
             f = None
             try:
-                f = util.atomictempfile(filecachepath, "wb")
+                f = util.atomictempfile(filecachepath, b"wb")
                 f.write(text)
             except (IOError, OSError):
                 # Don't abort if the user only has permission to read,
@@ -267,7 +272,7 @@
         finally:
             os.umask(oldumask)
     else:
-        with open(filecachepath, "rb") as f:
+        with open(filecachepath, b"rb") as f:
             text = f.read()
     return text
 
@@ -277,7 +282,7 @@
     """
     flog = repo.file(path)
     heads = flog.heads()
-    return '\n'.join((hex(head) for head in heads if head != nullid))
+    return b'\n'.join((hex(head) for head in heads if head != nullid))
 
 
 def getfile(repo, proto, file, node):
@@ -290,30 +295,30 @@
     createfileblob for its content.
     """
     if shallowutil.isenabled(repo):
-        return '1\0' + _('cannot fetch remote files from shallow repo')
-    cachepath = repo.ui.config("remotefilelog", "servercachepath")
+        return b'1\0' + _(b'cannot fetch remote files from shallow repo')
+    cachepath = repo.ui.config(b"remotefilelog", b"servercachepath")
     if not cachepath:
-        cachepath = os.path.join(repo.path, "remotefilelogcache")
+        cachepath = os.path.join(repo.path, b"remotefilelogcache")
     node = bin(node.strip())
     if node == nullid:
-        return '0\0'
-    return '0\0' + _loadfileblob(repo, cachepath, file, node)
+        return b'0\0'
+    return b'0\0' + _loadfileblob(repo, cachepath, file, node)
 
 
 def getfiles(repo, proto):
     """A server api for requesting particular versions of particular files.
     """
     if shallowutil.isenabled(repo):
-        raise error.Abort(_('cannot fetch remote files from shallow repo'))
+        raise error.Abort(_(b'cannot fetch remote files from shallow repo'))
     if not isinstance(proto, _sshv1server):
-        raise error.Abort(_('cannot fetch remote files over non-ssh protocol'))
+        raise error.Abort(_(b'cannot fetch remote files over non-ssh protocol'))
 
     def streamer():
         fin = proto._fin
 
-        cachepath = repo.ui.config("remotefilelog", "servercachepath")
+        cachepath = repo.ui.config(b"remotefilelog", b"servercachepath")
         if not cachepath:
-            cachepath = os.path.join(repo.path, "remotefilelogcache")
+            cachepath = os.path.join(repo.path, b"remotefilelogcache")
 
         while True:
             request = fin.readline()[:-1]
@@ -322,14 +327,14 @@
 
             node = bin(request[:40])
             if node == nullid:
-                yield '0\n'
+                yield b'0\n'
                 continue
 
             path = request[40:]
 
             text = _loadfileblob(repo, cachepath, path, node)
 
-            yield '%d\n%s' % (len(text), text)
+            yield b'%d\n%s' % (len(text), text)
 
             # it would be better to only flush after processing a whole batch
             # but currently we don't know if there are more requests coming
@@ -371,7 +376,7 @@
         repo.forcelinkrev = True
         ancestors.extend([f for f in filectx.ancestors()])
 
-        ancestortext = ""
+        ancestortext = b""
         for ancestorctx in ancestors:
             parents = ancestorctx.parents()
             p1 = nullid
@@ -381,12 +386,12 @@
             if len(parents) > 1:
                 p2 = parents[1].filenode()
 
-            copyname = ""
+            copyname = b""
             rename = ancestorctx.renamed()
             if rename:
                 copyname = rename[0]
             linknode = ancestorctx.node()
-            ancestortext += "%s%s%s%s%s\0" % (
+            ancestortext += b"%s%s%s%s%s\0" % (
                 ancestorctx.filenode(),
                 p1,
                 p2,
@@ -398,17 +403,17 @@
 
     header = shallowutil.buildfileblobheader(len(text), revlogflags)
 
-    return "%s\0%s%s" % (header, text, ancestortext)
+    return b"%s\0%s%s" % (header, text, ancestortext)
 
 
 def gcserver(ui, repo):
-    if not repo.ui.configbool("remotefilelog", "server"):
+    if not repo.ui.configbool(b"remotefilelog", b"server"):
         return
 
     neededfiles = set()
-    heads = repo.revs("heads(tip~25000:) - null")
+    heads = repo.revs(b"heads(tip~25000:) - null")
 
-    cachepath = repo.vfs.join("remotefilelogcache")
+    cachepath = repo.vfs.join(b"remotefilelogcache")
     for head in heads:
         mf = repo[head].manifest()
         for filename, filenode in mf.iteritems():
@@ -416,10 +421,10 @@
             neededfiles.add(filecachepath)
 
     # delete unneeded older files
-    days = repo.ui.configint("remotefilelog", "serverexpiration")
+    days = repo.ui.configint(b"remotefilelog", b"serverexpiration")
     expiration = time.time() - (days * 24 * 60 * 60)
 
-    progress = ui.makeprogress(_("removing old server cache"), unit="files")
+    progress = ui.makeprogress(_(b"removing old server cache"), unit=b"files")
     progress.update(0)
     for root, dirs, files in os.walk(cachepath):
         for file in files:
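
The getfiles streamer in remotefilelogserver.py answers each request with a decimal size line followed by the raw blob (b'0\n' for nullid), and the client side shown earlier in fileserverclient.py reads the size line back and then exactly that many bytes. A hedged stdlib-only sketch of the framing; frame() and unframe() are hypothetical names, not the extension's API:

    import io

    def frame(text):
        return b'%d\n%s' % (len(text), text)

    def unframe(pipe):
        size = int(pipe.readline()[:-1])
        return pipe.read(size)

    blob = b'file blob\x00with binary bytes'
    assert unframe(io.BytesIO(frame(blob))) == blob
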
--- a/hgext/remotefilelog/repack.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/remotefilelog/repack.py	Sun Oct 06 09:48:39 2019 -0400
@@ -39,13 +39,13 @@
 def backgroundrepack(
     repo, incremental=True, packsonly=False, ensurestart=False
 ):
-    cmd = [procutil.hgexecutable(), '-R', repo.origroot, 'repack']
-    msg = _("(running background repack)\n")
+    cmd = [procutil.hgexecutable(), b'-R', repo.origroot, b'repack']
+    msg = _(b"(running background repack)\n")
     if incremental:
-        cmd.append('--incremental')
-        msg = _("(running background incremental repack)\n")
+        cmd.append(b'--incremental')
+        msg = _(b"(running background incremental repack)\n")
     if packsonly:
-        cmd.append('--packsonly')
+        cmd.append(b'--packsonly')
     repo.ui.warn(msg)
     # We know this command will find a binary, so don't block on it starting.
     procutil.runbgcommand(cmd, encoding.environ, ensurestart=ensurestart)
@@ -54,7 +54,7 @@
 def fullrepack(repo, options=None):
     """If ``packsonly`` is True, stores creating only loose objects are skipped.
     """
-    if util.safehasattr(repo, 'shareddatastores'):
+    if util.safehasattr(repo, b'shareddatastores'):
         datasource = contentstore.unioncontentstore(*repo.shareddatastores)
         historysource = metadatastore.unionmetadatastore(
             *repo.sharedhistorystores, allowincomplete=True
@@ -72,7 +72,7 @@
             options=options,
         )
 
-    if util.safehasattr(repo.manifestlog, 'datastore'):
+    if util.safehasattr(repo.manifestlog, b'datastore'):
         localdata, shareddata = _getmanifeststores(repo)
         lpackpath, ldstores, lhstores = localdata
         spackpath, sdstores, shstores = shareddata
@@ -112,7 +112,7 @@
     """This repacks the repo by looking at the distribution of pack files in the
     repo and performing the most minimal repack to keep the repo in good shape.
     """
-    if util.safehasattr(repo, 'shareddatastores'):
+    if util.safehasattr(repo, b'shareddatastores'):
         packpath = shallowutil.getcachepackpath(
             repo, constants.FILEPACK_CATEGORY
         )
@@ -125,7 +125,7 @@
             options=options,
         )
 
-    if util.safehasattr(repo.manifestlog, 'datastore'):
+    if util.safehasattr(repo.manifestlog, b'datastore'):
         localdata, shareddata = _getmanifeststores(repo)
         lpackpath, ldstores, lhstores = localdata
         spackpath, sdstores, shstores = shareddata
@@ -181,13 +181,13 @@
     """Deletes packfiles that are bigger than ``packs.maxpacksize``.
 
     Returns ``files`` with the removed files omitted."""
-    maxsize = repo.ui.configbytes("packs", "maxpacksize")
+    maxsize = repo.ui.configbytes(b"packs", b"maxpacksize")
     if maxsize <= 0:
         return files
 
     # This only considers datapacks today, but we could broaden it to include
     # historypacks.
-    VALIDEXTS = [".datapack", ".dataidx"]
+    VALIDEXTS = [b".datapack", b".dataidx"]
 
     # Either an oversize index or datapack will trigger cleanup of the whole
     # pack:
@@ -202,7 +202,7 @@
         for ext in VALIDEXTS:
             path = rootpath + ext
             repo.ui.debug(
-                'removing oversize packfile %s (%s)\n'
+                b'removing oversize packfile %s (%s)\n'
                 % (path, util.bytecount(os.stat(path).st_size))
             )
             os.unlink(path)
@@ -273,14 +273,16 @@
 
 def _computeincrementaldatapack(ui, files):
     opts = {
-        'gencountlimit': ui.configint('remotefilelog', 'data.gencountlimit'),
-        'generations': ui.configlist('remotefilelog', 'data.generations'),
-        'maxrepackpacks': ui.configint('remotefilelog', 'data.maxrepackpacks'),
-        'repackmaxpacksize': ui.configbytes(
-            'remotefilelog', 'data.repackmaxpacksize'
+        b'gencountlimit': ui.configint(b'remotefilelog', b'data.gencountlimit'),
+        b'generations': ui.configlist(b'remotefilelog', b'data.generations'),
+        b'maxrepackpacks': ui.configint(
+            b'remotefilelog', b'data.maxrepackpacks'
         ),
-        'repacksizelimit': ui.configbytes(
-            'remotefilelog', 'data.repacksizelimit'
+        b'repackmaxpacksize': ui.configbytes(
+            b'remotefilelog', b'data.repackmaxpacksize'
+        ),
+        b'repacksizelimit': ui.configbytes(
+            b'remotefilelog', b'data.repacksizelimit'
         ),
     }
 
@@ -292,18 +294,20 @@
 
 def _computeincrementalhistorypack(ui, files):
     opts = {
-        'gencountlimit': ui.configint('remotefilelog', 'history.gencountlimit'),
-        'generations': ui.configlist(
-            'remotefilelog', 'history.generations', ['100MB']
+        b'gencountlimit': ui.configint(
+            b'remotefilelog', b'history.gencountlimit'
+        ),
+        b'generations': ui.configlist(
+            b'remotefilelog', b'history.generations', [b'100MB']
         ),
-        'maxrepackpacks': ui.configint(
-            'remotefilelog', 'history.maxrepackpacks'
+        b'maxrepackpacks': ui.configint(
+            b'remotefilelog', b'history.maxrepackpacks'
         ),
-        'repackmaxpacksize': ui.configbytes(
-            'remotefilelog', 'history.repackmaxpacksize', '400MB'
+        b'repackmaxpacksize': ui.configbytes(
+            b'remotefilelog', b'history.repackmaxpacksize', b'400MB'
         ),
-        'repacksizelimit': ui.configbytes(
-            'remotefilelog', 'history.repacksizelimit'
+        b'repacksizelimit': ui.configbytes(
+            b'remotefilelog', b'history.repacksizelimit'
         ),
     }
 
@@ -341,7 +345,7 @@
     """
 
     limits = list(
-        sorted((util.sizetoint(s) for s in opts['generations']), reverse=True)
+        sorted((util.sizetoint(s) for s in opts[b'generations']), reverse=True)
     )
     limits.append(0)
 
@@ -353,7 +357,7 @@
     sizes = {}
     for prefix, mode, stat in files:
         size = stat.st_size
-        if size > opts['repackmaxpacksize']:
+        if size > opts[b'repackmaxpacksize']:
             continue
 
         sizes[prefix] = size
@@ -370,7 +374,7 @@
     # Find the largest generation with more than gencountlimit packs
     genpacks = []
     for i, limit in enumerate(limits):
-        if len(generations[i]) > opts['gencountlimit']:
+        if len(generations[i]) > opts[b'gencountlimit']:
             # Sort to be smallest last, for easy popping later
             genpacks.extend(
                 sorted(generations[i], reverse=True, key=lambda x: sizes[x])
@@ -382,9 +386,9 @@
     genpacks = genpacks[:-3]
     repacksize = sum(sizes[n] for n in chosenpacks)
     while (
-        repacksize < opts['repacksizelimit']
+        repacksize < opts[b'repacksizelimit']
         and genpacks
-        and len(chosenpacks) < opts['maxrepackpacks']
+        and len(chosenpacks) < opts[b'maxrepackpacks']
     ):
         chosenpacks.append(genpacks.pop())
         repacksize += sizes[chosenpacks[-1]]
@@ -404,12 +408,12 @@
         filectx = repo.filectx(filename, fileid=node)
         filetime = repo[filectx.linkrev()].date()
 
-        ttl = repo.ui.configint('remotefilelog', 'nodettl')
+        ttl = repo.ui.configint(b'remotefilelog', b'nodettl')
 
         limit = time.time() - ttl
         return filetime[0] < limit
 
-    garbagecollect = repo.ui.configbool('remotefilelog', 'gcrepack')
+    garbagecollect = repo.ui.configbool(b'remotefilelog', b'gcrepack')
     if not fullhistory:
         fullhistory = history
     packer = repacker(
@@ -429,7 +433,10 @@
                 packer.run(dpack, hpack)
             except error.LockHeld:
                 raise RepackAlreadyRunning(
-                    _("skipping repack - another repack " "is already running")
+                    _(
+                        b"skipping repack - another repack "
+                        b"is already running"
+                    )
                 )
 
 
@@ -449,16 +456,16 @@
     # 2. Draft commits
     # 3. Parents of draft commits
     # 4. Pullprefetch and bgprefetchrevs revsets if specified
-    revs = ['.', 'draft()', 'parents(draft())']
-    prefetchrevs = repo.ui.config('remotefilelog', 'pullprefetch', None)
+    revs = [b'.', b'draft()', b'parents(draft())']
+    prefetchrevs = repo.ui.config(b'remotefilelog', b'pullprefetch', None)
     if prefetchrevs:
-        revs.append('(%s)' % prefetchrevs)
-    prefetchrevs = repo.ui.config('remotefilelog', 'bgprefetchrevs', None)
+        revs.append(b'(%s)' % prefetchrevs)
+    prefetchrevs = repo.ui.config(b'remotefilelog', b'bgprefetchrevs', None)
     if prefetchrevs:
-        revs.append('(%s)' % prefetchrevs)
-    revs = '+'.join(revs)
+        revs.append(b'(%s)' % prefetchrevs)
+    revs = b'+'.join(revs)
 
-    revs = ['sort((%s), "topo")' % revs]
+    revs = [b'sort((%s), "topo")' % revs]
     keep = scmutil.revrange(repo, revs)
 
     processed = set()
@@ -520,7 +527,7 @@
         self.options = options
         if self.garbagecollect:
             if not isold:
-                raise ValueError("Function 'isold' is not properly specified")
+                raise ValueError(b"Function 'isold' is not properly specified")
             # use (filename, node) tuple as a keepset key
             self.keepkeys = keepset(repo, lambda f, n: (f, n))
             self.isold = isold
@@ -529,9 +536,9 @@
         ledger = repackledger()
 
         with lockmod.lock(
-            repacklockvfs(self.repo), "repacklock", desc=None, timeout=0
+            repacklockvfs(self.repo), b"repacklock", desc=None, timeout=0
         ):
-            self.repo.hook('prerepack')
+            self.repo.hook(b'prerepack')
 
             # Populate ledger from source
             self.data.markledger(ledger, options=self.options)
@@ -571,8 +578,8 @@
         orphans = list(sorted(orphans, key=getsize, reverse=True))
         if ui.debugflag:
             ui.debug(
-                "%s: orphan chain: %s\n"
-                % (filename, ", ".join([short(s) for s in orphans]))
+                b"%s: orphan chain: %s\n"
+                % (filename, b", ".join([short(s) for s in orphans]))
             )
 
         # Create one contiguous chain and reassign deltabases.
@@ -588,7 +595,7 @@
 
     def repackdata(self, ledger, target):
         ui = self.repo.ui
-        maxchainlen = ui.configint('packs', 'maxchainlen', 1000)
+        maxchainlen = ui.configint(b'packs', b'maxchainlen', 1000)
 
         byfile = {}
         for entry in ledger.entries.itervalues():
@@ -597,7 +604,7 @@
 
         count = 0
         repackprogress = ui.makeprogress(
-            _("repacking data"), unit=self.unit, total=len(byfile)
+            _(b"repacking data"), unit=self.unit, total=len(byfile)
         )
         for filename, entries in sorted(byfile.iteritems()):
             repackprogress.update(count)
@@ -606,7 +613,7 @@
             nodes = list(node for node in entries)
             nohistory = []
             buildprogress = ui.makeprogress(
-                _("building history"), unit='nodes', total=len(nodes)
+                _(b"building history"), unit=b'nodes', total=len(nodes)
             )
             for i, node in enumerate(nodes):
                 if node in ancestors:
@@ -629,7 +636,7 @@
             orderednodes = list(reversed(self._toposort(ancestors)))
             if len(nohistory) > 0:
                 ui.debug(
-                    'repackdata: %d nodes without history\n' % len(nohistory)
+                    b'repackdata: %d nodes without history\n' % len(nohistory)
                 )
             orderednodes.extend(sorted(nohistory))
 
@@ -659,7 +666,7 @@
             referenced = set()
             nodes = set(nodes)
             processprogress = ui.makeprogress(
-                _("processing nodes"), unit='nodes', total=len(orderednodes)
+                _(b"processing nodes"), unit=b'nodes', total=len(orderednodes)
             )
             for i, node in enumerate(orderednodes):
                 processprogress.update(i)
@@ -698,7 +705,7 @@
                             deltabases[p2] = (node, chainlen + 1)
 
             # experimental config: repack.chainorphansbysize
-            if ui.configbool('repack', 'chainorphansbysize'):
+            if ui.configbool(b'repack', b'chainorphansbysize'):
                 orphans = nobase - referenced
                 orderednodes = self._chainorphans(
                     ui, filename, orderednodes, orphans, deltabases
@@ -751,7 +758,7 @@
                 byfile.setdefault(entry.filename, {})[entry.node] = entry
 
         progress = ui.makeprogress(
-            _("repacking history"), unit=self.unit, total=len(byfile)
+            _(b"repacking history"), unit=self.unit, total=len(byfile)
         )
         for filename, entries in sorted(byfile.iteritems()):
             ancestors = {}
@@ -894,7 +901,7 @@
 
 
 def repacklockvfs(repo):
-    if util.safehasattr(repo, 'name'):
+    if util.safehasattr(repo, b'name'):
         # Lock in the shared cache so repacks across multiple copies of the same
         # repo are coordinated.
         sharedcachepath = shallowutil.getcachepackpath(
--- a/hgext/remotefilelog/shallowbundle.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/remotefilelog/shallowbundle.py	Sun Oct 06 09:48:39 2019 -0400
@@ -79,11 +79,11 @@
                # bundlerepo is heavily tied to revlogs. Require that
                # the user use unbundle instead.
                 # Force load the filelog data.
-                bundlerepo.bundlerepository.file(repo, 'foo')
+                bundlerepo.bundlerepository.file(repo, b'foo')
                 if repo._cgfilespos:
                     raise error.Abort(
-                        "cannot pull from full bundles",
-                        hint="use `hg unbundle` instead",
+                        b"cannot pull from full bundles",
+                        hint=b"use `hg unbundle` instead",
                     )
                 return []
             filestosend = self.shouldaddfilegroups(source)
@@ -99,16 +99,16 @@
         if not shallowutil.isenabled(repo):
             return AllFiles
 
-        if source == "push" or source == "bundle":
+        if source == b"push" or source == b"bundle":
             return AllFiles
 
         caps = self._bundlecaps or []
-        if source == "serve" or source == "pull":
+        if source == b"serve" or source == b"pull":
             if constants.BUNDLE2_CAPABLITY in caps:
                 return LocalFiles
             else:
                 # Serving to a full repo requires us to serve everything
-                repo.ui.warn(_("pulling from a shallow repo\n"))
+                repo.ui.warn(_(b"pulling from a shallow repo\n"))
                 return AllFiles
 
         return NoFiles
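
Condensed, the decision this hunk byteifies (a sketch; the numeric values assumed here for the module's AllFiles/LocalFiles/NoFiles constants are an assumption):

NoFiles, LocalFiles, AllFiles = 0, 1, 2  # assumed constant values

def shouldaddfilegroups(source, shallow_enabled, peer_has_bundle2_cap):
    if not shallow_enabled:
        return AllFiles
    if source in (b'push', b'bundle'):
        return AllFiles
    if source in (b'serve', b'pull'):
        # serving to a full repo requires serving everything
        return LocalFiles if peer_has_bundle2_cap else AllFiles
    return NoFiles
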
@@ -128,7 +128,7 @@
         return results
 
     def nodechunk(self, revlog, node, prevnode, linknode):
-        prefix = ''
+        prefix = b''
         if prevnode == nullid:
             delta = revlog.rawdata(node)
             prefix = mdiff.trivialdiffheader(len(delta))
@@ -152,22 +152,22 @@
     original = repo.shallowmatch
     try:
         # if serving, only send files the client has patterns for
-        if source == 'serve':
+        if source == b'serve':
             bundlecaps = kwargs.get(r'bundlecaps')
             includepattern = None
             excludepattern = None
             for cap in bundlecaps or []:
-                if cap.startswith("includepattern="):
-                    raw = cap[len("includepattern=") :]
+                if cap.startswith(b"includepattern="):
+                    raw = cap[len(b"includepattern=") :]
                     if raw:
-                        includepattern = raw.split('\0')
-                elif cap.startswith("excludepattern="):
-                    raw = cap[len("excludepattern=") :]
+                        includepattern = raw.split(b'\0')
+                elif cap.startswith(b"excludepattern="):
+                    raw = cap[len(b"excludepattern=") :]
                     if raw:
-                        excludepattern = raw.split('\0')
+                        excludepattern = raw.split(b'\0')
             if includepattern or excludepattern:
                 repo.shallowmatch = match.match(
-                    repo.root, '', None, includepattern, excludepattern
+                    repo.root, b'', None, includepattern, excludepattern
                 )
             else:
                 repo.shallowmatch = match.always()
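
A standalone sketch of the capability parsing above, with hypothetical cap values (patterns are NUL-separated after the b'includepattern='/b'excludepattern=' prefixes):

caps = [b'includepattern=glob:foo/**\0glob:bar/**', b'excludepattern=glob:baz/**']
includepattern = excludepattern = None
for cap in caps:
    if cap.startswith(b'includepattern='):
        includepattern = cap[len(b'includepattern='):].split(b'\0')
    elif cap.startswith(b'excludepattern='):
        excludepattern = cap[len(b'excludepattern='):].split(b'\0')
assert includepattern == [b'glob:foo/**', b'glob:bar/**']
assert excludepattern == [b'glob:baz/**']
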
@@ -192,13 +192,13 @@
     # files in topological order.
 
     # read all the file chunks but don't add them
-    progress = repo.ui.makeprogress(_('files'), total=expectedfiles)
+    progress = repo.ui.makeprogress(_(b'files'), total=expectedfiles)
     while True:
         chunkdata = source.filelogheader()
         if not chunkdata:
             break
-        f = chunkdata["filename"]
-        repo.ui.debug("adding %s revisions\n" % f)
+        f = chunkdata[b"filename"]
+        repo.ui.debug(b"adding %s revisions\n" % f)
         progress.increment()
 
         if not repo.shallowmatch(f):
@@ -224,7 +224,7 @@
                 visited.add(f)
 
         if chain is None:
-            raise error.Abort(_("received file revlog group is empty"))
+            raise error.Abort(_(b"received file revlog group is empty"))
 
     processed = set()
 
@@ -266,7 +266,7 @@
 
         skipcount += 1
         if skipcount > len(queue) + 1:
-            raise error.Abort(_("circular node dependency"))
+            raise error.Abort(_(b"circular node dependency"))
 
         fl = repo.file(f)
 
@@ -283,9 +283,9 @@
             text = bytes(text)
 
         meta, text = shallowutil.parsemeta(text)
-        if 'copy' in meta:
-            copyfrom = meta['copy']
-            copynode = bin(meta['copyrev'])
+        if b'copy' in meta:
+            copyfrom = meta[b'copy']
+            copynode = bin(meta[b'copyrev'])
             if not available(f, node, copyfrom, copynode):
                 continue
 
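For reference, the metadata block that shallowutil.parsemeta() strips here is the standard filelog header: key/value lines between two b'\x01\n' markers. A simplified standalone parser, assuming that layout:

def parsemeta(text):
    # b'\x01\ncopy: <path>\ncopyrev: <hex>\n\x01\n<data>' -> ({...}, <data>)
    meta = {}
    if text.startswith(b'\x01\n'):
        end = text.index(b'\x01\n', 2)
        for line in text[2:end].splitlines():
            key, value = line.split(b': ', 1)
            meta[key] = value
        text = text[end + 2:]
    return meta, text

meta, data = parsemeta(b'\x01\ncopy: a.txt\ncopyrev: ' + b'0' * 40 + b'\n\x01\nhello')
assert meta[b'copy'] == b'a.txt' and data == b'hello'
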
--- a/hgext/remotefilelog/shallowrepo.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/remotefilelog/shallowrepo.py	Sun Oct 06 09:48:39 2019 -0400
@@ -37,7 +37,7 @@
 # them.
 def makelocalstores(repo):
     """In-repo stores, like .hg/store/data; can not be discarded."""
-    localpath = os.path.join(repo.svfs.vfs.base, 'data')
+    localpath = os.path.join(repo.svfs.vfs.base, b'data')
     if not os.path.exists(localpath):
         os.makedirs(localpath)
 
@@ -92,7 +92,7 @@
     repo.shareddatastores.append(packcontentstore)
     repo.sharedhistorystores.append(packmetadatastore)
     shallowutil.reportpackmetrics(
-        repo.ui, 'filestore', packcontentstore, packmetadatastore
+        repo.ui, b'filestore', packcontentstore, packmetadatastore
     )
     return packcontentstore, packmetadatastore
 
@@ -134,7 +134,7 @@
         fileservicehistorywrite,
     )
     shallowutil.reportpackmetrics(
-        repo.ui, 'filestore', packcontentstore, packmetadatastore
+        repo.ui, b'filestore', packcontentstore, packmetadatastore
     )
 
 
@@ -142,19 +142,19 @@
     class shallowrepository(repo.__class__):
         @util.propertycache
         def name(self):
-            return self.ui.config('remotefilelog', 'reponame')
+            return self.ui.config(b'remotefilelog', b'reponame')
 
         @util.propertycache
         def fallbackpath(self):
             path = repo.ui.config(
-                "remotefilelog",
-                "fallbackpath",
-                repo.ui.config('paths', 'default'),
+                b"remotefilelog",
+                b"fallbackpath",
+                repo.ui.config(b'paths', b'default'),
             )
             if not path:
                 raise error.Abort(
-                    "no remotefilelog server "
-                    "configured - is your .hg/hgrc trusted?"
+                    b"no remotefilelog server "
+                    b"configured - is your .hg/hgrc trusted?"
                 )
 
             return path
@@ -175,7 +175,7 @@
             return ret
 
         def file(self, f):
-            if f[0] == '/':
+            if f[0] == b'/':
                 f = f[1:]
 
             if self.shallowmatch(f):
@@ -224,11 +224,11 @@
         ):
             """Runs prefetch in background with optional repack
             """
-            cmd = [procutil.hgexecutable(), '-R', repo.origroot, 'prefetch']
+            cmd = [procutil.hgexecutable(), b'-R', repo.origroot, b'prefetch']
             if repack:
-                cmd.append('--repack')
+                cmd.append(b'--repack')
             if revs:
-                cmd += ['-r', revs]
+                cmd += [b'-r', revs]
             # We know this command will find a binary, so don't block
             # on it starting.
             procutil.runbgcommand(
@@ -241,11 +241,11 @@
             """
             with repo._lock(
                 repo.svfs,
-                'prefetchlock',
+                b'prefetchlock',
                 True,
                 None,
                 None,
-                _('prefetching in %s') % repo.origroot,
+                _(b'prefetching in %s') % repo.origroot,
             ):
                 self._prefetch(revs, base, pats, opts)
 
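The backgroundprefetch path two hunks up spawns hg itself via procutil.runbgcommand and deliberately does not wait for it. The effect is roughly this standard-library sketch (illustrative only, not procutil's implementation; bytes argv is POSIX-only):

import subprocess

cmd = [b'hg', b'-R', b'/path/to/repo', b'prefetch', b'--repack', b'-r', b'draft()']
subprocess.Popen(  # fire and forget: no wait(), no output capture
    cmd,
    stdin=subprocess.DEVNULL,
    stdout=subprocess.DEVNULL,
    stderr=subprocess.DEVNULL,
)
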
@@ -255,10 +255,12 @@
                 # If we know a rev is on the server, we should fetch the server
                 # version of those files, since our local file versions might
                 # become obsolete if the local commits are stripped.
-                localrevs = repo.revs('outgoing(%s)', fallbackpath)
+                localrevs = repo.revs(b'outgoing(%s)', fallbackpath)
                 if base is not None and base != nullrev:
                     serverbase = list(
-                        repo.revs('first(reverse(::%s) - %ld)', base, localrevs)
+                        repo.revs(
+                            b'first(reverse(::%s) - %ld)', base, localrevs
+                        )
                     )
                     if serverbase:
                         base = serverbase[0]
@@ -266,7 +268,7 @@
                 localrevs = repo
 
             mfl = repo.manifestlog
-            mfrevlog = mfl.getstorage('')
+            mfrevlog = mfl.getstorage(b'')
             if base is not None:
                 mfdict = mfl[repo[base].manifestnode()].read()
                 skip = set(mfdict.iteritems())
@@ -280,7 +282,7 @@
             visited = set()
             visited.add(nullrev)
             revcount = len(revs)
-            progress = self.ui.makeprogress(_('prefetching'), total=revcount)
+            progress = self.ui.makeprogress(_(b'prefetching'), total=revcount)
             progress.update(0)
             for rev in sorted(revs):
                 ctx = repo[rev]
@@ -337,15 +339,15 @@
     makeunionstores(repo)
 
     repo.includepattern = repo.ui.configlist(
-        "remotefilelog", "includepattern", None
+        b"remotefilelog", b"includepattern", None
     )
     repo.excludepattern = repo.ui.configlist(
-        "remotefilelog", "excludepattern", None
+        b"remotefilelog", b"excludepattern", None
     )
-    if not util.safehasattr(repo, 'connectionpool'):
+    if not util.safehasattr(repo, b'connectionpool'):
         repo.connectionpool = connectionpool.connectionpool(repo)
 
     if repo.includepattern or repo.excludepattern:
         repo.shallowmatch = match.match(
-            repo.root, '', None, repo.includepattern, repo.excludepattern
+            repo.root, b'', None, repo.includepattern, repo.excludepattern
         )
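
One subtlety in hunks like the safehasattr() call above: Python 3's getattr() only accepts str attribute names, so a helper taking the now-bytes names has to normalize them. A minimal sketch of such a helper (illustrative; not necessarily util's actual implementation):

_notset = object()

def safehasattr(thing, attr):
    if isinstance(attr, bytes):
        attr = attr.decode('latin-1')  # attribute names are ASCII
    return getattr(thing, attr, _notset) is not _notset
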
--- a/hgext/remotefilelog/shallowutil.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/remotefilelog/shallowutil.py	Sun Oct 06 09:48:39 2019 -0400
@@ -48,13 +48,13 @@
 
 
 def getcachepath(ui, allowempty=False):
-    cachepath = ui.config("remotefilelog", "cachepath")
+    cachepath = ui.config(b"remotefilelog", b"cachepath")
     if not cachepath:
         if allowempty:
             return None
         else:
             raise error.Abort(
-                _("could not find config option " "remotefilelog.cachepath")
+                _(b"could not find config option " b"remotefilelog.cachepath")
             )
     return util.expandpath(cachepath)
 
@@ -62,13 +62,13 @@
 def getcachepackpath(repo, category):
     cachepath = getcachepath(repo.ui)
     if category != constants.FILEPACK_CATEGORY:
-        return os.path.join(cachepath, repo.name, 'packs', category)
+        return os.path.join(cachepath, repo.name, b'packs', category)
     else:
-        return os.path.join(cachepath, repo.name, 'packs')
+        return os.path.join(cachepath, repo.name, b'packs')
 
 
 def getlocalpackpath(base, category):
-    return os.path.join(base, 'packs', category)
+    return os.path.join(base, b'packs', category)
 
 
 def createrevlogtext(text, copyfrom=None, copyrev=None):
@@ -76,10 +76,10 @@
     traditional revlog
     """
     meta = {}
-    if copyfrom or text.startswith('\1\n'):
+    if copyfrom or text.startswith(b'\1\n'):
         if copyfrom:
-            meta['copy'] = copyfrom
-            meta['copyrev'] = copyrev
+            meta[b'copy'] = copyfrom
+            meta[b'copyrev'] = copyrev
         text = storageutil.packmeta(meta, text)
 
     return text
@@ -88,8 +88,8 @@
 def parsemeta(text):
     """parse mercurial filelog metadata"""
     meta, size = storageutil.parsemeta(text)
-    if text.startswith('\1\n'):
-        s = text.index('\1\n', 2)
+    if text.startswith(b'\1\n'):
+        s = text.index(b'\1\n', 2)
         text = text[s + 2 :]
     return meta or {}, text
 
@@ -117,8 +117,8 @@
 
 def reportpackmetrics(ui, prefix, *stores):
     dicts = [s.getmetrics() for s in stores]
-    dict = prefixkeys(sumdicts(*dicts), prefix + '_')
-    ui.log(prefix + "_packsizes", "\n", **pycompat.strkwargs(dict))
+    dict = prefixkeys(sumdicts(*dicts), prefix + b'_')
+    ui.log(prefix + b"_packsizes", b"\n", **pycompat.strkwargs(dict))
 
 
 def _parsepackmeta(metabuf):
@@ -136,15 +136,15 @@
     while buflen - offset >= 3:
         key = metabuf[offset : offset + 1]
         offset += 1
-        metalen = struct.unpack_from('!H', metabuf, offset)[0]
+        metalen = struct.unpack_from(b'!H', metabuf, offset)[0]
         offset += 2
         if offset + metalen > buflen:
-            raise ValueError('corrupted metadata: incomplete buffer')
+            raise ValueError(b'corrupted metadata: incomplete buffer')
         value = metabuf[offset : offset + metalen]
         metadict[key] = value
         offset += metalen
     if offset != buflen:
-        raise ValueError('corrupted metadata: redundant data')
+        raise ValueError(b'corrupted metadata: redundant data')
     return metadict
 
 
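The hunks here and just below touch remotefilelog's pack metadata encoding: a one-byte key, a big-endian 16-bit length, then the raw value, with keys sorted. A self-contained round-trip sketch of that layout:

import struct

def packmeta(metadict):
    buf = b''
    for key, value in sorted(metadict.items()):
        buf += key + struct.pack('!H', len(value)) + value
    return buf

def parsemeta(buf):
    meta, offset = {}, 0
    while offset < len(buf):
        key = buf[offset:offset + 1]
        (size,) = struct.unpack_from('!H', buf, offset + 1)
        meta[key] = buf[offset + 3:offset + 3 + size]
        offset += 3 + size
    return meta

assert parsemeta(packmeta({b's': b'1234', b'f': b'0'})) == {b's': b'1234', b'f': b'0'}
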
@@ -158,16 +158,16 @@
     raise ProgrammingError when metadata key is illegal, or ValueError if
     length limit is exceeded
     """
-    metabuf = ''
+    metabuf = b''
     for k, v in sorted((metadict or {}).iteritems()):
         if len(k) != 1:
-            raise error.ProgrammingError('packmeta: illegal key: %s' % k)
+            raise error.ProgrammingError(b'packmeta: illegal key: %s' % k)
         if len(v) > 0xFFFE:
             raise ValueError(
-                'metadata value is too long: 0x%x > 0xfffe' % len(v)
+                b'metadata value is too long: 0x%x > 0xfffe' % len(v)
             )
         metabuf += k
-        metabuf += struct.pack('!H', len(v))
+        metabuf += struct.pack(b'!H', len(v))
         metabuf += v
     # len(metabuf) is guaranteed representable in 4 bytes, because there are
     # only 256 keys, and for each value, len(value) <= 0xfffe.
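
Spelled out, that guarantee is: at most 256 distinct one-byte keys, each entry costing at worst 1 + 2 + 0xfffe bytes, so len(metabuf) <= 256 * 0x10001 = 0x1000100 bytes (about 16.8 MB), far below the 2**32 ceiling of a 4-byte length field.
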
@@ -190,7 +190,7 @@
     for k, v in (metadict or {}).iteritems():
         expectedtype = _metaitemtypes.get(k, (bytes,))
         if not isinstance(v, expectedtype):
-            raise error.ProgrammingError('packmeta: wrong type of key %s' % k)
+            raise error.ProgrammingError(b'packmeta: wrong type of key %s' % k)
         # normalize int to binary buffer
         if int in expectedtype:
             # optimization: remove flag if it's 0 to save space
@@ -241,19 +241,19 @@
     flags = revlog.REVIDX_DEFAULT_FLAGS
     size = None
     try:
-        index = raw.index('\0')
+        index = raw.index(b'\0')
         header = raw[:index]
-        if header.startswith('v'):
+        if header.startswith(b'v'):
             # v1 and above, header starts with 'v'
-            if header.startswith('v1\n'):
-                for s in header.split('\n'):
+            if header.startswith(b'v1\n'):
+                for s in header.split(b'\n'):
                     if s.startswith(constants.METAKEYSIZE):
                         size = int(s[len(constants.METAKEYSIZE) :])
                     elif s.startswith(constants.METAKEYFLAG):
                         flags = int(s[len(constants.METAKEYFLAG) :])
             else:
                 raise RuntimeError(
-                    'unsupported remotefilelog header: %s' % header
+                    b'unsupported remotefilelog header: %s' % header
                 )
         else:
             # v0, str(int(size)) is the header
@@ -277,7 +277,7 @@
     if version is None:
         version = int(bool(flags))
     if version == 1:
-        header = 'v1\n%s%d\n%s%d' % (
+        header = b'v1\n%s%d\n%s%d' % (
             constants.METAKEYSIZE,
             size,
             constants.METAKEYFLAG,
@@ -285,10 +285,10 @@
         )
     elif version == 0:
         if flags:
-            raise error.ProgrammingError('fileblob v0 does not support flag')
-        header = '%d' % size
+            raise error.ProgrammingError(b'fileblob v0 does not support flag')
+        header = b'%d' % size
     else:
-        raise error.ProgrammingError('unknown fileblob version %d' % version)
+        raise error.ProgrammingError(b'unknown fileblob version %d' % version)
     return header
 
 
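Together with the parsing hunk above, this defines the fileblob header round-trip. A hedged sketch, assuming constants.METAKEYSIZE == b's' and constants.METAKEYFLAG == b'f' and skipping the error paths:

METAKEYSIZE, METAKEYFLAG = b's', b'f'  # assumed values from constants.py

def buildheader(size, flags):
    if flags:
        return b'v1\n%s%d\n%s%d' % (METAKEYSIZE, size, METAKEYFLAG, flags)
    return b'%d' % size  # v0: bare decimal size

def parseheader(raw):
    index = raw.index(b'\0')
    header, size, flags = raw[:index], None, 0
    if header.startswith(b'v1\n'):
        for s in header.split(b'\n'):
            if s.startswith(METAKEYSIZE):
                size = int(s[len(METAKEYSIZE):])
            elif s.startswith(METAKEYFLAG):
                flags = int(s[len(METAKEYFLAG):])
    else:
        size = int(header)
    return size, flags, raw[index + 1:]

assert parseheader(buildheader(42, 2) + b'\0payload') == (42, 2, b'payload')
assert parseheader(buildheader(7, 0) + b'\0x') == (7, 0, b'x')
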
@@ -298,7 +298,7 @@
 
     mapping = {}
     while start < len(raw):
-        divider = raw.index('\0', start + 80)
+        divider = raw.index(b'\0', start + 80)
 
         currentnode = raw[start : (start + 20)]
         p1 = raw[(start + 20) : (start + 40)]
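
The raw ancestortext walked here packs fixed-width binary hashes followed by a NUL-terminated copyfrom path per entry; judging from the offsets, four 20-byte fields (the names of the last two are an assumption):

def readentry(raw, start):
    divider = raw.index(b'\0', start + 80)
    node = raw[start:start + 20]
    p1 = raw[start + 20:start + 40]
    p2 = raw[start + 40:start + 60]        # assumed field order
    linknode = raw[start + 60:start + 80]  # assumed field order
    copyfrom = raw[start + 80:divider]
    return node, p1, p2, linknode, copyfrom, divider + 1
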
@@ -313,14 +313,14 @@
 
 
 def readfile(path):
-    f = open(path, 'rb')
+    f = open(path, b'rb')
     try:
         result = f.read()
 
         # we should never have empty files
         if not result:
             os.remove(path)
-            raise IOError("empty file: %s" % path)
+            raise IOError(b"empty file: %s" % path)
 
         return result
     finally:
@@ -355,11 +355,11 @@
             if ex.errno != errno.EEXIST:
                 raise
 
-    fd, temp = tempfile.mkstemp(prefix='.%s-' % filename, dir=dirname)
+    fd, temp = tempfile.mkstemp(prefix=b'.%s-' % filename, dir=dirname)
     os.close(fd)
 
     try:
-        f = util.posixfile(temp, 'wb')
+        f = util.posixfile(temp, b'wb')
         f.write(content)
         f.close()
 
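writefile() follows the usual write-to-temp-then-rename recipe so readers never observe a partial file. The same idea with only the standard library (a sketch; it sticks to str paths and modes since builtin open() on Python 3 rejects a bytes mode):

import os
import tempfile

def writefile(path, content):
    dirname, filename = os.path.split(path)
    fd, temp = tempfile.mkstemp(prefix='.%s-' % filename, dir=dirname)
    try:
        with os.fdopen(fd, 'wb') as f:
            f.write(content)
        os.rename(temp, path)  # atomic within one filesystem
    except Exception:
        os.unlink(temp)
        raise
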
@@ -426,7 +426,7 @@
     s = stream.read(n)
     if len(s) < n:
         raise error.Abort(
-            _("stream ended unexpectedly" " (got %d bytes, expected %d)")
+            _(b"stream ended unexpectedly" b" (got %d bytes, expected %d)")
             % (len(s), n)
         )
     return s
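
readexactly() is the building block for fixed-size wire records; for instance, a length-prefixed frame reader on top of it (hypothetical framing, sketch only):

import io
import struct

def readexactly(stream, n):
    s = stream.read(n)
    if len(s) < n:
        raise EOFError('got %d bytes, expected %d' % (len(s), n))
    return s

def readframe(stream):
    (size,) = struct.unpack('!I', readexactly(stream, 4))
    return readexactly(stream, size)

assert readframe(io.BytesIO(b'\x00\x00\x00\x05hello')) == b'hello'
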
@@ -473,18 +473,18 @@
         os.chmod(path, 0o2775)
     except (IOError, OSError) as ex:
         if warn:
-            warn(_('unable to chown/chmod on %s: %s\n') % (path, ex))
+            warn(_(b'unable to chown/chmod on %s: %s\n') % (path, ex))
 
 
 def mkstickygroupdir(ui, path):
     """Creates the given directory (if it doesn't exist) and give it a
     particular group with setgid enabled."""
     gid = None
-    groupname = ui.config("remotefilelog", "cachegroup")
+    groupname = ui.config(b"remotefilelog", b"cachegroup")
     if groupname:
         gid = getgid(groupname)
         if gid is None:
-            ui.warn(_('unable to resolve group name: %s\n') % groupname)
+            ui.warn(_(b'unable to resolve group name: %s\n') % groupname)
 
     # we use a single stat syscall to test the existence and mode / group bit
     st = None
@@ -525,11 +525,11 @@
     try:
         return stringutil.shortuser(ui.username())
     except Exception:
-        return 'unknown'
+        return b'unknown'
 
 
 def getreponame(ui):
-    reponame = ui.config('paths', 'default')
+    reponame = ui.config(b'paths', b'default')
     if reponame:
         return os.path.basename(reponame)
-    return "unknown"
+    return b"unknown"
--- a/hgext/remotefilelog/shallowverifier.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/remotefilelog/shallowverifier.py	Sun Oct 06 09:48:39 2019 -0400
@@ -14,6 +14,6 @@
     def _verifyfiles(self, filenodes, filelinkrevs):
         """Skips files verification since repo's not guaranteed to have them"""
         self.repo.ui.status(
-            _("skipping filelog check since remotefilelog is used\n")
+            _(b"skipping filelog check since remotefilelog is used\n")
         )
         return 0, 0
--- a/hgext/remotenames.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/remotenames.py	Sun Oct 06 09:48:39 2019 -0400
@@ -58,7 +58,7 @@
 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
 # be specifying the version(s) of Mercurial they are tested with, or
 # leave the attribute unspecified.
-testedwith = 'ships-with-hg-core'
+testedwith = b'ships-with-hg-core'
 
 configtable = {}
 configitem = registrar.configitem(configtable)
@@ -66,13 +66,13 @@
 revsetpredicate = registrar.revsetpredicate()
 
 configitem(
-    'remotenames', 'bookmarks', default=True,
+    b'remotenames', b'bookmarks', default=True,
 )
 configitem(
-    'remotenames', 'branches', default=True,
+    b'remotenames', b'branches', default=True,
 )
 configitem(
-    'remotenames', 'hoistedpeer', default='default',
+    b'remotenames', b'hoistedpeer', default=b'default',
 )
 
 
@@ -102,7 +102,7 @@
         for node, rpath, rname in logexchange.readremotenamefile(
             repo, self._kind
         ):
-            name = rpath + '/' + rname
+            name = rpath + b'/' + rname
             self.potentialentries[name] = (node, rpath, name)
 
     def _resolvedata(self, potentialentry):
@@ -118,7 +118,7 @@
         except LookupError:
             return None
         # Skip closed branches
-        if self._kind == 'branches' and repo[binnode].closesbranch():
+        if self._kind == b'branches' and repo[binnode].closesbranch():
             return None
         return [binnode]
 
@@ -185,8 +185,8 @@
 
     def clearnames(self):
         """ Clear all remote names state """
-        self.bookmarks = lazyremotenamedict("bookmarks", self._repo)
-        self.branches = lazyremotenamedict("branches", self._repo)
+        self.bookmarks = lazyremotenamedict(b"bookmarks", self._repo)
+        self.branches = lazyremotenamedict(b"branches", self._repo)
         self._invalidatecache()
 
     def _invalidatecache(self):
@@ -222,7 +222,7 @@
         if not self._hoisttonodes:
             marktonodes = self.bmarktonodes()
             self._hoisttonodes = {}
-            hoist += '/'
+            hoist += b'/'
             for name, node in marktonodes.iteritems():
                 if name.startswith(hoist):
                     name = name[len(hoist) :]
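
Hoisting strips the configured peer's path prefix so a remote bookmark like b'default/foo' becomes usable as plain b'foo'; a toy version of the loop these two hunks byteify, with made-up data:

marktonodes = {b'default/foo': [b'\x11' * 20], b'other/bar': [b'\x22' * 20]}
hoist = b'default' + b'/'
hoisttonodes = {}
for name, nodes in marktonodes.items():
    if name.startswith(hoist):
        hoisttonodes[name[len(hoist):]] = nodes
assert hoisttonodes == {b'foo': [b'\x11' * 20]}
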
@@ -233,7 +233,7 @@
         if not self._nodetohoists:
             marktonodes = self.bmarktonodes()
             self._nodetohoists = {}
-            hoist += '/'
+            hoist += b'/'
             for name, node in marktonodes.iteritems():
                 if name.startswith(hoist):
                     name = name[len(hoist) :]
@@ -242,9 +242,9 @@
 
 
 def wrapprintbookmarks(orig, ui, repo, fm, bmarks):
-    if 'remotebookmarks' not in repo.names:
+    if b'remotebookmarks' not in repo.names:
         return
-    ns = repo.names['remotebookmarks']
+    ns = repo.names[b'remotebookmarks']
 
     for name in ns.listnames(repo):
         nodes = ns.nodes(repo, name)
@@ -252,19 +252,19 @@
             continue
         node = nodes[0]
 
-        bmarks[name] = (node, ' ', '')
+        bmarks[name] = (node, b' ', b'')
 
     return orig(ui, repo, fm, bmarks)
 
 
 def extsetup(ui):
-    extensions.wrapfunction(bookmarks, '_printbookmarks', wrapprintbookmarks)
+    extensions.wrapfunction(bookmarks, b'_printbookmarks', wrapprintbookmarks)
 
 
 def reposetup(ui, repo):
 
     # set the config option to store remotenames
-    repo.ui.setconfig('experimental', 'remotenames', True, 'remotenames-ext')
+    repo.ui.setconfig(b'experimental', b'remotenames', True, b'remotenames-ext')
 
     if not repo.local():
         return
@@ -272,12 +272,12 @@
     repo._remotenames = remotenames(repo)
     ns = namespaces.namespace
 
-    if ui.configbool('remotenames', 'bookmarks'):
+    if ui.configbool(b'remotenames', b'bookmarks'):
         remotebookmarkns = ns(
-            'remotebookmarks',
-            templatename='remotebookmarks',
-            colorname='remotebookmark',
-            logfmt='remote bookmark:  %s\n',
+            b'remotebookmarks',
+            templatename=b'remotebookmarks',
+            colorname=b'remotebookmark',
+            logfmt=b'remote bookmark:  %s\n',
             listnames=lambda repo: repo._remotenames.bmarktonodes().keys(),
             namemap=lambda repo, name: repo._remotenames.bmarktonodes().get(
                 name, []
@@ -289,13 +289,13 @@
         repo.names.addnamespace(remotebookmarkns)
 
         # hoisting only works if there are remote bookmarks
-        hoist = ui.config('remotenames', 'hoistedpeer')
+        hoist = ui.config(b'remotenames', b'hoistedpeer')
         if hoist:
             hoistednamens = ns(
-                'hoistednames',
-                templatename='hoistednames',
-                colorname='hoistedname',
-                logfmt='hoisted name:  %s\n',
+                b'hoistednames',
+                templatename=b'hoistednames',
+                colorname=b'hoistedname',
+                logfmt=b'hoisted name:  %s\n',
                 listnames=lambda repo: repo._remotenames.hoisttonodes(
                     hoist
                 ).keys(),
@@ -308,12 +308,12 @@
             )
             repo.names.addnamespace(hoistednamens)
 
-    if ui.configbool('remotenames', 'branches'):
+    if ui.configbool(b'remotenames', b'branches'):
         remotebranchns = ns(
-            'remotebranches',
-            templatename='remotebranches',
-            colorname='remotebranch',
-            logfmt='remote branch:  %s\n',
+            b'remotebranches',
+            templatename=b'remotebranches',
+            colorname=b'remotebranch',
+            logfmt=b'remote branch:  %s\n',
             listnames=lambda repo: repo._remotenames.branchtonodes().keys(),
             namemap=lambda repo, name: repo._remotenames.branchtonodes().get(
                 name, []
@@ -325,68 +325,68 @@
         repo.names.addnamespace(remotebranchns)
 
 
-@templatekeyword('remotenames', requires={'repo', 'ctx'})
+@templatekeyword(b'remotenames', requires={b'repo', b'ctx'})
 def remotenameskw(context, mapping):
     """List of strings. Remote names associated with the changeset."""
-    repo = context.resource(mapping, 'repo')
-    ctx = context.resource(mapping, 'ctx')
+    repo = context.resource(mapping, b'repo')
+    ctx = context.resource(mapping, b'ctx')
 
     remotenames = []
-    if 'remotebookmarks' in repo.names:
-        remotenames = repo.names['remotebookmarks'].names(repo, ctx.node())
+    if b'remotebookmarks' in repo.names:
+        remotenames = repo.names[b'remotebookmarks'].names(repo, ctx.node())
 
-    if 'remotebranches' in repo.names:
-        remotenames += repo.names['remotebranches'].names(repo, ctx.node())
+    if b'remotebranches' in repo.names:
+        remotenames += repo.names[b'remotebranches'].names(repo, ctx.node())
 
     return templateutil.compatlist(
-        context, mapping, 'remotename', remotenames, plural='remotenames'
+        context, mapping, b'remotename', remotenames, plural=b'remotenames'
     )
 
 
-@templatekeyword('remotebookmarks', requires={'repo', 'ctx'})
+@templatekeyword(b'remotebookmarks', requires={b'repo', b'ctx'})
 def remotebookmarkskw(context, mapping):
     """List of strings. Remote bookmarks associated with the changeset."""
-    repo = context.resource(mapping, 'repo')
-    ctx = context.resource(mapping, 'ctx')
+    repo = context.resource(mapping, b'repo')
+    ctx = context.resource(mapping, b'ctx')
 
     remotebmarks = []
-    if 'remotebookmarks' in repo.names:
-        remotebmarks = repo.names['remotebookmarks'].names(repo, ctx.node())
+    if b'remotebookmarks' in repo.names:
+        remotebmarks = repo.names[b'remotebookmarks'].names(repo, ctx.node())
 
     return templateutil.compatlist(
         context,
         mapping,
-        'remotebookmark',
+        b'remotebookmark',
         remotebmarks,
-        plural='remotebookmarks',
+        plural=b'remotebookmarks',
     )
 
 
-@templatekeyword('remotebranches', requires={'repo', 'ctx'})
+@templatekeyword(b'remotebranches', requires={b'repo', b'ctx'})
 def remotebrancheskw(context, mapping):
     """List of strings. Remote branches associated with the changeset."""
-    repo = context.resource(mapping, 'repo')
-    ctx = context.resource(mapping, 'ctx')
+    repo = context.resource(mapping, b'repo')
+    ctx = context.resource(mapping, b'ctx')
 
     remotebranches = []
-    if 'remotebranches' in repo.names:
-        remotebranches = repo.names['remotebranches'].names(repo, ctx.node())
+    if b'remotebranches' in repo.names:
+        remotebranches = repo.names[b'remotebranches'].names(repo, ctx.node())
 
     return templateutil.compatlist(
         context,
         mapping,
-        'remotebranch',
+        b'remotebranch',
         remotebranches,
-        plural='remotebranches',
+        plural=b'remotebranches',
     )
 
 
 def _revsetutil(repo, subset, x, rtypes):
     """utility function to return a set of revs based on the rtypes"""
-    args = revsetlang.getargs(x, 0, 1, _('only one argument accepted'))
+    args = revsetlang.getargs(x, 0, 1, _(b'only one argument accepted'))
     if args:
         kind, pattern, matcher = stringutil.stringmatcher(
-            revsetlang.getstring(args[0], _('argument must be a string'))
+            revsetlang.getstring(args[0], _(b'argument must be a string'))
         )
     else:
         kind = pattern = None
@@ -401,40 +401,40 @@
                 if not matcher(name):
                     continue
                 nodes.update(ns.nodes(repo, name))
-    if kind == 'literal' and not nodes:
+    if kind == b'literal' and not nodes:
         raise error.RepoLookupError(
-            _("remote name '%s' does not exist") % pattern
+            _(b"remote name '%s' does not exist") % pattern
         )
 
     revs = (cl.rev(n) for n in nodes if cl.hasnode(n))
     return subset & smartset.baseset(revs)
 
 
-@revsetpredicate('remotenames([name])')
+@revsetpredicate(b'remotenames([name])')
 def remotenamesrevset(repo, subset, x):
     """All changesets which have a remotename on them. If `name` is
     specified, only remotenames of matching remote paths are considered.
 
     Pattern matching is supported for `name`. See :hg:`help revisions.patterns`.
     """
-    return _revsetutil(repo, subset, x, ('remotebookmarks', 'remotebranches'))
+    return _revsetutil(repo, subset, x, (b'remotebookmarks', b'remotebranches'))
 
 
-@revsetpredicate('remotebranches([name])')
+@revsetpredicate(b'remotebranches([name])')
 def remotebranchesrevset(repo, subset, x):
     """All changesets which are branch heads on remotes. If `name` is
     specified, only remotenames of matching remote paths are considered.
 
     Pattern matching is supported for `name`. See :hg:`help revisions.patterns`.
     """
-    return _revsetutil(repo, subset, x, ('remotebranches',))
+    return _revsetutil(repo, subset, x, (b'remotebranches',))
 
 
-@revsetpredicate('remotebookmarks([name])')
+@revsetpredicate(b'remotebookmarks([name])')
 def remotebmarksrevset(repo, subset, x):
     """All changesets which have bookmarks on remotes. If `name` is
     specified, only remotenames of matching remote paths are considered.
 
     Pattern matching is supported for `name`. See :hg:`help revisions.patterns`.
     """
-    return _revsetutil(repo, subset, x, ('remotebookmarks',))
+    return _revsetutil(repo, subset, x, (b'remotebookmarks',))
--- a/hgext/schemes.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/schemes.py	Sun Oct 06 09:48:39 2019 -0400
@@ -61,7 +61,7 @@
 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
 # be specifying the version(s) of Mercurial they are tested with, or
 # leave the attribute unspecified.
-testedwith = 'ships-with-hg-core'
+testedwith = b'ships-with-hg-core'
 
 _partre = re.compile(br'\{(\d+)\}')
 
@@ -77,7 +77,7 @@
             self.parts = 0
 
     def __repr__(self):
-        return '<ShortRepository: %s>' % self.scheme
+        return b'<ShortRepository: %s>' % self.scheme
 
     def instance(self, ui, url, create, intents=None, createopts=None):
         url = self.resolve(url)
@@ -88,60 +88,63 @@
     def resolve(self, url):
         # Should this use the util.url class, or is manual parsing better?
         try:
-            url = url.split('://', 1)[1]
+            url = url.split(b'://', 1)[1]
         except IndexError:
-            raise error.Abort(_("no '://' in scheme url '%s'") % url)
-        parts = url.split('/', self.parts)
+            raise error.Abort(_(b"no '://' in scheme url '%s'") % url)
+        parts = url.split(b'/', self.parts)
         if len(parts) > self.parts:
             tail = parts[-1]
             parts = parts[:-1]
         else:
-            tail = ''
-        context = dict(('%d' % (i + 1), v) for i, v in enumerate(parts))
-        return ''.join(self.templater.process(self.url, context)) + tail
+            tail = b''
+        context = dict((b'%d' % (i + 1), v) for i, v in enumerate(parts))
+        return b''.join(self.templater.process(self.url, context)) + tail
 
 
 def hasdriveletter(orig, path):
     if path:
         for scheme in schemes:
-            if path.startswith(scheme + ':'):
+            if path.startswith(scheme + b':'):
                 return False
     return orig(path)
 
 
 schemes = {
-    'py': 'http://hg.python.org/',
-    'bb': 'https://bitbucket.org/',
-    'bb+ssh': 'ssh://hg@bitbucket.org/',
-    'gcode': 'https://{1}.googlecode.com/hg/',
-    'kiln': 'https://{1}.kilnhg.com/Repo/',
+    b'py': b'http://hg.python.org/',
+    b'bb': b'https://bitbucket.org/',
+    b'bb+ssh': b'ssh://hg@bitbucket.org/',
+    b'gcode': b'https://{1}.googlecode.com/hg/',
+    b'kiln': b'https://{1}.kilnhg.com/Repo/',
 }
 
 
 def extsetup(ui):
-    schemes.update(dict(ui.configitems('schemes')))
+    schemes.update(dict(ui.configitems(b'schemes')))
     t = templater.engine(templater.parse)
     for scheme, url in schemes.items():
         if (
             pycompat.iswindows
             and len(scheme) == 1
             and scheme.isalpha()
-            and os.path.exists('%s:\\' % scheme)
+            and os.path.exists(b'%s:\\' % scheme)
         ):
             raise error.Abort(
-                _('custom scheme %s:// conflicts with drive ' 'letter %s:\\\n')
+                _(
+                    b'custom scheme %s:// conflicts with drive '
+                    b'letter %s:\\\n'
+                )
                 % (scheme, scheme.upper())
             )
         hg.schemes[scheme] = ShortRepository(url, scheme, t)
 
-    extensions.wrapfunction(util, 'hasdriveletter', hasdriveletter)
+    extensions.wrapfunction(util, b'hasdriveletter', hasdriveletter)
 
 
-@command('debugexpandscheme', norepo=True)
+@command(b'debugexpandscheme', norepo=True)
 def expandscheme(ui, url, **opts):
     """given a repo path, provide the scheme-expanded path
     """
     repo = hg._peerlookup(url)
     if isinstance(repo, ShortRepository):
         url = repo.resolve(url)
-    ui.write(url + '\n')
+    ui.write(url + b'\n')
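
To make the resolve() logic above concrete: the tail of the shorthand URL is split into numbered template parameters, and anything beyond self.parts rides along verbatim. A simplified stand-in (plain replace() instead of hg's templater, with parts passed explicitly) against two of the shipped schemes:

def resolve(url, template, parts):
    url = url.split(b'://', 1)[1]
    pieces = url.split(b'/', parts)
    if len(pieces) > parts:
        tail = pieces.pop()
    else:
        tail = b''
    expanded = template
    for i, v in enumerate(pieces):
        expanded = expanded.replace(b'{%d}' % (i + 1), v)
    return expanded + tail

assert resolve(b'bb://user/repo', b'https://bitbucket.org/', 0) == b'https://bitbucket.org/user/repo'
assert resolve(b'gcode://proj', b'https://{1}.googlecode.com/hg/', 1) == b'https://proj.googlecode.com/hg/'
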
--- a/hgext/share.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/share.py	Sun Oct 06 09:48:39 2019 -0400
@@ -58,22 +58,22 @@
 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
 # be specifying the version(s) of Mercurial they are tested with, or
 # leave the attribute unspecified.
-testedwith = 'ships-with-hg-core'
+testedwith = b'ships-with-hg-core'
 
 
 @command(
-    'share',
+    b'share',
     [
-        ('U', 'noupdate', None, _('do not create a working directory')),
-        ('B', 'bookmarks', None, _('also share bookmarks')),
+        (b'U', b'noupdate', None, _(b'do not create a working directory')),
+        (b'B', b'bookmarks', None, _(b'also share bookmarks')),
         (
-            '',
-            'relative',
+            b'',
+            b'relative',
             None,
-            _('point to source using a relative path ' '(EXPERIMENTAL)'),
+            _(b'point to source using a relative path ' b'(EXPERIMENTAL)'),
         ),
     ],
-    _('[-U] [-B] SOURCE [DEST]'),
+    _(b'[-U] [-B] SOURCE [DEST]'),
     helpcategory=command.CATEGORY_REPO_CREATION,
     norepo=True,
 )
@@ -108,7 +108,7 @@
     return 0
 
 
-@command('unshare', [], '', helpcategory=command.CATEGORY_MAINTENANCE)
+@command(b'unshare', [], b'', helpcategory=command.CATEGORY_MAINTENANCE)
 def unshare(ui, repo):
     """convert a shared repository to a normal one
 
@@ -116,30 +116,30 @@
     """
 
     if not repo.shared():
-        raise error.Abort(_("this is not a shared repo"))
+        raise error.Abort(_(b"this is not a shared repo"))
 
     hg.unshare(ui, repo)
 
 
 # Wrap clone command to pass auto share options.
 def clone(orig, ui, source, *args, **opts):
-    pool = ui.config('share', 'pool')
+    pool = ui.config(b'share', b'pool')
     if pool:
         pool = util.expandpath(pool)
 
     opts[r'shareopts'] = {
-        'pool': pool,
-        'mode': ui.config('share', 'poolnaming'),
+        b'pool': pool,
+        b'mode': ui.config(b'share', b'poolnaming'),
     }
 
     return orig(ui, source, *args, **opts)
 
 
 def extsetup(ui):
-    extensions.wrapfunction(bookmarks, '_getbkfile', getbkfile)
-    extensions.wrapfunction(bookmarks.bmstore, '_recordchange', recordchange)
-    extensions.wrapfunction(bookmarks.bmstore, '_writerepo', writerepo)
-    extensions.wrapcommand(commands.table, 'clone', clone)
+    extensions.wrapfunction(bookmarks, b'_getbkfile', getbkfile)
+    extensions.wrapfunction(bookmarks.bmstore, b'_recordchange', recordchange)
+    extensions.wrapfunction(bookmarks.bmstore, b'_writerepo', writerepo)
+    extensions.wrapcommand(commands.table, b'clone', clone)
 
 
 def _hassharedbookmarks(repo):
@@ -149,7 +149,7 @@
         # from/to the source repo.
         return False
     try:
-        shared = repo.vfs.read('shared').splitlines()
+        shared = repo.vfs.read(b'shared').splitlines()
     except IOError as inst:
         if inst.errno != errno.ENOENT:
             raise
@@ -165,7 +165,7 @@
             # HG_PENDING refers to repo.root.
             try:
                 fp, pending = txnutil.trypending(
-                    repo.root, repo.vfs, 'bookmarks'
+                    repo.root, repo.vfs, b'bookmarks'
                 )
                 if pending:
                     # only in this case, bookmark information in repo
@@ -194,7 +194,7 @@
     if _hassharedbookmarks(self._repo):
         srcrepo = hg.sharedreposource(self._repo)
         if srcrepo is not None:
-            category = 'share-bookmarks'
+            category = b'share-bookmarks'
             tr.addpostclose(category, lambda tr: self._writerepo(srcrepo))
 
 
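The bookmark-sharing logic above hinges on the .hg/shared marker file, which lists shared components one per line. A minimal standalone version of the check (path hypothetical):

import errno

def hassharedbookmarks(path='.hg/shared'):
    try:
        with open(path, 'rb') as f:
            shared = f.read().splitlines()
    except IOError as inst:
        if inst.errno != errno.ENOENT:
            raise
        return False
    return b'bookmarks' in shared
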
--- a/hgext/show.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/show.py	Sun Oct 06 09:48:39 2019 -0400
@@ -49,7 +49,7 @@
 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
 # be specifying the version(s) of Mercurial they are tested with, or
 # leave the attribute unspecified.
-testedwith = 'ships-with-hg-core'
+testedwith = b'ships-with-hg-core'
 
 cmdtable = {}
 command = registrar.command(cmdtable)
@@ -61,7 +61,7 @@
     """Register a function to be invoked for an `hg show <thing>`."""
 
     # Used by _formatdoc().
-    _docformat = '%s -- %s'
+    _docformat = b'%s -- %s'
 
     def _extrasetup(self, name, func, fmtopic=None, csettopic=None):
         """Called with decorator arguments to register a show view.
@@ -88,16 +88,16 @@
 
 
 @command(
-    'show',
+    b'show',
     [
         # TODO: Switch this template flag to use cmdutil.formatteropts if
         # 'hg show' becomes stable before --template/-T is stable. For now,
         # we are putting it here without the '(EXPERIMENTAL)' flag because it
         # is an important part of the 'hg show' user experience and the entire
         # 'hg show' experience is experimental.
-        ('T', 'template', '', 'display with template', _('TEMPLATE')),
+        (b'T', b'template', b'', b'display with template', _(b'TEMPLATE')),
     ],
-    _('VIEW'),
+    _(b'VIEW'),
     helpcategory=command.CATEGORY_CHANGE_NAVIGATION,
 )
 def show(ui, repo, view=None, template=None):
@@ -119,45 +119,47 @@
     List of available views:
     """
     if ui.plain() and not template:
-        hint = _('invoke with -T/--template to control output format')
-        raise error.Abort(_('must specify a template in plain mode'), hint=hint)
+        hint = _(b'invoke with -T/--template to control output format')
+        raise error.Abort(
+            _(b'must specify a template in plain mode'), hint=hint
+        )
 
     views = showview._table
 
     if not view:
-        ui.pager('show')
+        ui.pager(b'show')
         # TODO consider using formatter here so available views can be
         # rendered to custom format.
-        ui.write(_('available views:\n'))
-        ui.write('\n')
+        ui.write(_(b'available views:\n'))
+        ui.write(b'\n')
 
         for name, func in sorted(views.items()):
-            ui.write('%s\n' % pycompat.sysbytes(func.__doc__))
+            ui.write(b'%s\n' % pycompat.sysbytes(func.__doc__))
 
-        ui.write('\n')
+        ui.write(b'\n')
         raise error.Abort(
-            _('no view requested'),
-            hint=_('use "hg show VIEW" to choose a view'),
+            _(b'no view requested'),
+            hint=_(b'use "hg show VIEW" to choose a view'),
         )
 
     # TODO use same logic as dispatch to perform prefix matching.
     if view not in views:
         raise error.Abort(
-            _('unknown view: %s') % view,
-            hint=_('run "hg show" to see available views'),
+            _(b'unknown view: %s') % view,
+            hint=_(b'run "hg show" to see available views'),
         )
 
-    template = template or 'show'
+    template = template or b'show'
 
     fn = views[view]
-    ui.pager('show')
+    ui.pager(b'show')
 
     if fn._fmtopic:
-        fmtopic = 'show%s' % fn._fmtopic
-        with ui.formatter(fmtopic, {'template': template}) as fm:
+        fmtopic = b'show%s' % fn._fmtopic
+        with ui.formatter(fmtopic, {b'template': template}) as fm:
             return fn(ui, repo, fm)
     elif fn._csettopic:
-        ref = 'show%s' % fn._csettopic
+        ref = b'show%s' % fn._csettopic
         spec = formatter.lookuptemplate(ui, ref, template)
         displayer = logcmdutil.changesettemplater(ui, repo, spec, buffered=True)
         return fn(ui, repo, displayer)
@@ -165,7 +167,7 @@
         return fn(ui, repo)
 
 
-@showview('bookmarks', fmtopic='bookmarks')
+@showview(b'bookmarks', fmtopic=b'bookmarks')
 def showbookmarks(ui, repo, fm):
     """bookmarks and their associated changeset"""
     marks = repo._bookmarks
@@ -174,7 +176,7 @@
         # specify an empty output, but we shouldn't corrupt JSON while
         # waiting for this functionality.
         if not isinstance(fm, formatter.jsonformatter):
-            ui.write(_('(no bookmarks set)\n'))
+            ui.write(_(b'(no bookmarks set)\n'))
         return
 
     revs = [repo[node].rev() for node in marks.values()]
@@ -185,27 +187,30 @@
     for bm, node in sorted(marks.items()):
         fm.startitem()
         fm.context(ctx=repo[node])
-        fm.write('bookmark', '%s', bm)
-        fm.write('node', fm.hexfunc(node), fm.hexfunc(node))
+        fm.write(b'bookmark', b'%s', bm)
+        fm.write(b'node', fm.hexfunc(node), fm.hexfunc(node))
         fm.data(
             active=bm == active, longestbookmarklen=longestname, nodelen=nodelen
         )
 
 
-@showview('stack', csettopic='stack')
+@showview(b'stack', csettopic=b'stack')
 def showstack(ui, repo, displayer):
     """current line of work"""
-    wdirctx = repo['.']
+    wdirctx = repo[b'.']
     if wdirctx.rev() == nullrev:
         raise error.Abort(
-            _('stack view only available when there is a ' 'working directory')
+            _(
+                b'stack view only available when there is a '
+                b'working directory'
+            )
         )
 
     if wdirctx.phase() == phases.public:
         ui.write(
             _(
-                '(empty stack; working directory parent is a published '
-                'changeset)\n'
+                b'(empty stack; working directory parent is a published '
+                b'changeset)\n'
             )
         )
         return
@@ -220,7 +225,7 @@
         baserev = wdirctx.rev()
         stackrevs = {wdirctx.rev()}
     else:
-        stackrevs = set(repo.revs('%d::.', baserev))
+        stackrevs = set(repo.revs(b'%d::.', baserev))
 
     ctx = repo[baserev]
     if ctx.p1().rev() != nullrev:
@@ -256,7 +261,7 @@
         # TODO make this customizable?
         newheads = set(
             repo.revs(
-                'heads(%d::) - %ld - not public()', basectx.rev(), stackrevs
+                b'heads(%d::) - %ld - not public()', basectx.rev(), stackrevs
             )
         )
     else:
@@ -266,7 +271,7 @@
     nodelen = longestshortest(repo, allrevs)
 
     try:
-        cmdutil.findcmd('rebase', commands.table)
+        cmdutil.findcmd(b'rebase', commands.table)
         haverebase = True
     except (error.AmbiguousCommand, error.UnknownCommand):
         haverebase = False
@@ -278,11 +283,11 @@
 
     tres = formatter.templateresources(ui, repo)
     shortesttmpl = formatter.maketemplater(
-        ui, '{shortest(node, %d)}' % nodelen, resources=tres
+        ui, b'{shortest(node, %d)}' % nodelen, resources=tres
     )
 
     def shortest(ctx):
-        return shortesttmpl.renderdefault({'ctx': ctx, 'node': ctx.hex()})
+        return shortesttmpl.renderdefault({b'ctx': ctx, b'node': ctx.hex()})
 
     # We write out new heads to aid in DAG awareness and to help with decision
     # making on how the stack should be reconciled with commits made since the
@@ -307,60 +312,60 @@
             ctx = repo[rev]
 
             if i:
-                ui.write(': ')
+                ui.write(b': ')
             else:
-                ui.write('  ')
+                ui.write(b'  ')
 
-            ui.write('o  ')
+            ui.write(b'o  ')
             displayer.show(ctx, nodelen=nodelen)
             displayer.flush(ctx)
-            ui.write('\n')
+            ui.write(b'\n')
 
             if i:
-                ui.write(':/')
+                ui.write(b':/')
             else:
-                ui.write(' /')
+                ui.write(b' /')
 
-            ui.write('    (')
+            ui.write(b'    (')
             ui.write(
-                _('%d commits ahead') % revdistance[rev],
-                label='stack.commitdistance',
+                _(b'%d commits ahead') % revdistance[rev],
+                label=b'stack.commitdistance',
             )
 
             if haverebase:
                 # TODO may be able to omit --source in some scenarios
-                ui.write('; ')
+                ui.write(b'; ')
                 ui.write(
                     (
-                        'hg rebase --source %s --dest %s'
+                        b'hg rebase --source %s --dest %s'
                         % (shortest(sourcectx), shortest(ctx))
                     ),
-                    label='stack.rebasehint',
+                    label=b'stack.rebasehint',
                 )
 
-            ui.write(')\n')
+            ui.write(b')\n')
 
-        ui.write(':\n:    ')
-        ui.write(_('(stack head)\n'), label='stack.label')
+        ui.write(b':\n:    ')
+        ui.write(_(b'(stack head)\n'), label=b'stack.label')
 
     if branchpointattip:
-        ui.write(' \\ /  ')
-        ui.write(_('(multiple children)\n'), label='stack.label')
-        ui.write('  |\n')
+        ui.write(b' \\ /  ')
+        ui.write(_(b'(multiple children)\n'), label=b'stack.label')
+        ui.write(b'  |\n')
 
     for rev in stackrevs:
         ctx = repo[rev]
-        symbol = '@' if rev == wdirctx.rev() else 'o'
+        symbol = b'@' if rev == wdirctx.rev() else b'o'
 
         if newheads:
-            ui.write(': ')
+            ui.write(b': ')
         else:
-            ui.write('  ')
+            ui.write(b'  ')
 
-        ui.write(symbol, '  ')
+        ui.write(symbol, b'  ')
         displayer.show(ctx, nodelen=nodelen)
         displayer.flush(ctx)
-        ui.write('\n')
+        ui.write(b'\n')
 
     # TODO display histedit hint?
 
@@ -368,25 +373,25 @@
         # Vertically and horizontally separate stack base from parent
         # to reinforce stack boundary.
         if newheads:
-            ui.write(':/   ')
+            ui.write(b':/   ')
         else:
-            ui.write(' /   ')
+            ui.write(b' /   ')
 
-        ui.write(_('(stack base)'), '\n', label='stack.label')
-        ui.write('o  ')
+        ui.write(_(b'(stack base)'), b'\n', label=b'stack.label')
+        ui.write(b'o  ')
 
         displayer.show(basectx, nodelen=nodelen)
         displayer.flush(basectx)
-        ui.write('\n')
+        ui.write(b'\n')
 
 
-@revsetpredicate('_underway([commitage[, headage]])')
+@revsetpredicate(b'_underway([commitage[, headage]])')
 def underwayrevset(repo, subset, x):
-    args = revset.getargsdict(x, 'underway', 'commitage headage')
-    if 'commitage' not in args:
-        args['commitage'] = None
-    if 'headage' not in args:
-        args['headage'] = None
+    args = revset.getargsdict(x, b'underway', b'commitage headage')
+    if b'commitage' not in args:
+        args[b'commitage'] = None
+    if b'headage' not in args:
+        args[b'headage'] = None
 
     # We assume callers of this revset add a topological sort on the
     # result. This means there is no benefit to making the revset lazy
@@ -399,13 +404,13 @@
     # to return. ``not public()`` will also pull in obsolete changesets if
     # there is a non-obsolete changeset with obsolete ancestors. This is
     # why we exclude obsolete changesets from this query.
-    rs = 'not public() and not obsolete()'
+    rs = b'not public() and not obsolete()'
     rsargs = []
-    if args['commitage']:
-        rs += ' and date(%s)'
+    if args[b'commitage']:
+        rs += b' and date(%s)'
         rsargs.append(
             revsetlang.getstring(
-                args['commitage'], _('commitage requires a string')
+                args[b'commitage'], _(b'commitage requires a string')
             )
         )
 
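The pattern in this hunk, appending b' and date(%s)' while pushing the value onto rsargs, keeps user input out of the revset text so repo.revs() can quote it later. Condensed, with a hypothetical age value:

rs = b'not public() and not obsolete()'
rsargs = []
commitage = b'-30'  # hypothetical config value: last 30 days
if commitage:
    rs += b' and date(%s)'
    rsargs.append(commitage)
# later: repo.revs(rs, *rsargs) substitutes the argument safely
assert rs == b'not public() and not obsolete() and date(%s)'
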
@@ -413,55 +418,55 @@
     relevant = revset.baseset(mutable)
 
     # Add parents of mutable changesets to provide context.
-    relevant += repo.revs('parents(%ld)', mutable)
+    relevant += repo.revs(b'parents(%ld)', mutable)
 
     # We also pull in (public) heads if they a) aren't closing a branch
     # b) are recent.
-    rs = 'head() and not closed()'
+    rs = b'head() and not closed()'
     rsargs = []
-    if args['headage']:
-        rs += ' and date(%s)'
+    if args[b'headage']:
+        rs += b' and date(%s)'
         rsargs.append(
             revsetlang.getstring(
-                args['headage'], _('headage requires a string')
+                args[b'headage'], _(b'headage requires a string')
             )
         )
 
     relevant += repo.revs(rs, *rsargs)
 
     # Add working directory parent.
-    wdirrev = repo['.'].rev()
+    wdirrev = repo[b'.'].rev()
     if wdirrev != nullrev:
         relevant += revset.baseset({wdirrev})
 
     return subset & relevant
 
 
-@showview('work', csettopic='work')
+@showview(b'work', csettopic=b'work')
 def showwork(ui, repo, displayer):
     """changesets that aren't finished"""
     # TODO support date-based limiting when calling revset.
-    revs = repo.revs('sort(_underway(), topo)')
+    revs = repo.revs(b'sort(_underway(), topo)')
     nodelen = longestshortest(repo, revs)
 
     revdag = graphmod.dagwalker(repo, revs)
 
-    ui.setconfig('experimental', 'graphshorten', True)
+    ui.setconfig(b'experimental', b'graphshorten', True)
     logcmdutil.displaygraph(
         ui,
         repo,
         revdag,
         displayer,
         graphmod.asciiedges,
-        props={'nodelen': nodelen},
+        props={b'nodelen': nodelen},
     )
 
 
 def extsetup(ui):
     # Alias `hg <prefix><view>` to `hg show <view>`.
-    for prefix in ui.configlist('commands', 'show.aliasprefix'):
+    for prefix in ui.configlist(b'commands', b'show.aliasprefix'):
         for view in showview._table:
-            name = '%s%s' % (prefix, view)
+            name = b'%s%s' % (prefix, view)
 
             choice, allcommands = cmdutil.findpossible(
                 name, commands.table, strict=True
@@ -472,10 +477,10 @@
                 continue
 
             # Same for aliases.
-            if ui.config('alias', name, None):
+            if ui.config(b'alias', name, None):
                 continue
 
-            ui.setconfig('alias', name, 'show %s' % view, source='show')
+            ui.setconfig(b'alias', name, b'show %s' % view, source=b'show')
 
 
 def longestshortest(repo, revs, minlen=4):
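
Net effect of the loop above: with e.g. commands.show.aliasprefix=sh in the config, every view gains an alias such as shwork -> show work, unless a real command or an existing [alias] entry already owns the name. A toy rendering of the name construction:

views = [b'bookmarks', b'stack', b'work']
prefixes = [b'sh']  # hypothetical commands.show.aliasprefix value
aliases = {}
for prefix in prefixes:
    for view in views:
        name = b'%s%s' % (prefix, view)
        aliases[name] = b'show %s' % view
assert aliases[b'shwork'] == b'show work'
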
@@ -516,9 +521,9 @@
             )
         )
 
-    cmdtable['show'][0].__doc__ = pycompat.sysstr('%s\n\n%s\n    ') % (
-        cmdtable['show'][0].__doc__.rstrip(),
-        pycompat.sysstr('\n\n').join(entries),
+    cmdtable[b'show'][0].__doc__ = pycompat.sysstr(b'%s\n\n%s\n    ') % (
+        cmdtable[b'show'][0].__doc__.rstrip(),
+        pycompat.sysstr(b'\n\n').join(entries),
     )
 
 
--- a/hgext/sparse.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/sparse.py	Sun Oct 06 09:48:39 2019 -0400
@@ -92,7 +92,7 @@
 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
 # be specifying the version(s) of Mercurial they are tested with, or
 # leave the attribute unspecified.
-testedwith = 'ships-with-hg-core'
+testedwith = b'ships-with-hg-core'
 
 cmdtable = {}
 command = registrar.command(cmdtable)
@@ -121,24 +121,24 @@
 
     if cls is object:
         raise AttributeError(
-            _("type '%s' has no property '%s'") % (origcls, propname)
+            _(b"type '%s' has no property '%s'") % (origcls, propname)
         )
 
 
 def _setuplog(ui):
-    entry = commands.table['log|history']
+    entry = commands.table[b'log|history']
     entry[1].append(
         (
-            '',
-            'sparse',
+            b'',
+            b'sparse',
             None,
-            "limit to changesets affecting the sparse checkout",
+            b"limit to changesets affecting the sparse checkout",
         )
     )
 
     def _initialrevs(orig, repo, opts):
         revs = orig(repo, opts)
-        if opts.get('sparse'):
+        if opts.get(b'sparse'):
             sparsematch = sparse.matcher(repo)
 
             def ctxmatch(rev):
@@ -148,7 +148,7 @@
             revs = revs.filter(ctxmatch)
         return revs
 
-    extensions.wrapfunction(logcmdutil, '_initialrevs', _initialrevs)
+    extensions.wrapfunction(logcmdutil, b'_initialrevs', _initialrevs)
 
 
 def _clonesparsecmd(orig, ui, repo, *args, **opts):
@@ -167,7 +167,7 @@
         pat = enableprofile_pat
         enableprofile = True
     if sum([include, exclude, enableprofile]) > 1:
-        raise error.Abort(_("too many flags specified."))
+        raise error.Abort(_(b"too many flags specified."))
     # if --narrow is passed, it means they are includes and excludes for narrow
     # clone
     if not narrow_pat and (include or exclude or enableprofile):
@@ -184,26 +184,26 @@
             )
             return orig(self, node, overwrite, *args, **kwargs)
 
-        extensions.wrapfunction(hg, 'updaterepo', clonesparse)
+        extensions.wrapfunction(hg, b'updaterepo', clonesparse)
     return orig(ui, repo, *args, **opts)
 
 
 def _setupclone(ui):
-    entry = commands.table['clone']
-    entry[1].append(('', 'enable-profile', [], 'enable a sparse profile'))
-    entry[1].append(('', 'include', [], 'include sparse pattern'))
-    entry[1].append(('', 'exclude', [], 'exclude sparse pattern'))
-    extensions.wrapcommand(commands.table, 'clone', _clonesparsecmd)
+    entry = commands.table[b'clone']
+    entry[1].append((b'', b'enable-profile', [], b'enable a sparse profile'))
+    entry[1].append((b'', b'include', [], b'include sparse pattern'))
+    entry[1].append((b'', b'exclude', [], b'exclude sparse pattern'))
+    extensions.wrapcommand(commands.table, b'clone', _clonesparsecmd)
 
 
 def _setupadd(ui):
-    entry = commands.table['add']
+    entry = commands.table[b'add']
     entry[1].append(
         (
-            's',
-            'sparse',
+            b's',
+            b'sparse',
             None,
-            'also include directories of added files in sparse config',
+            b'also include directories of added files in sparse config',
         )
     )
 
@@ -216,7 +216,7 @@
             sparse.updateconfig(repo, list(dirs), opts, include=True)
         return orig(ui, repo, *pats, **opts)
 
-    extensions.wrapcommand(commands.table, 'add', _add)
+    extensions.wrapcommand(commands.table, b'add', _add)
 
 
 def _setupdirstate(ui):
@@ -232,7 +232,7 @@
         match = matchmod.intersectmatchers(match, sm)
         return orig(self, match, subrepos, unknown, ignored, full)
 
-    extensions.wrapfunction(dirstate.dirstate, 'walk', walk)
+    extensions.wrapfunction(dirstate.dirstate, b'walk', walk)
 
     # dirstate.rebuild should not add non-matching files
     def _rebuild(orig, self, parent, allfiles, changedfiles=None):
@@ -250,13 +250,20 @@
 
         return orig(self, parent, allfiles, changedfiles)
 
-    extensions.wrapfunction(dirstate.dirstate, 'rebuild', _rebuild)
+    extensions.wrapfunction(dirstate.dirstate, b'rebuild', _rebuild)
 
     # Prevent adding files that are outside the sparse checkout
-    editfuncs = ['normal', 'add', 'normallookup', 'copy', 'remove', 'merge']
+    editfuncs = [
+        b'normal',
+        b'add',
+        b'normallookup',
+        b'copy',
+        b'remove',
+        b'merge',
+    ]
     hint = _(
-        'include file with `hg debugsparse --include <pattern>` or use '
-        + '`hg add -s <file>` to include file directory while adding'
+        b'include file with `hg debugsparse --include <pattern>` or use '
+        + b'`hg add -s <file>` to include file directory while adding'
     )
     for func in editfuncs:
 
@@ -267,8 +274,8 @@
                     if f is not None and not sparsematch(f) and f not in self:
                         raise error.Abort(
                             _(
-                                "cannot add '%s' - it is outside "
-                                "the sparse checkout"
+                                b"cannot add '%s' - it is outside "
+                                b"the sparse checkout"
                             )
                             % f,
                             hint=hint,
@@ -279,31 +286,31 @@
 
 
 @command(
-    'debugsparse',
+    b'debugsparse',
     [
-        ('I', 'include', False, _('include files in the sparse checkout')),
-        ('X', 'exclude', False, _('exclude files in the sparse checkout')),
-        ('d', 'delete', False, _('delete an include/exclude rule')),
+        (b'I', b'include', False, _(b'include files in the sparse checkout')),
+        (b'X', b'exclude', False, _(b'exclude files in the sparse checkout')),
+        (b'd', b'delete', False, _(b'delete an include/exclude rule')),
         (
-            'f',
-            'force',
+            b'f',
+            b'force',
             False,
-            _('allow changing rules even with pending changes'),
+            _(b'allow changing rules even with pending changes'),
         ),
-        ('', 'enable-profile', False, _('enables the specified profile')),
-        ('', 'disable-profile', False, _('disables the specified profile')),
-        ('', 'import-rules', False, _('imports rules from a file')),
-        ('', 'clear-rules', False, _('clears local include/exclude rules')),
+        (b'', b'enable-profile', False, _(b'enables the specified profile')),
+        (b'', b'disable-profile', False, _(b'disables the specified profile')),
+        (b'', b'import-rules', False, _(b'imports rules from a file')),
+        (b'', b'clear-rules', False, _(b'clears local include/exclude rules')),
         (
-            '',
-            'refresh',
+            b'',
+            b'refresh',
             False,
-            _('updates the working after sparseness changes'),
+            _(b'updates the working after sparseness changes'),
         ),
-        ('', 'reset', False, _('makes the repo full again')),
+        (b'', b'reset', False, _(b'makes the repo full again')),
     ]
     + commands.templateopts,
-    _('[--OPTION] PATTERN...'),
+    _(b'[--OPTION] PATTERN...'),
     helpbasic=True,
 )
 def debugsparse(ui, repo, *pats, **opts):
@@ -348,16 +355,16 @@
     Returns 0 if editing the sparse checkout succeeds.
     """
     opts = pycompat.byteskwargs(opts)
-    include = opts.get('include')
-    exclude = opts.get('exclude')
-    force = opts.get('force')
-    enableprofile = opts.get('enable_profile')
-    disableprofile = opts.get('disable_profile')
-    importrules = opts.get('import_rules')
-    clearrules = opts.get('clear_rules')
-    delete = opts.get('delete')
-    refresh = opts.get('refresh')
-    reset = opts.get('reset')
+    include = opts.get(b'include')
+    exclude = opts.get(b'exclude')
+    force = opts.get(b'force')
+    enableprofile = opts.get(b'enable_profile')
+    disableprofile = opts.get(b'disable_profile')
+    importrules = opts.get(b'import_rules')
+    clearrules = opts.get(b'clear_rules')
+    delete = opts.get(b'delete')
+    refresh = opts.get(b'refresh')
+    reset = opts.get(b'reset')
     count = sum(
         [
             include,
@@ -372,21 +379,23 @@
         ]
     )
     if count > 1:
-        raise error.Abort(_("too many flags specified"))
+        raise error.Abort(_(b"too many flags specified"))
 
     if count == 0:
-        if repo.vfs.exists('sparse'):
-            ui.status(repo.vfs.read("sparse") + "\n")
+        if repo.vfs.exists(b'sparse'):
+            ui.status(repo.vfs.read(b"sparse") + b"\n")
             temporaryincludes = sparse.readtemporaryincludes(repo)
             if temporaryincludes:
-                ui.status(_("Temporarily Included Files (for merge/rebase):\n"))
-                ui.status(("\n".join(temporaryincludes) + "\n"))
+                ui.status(
+                    _(b"Temporarily Included Files (for merge/rebase):\n")
+                )
+                ui.status((b"\n".join(temporaryincludes) + b"\n"))
             return
         else:
             raise error.Abort(
                 _(
-                    'the debugsparse command is only supported on'
-                    ' sparse repositories'
+                    b'the debugsparse command is only supported on'
+                    b' sparse repositories'
                 )
             )
 
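A pattern repeated in every command body in this sweep: **opts arrives from the dispatcher with native str keys, so debugsparse() first runs opts = pycompat.byteskwargs(opts) and all later lookups use b'...' keys. A hedged sketch of that round-trip (latin-1 mirrors Mercurial's convention; the real helpers are pycompat.byteskwargs/strkwargs):

    def byteskwargs(dic):
        # str-keyed **kwargs -> bytes-keyed dict, so opts.get(b'include')
        # and friends find their values
        return {k.encode('latin-1'): v for k, v in dic.items()}

    def strkwargs(dic):
        # inverse direction, used before calling back into **kwargs APIs
        return {k.decode('latin-1'): v for k, v in dic.items()}

    opts = byteskwargs({'enable_profile': True, 'exclude': False})
    assert opts.get(b'enable_profile') is True
    assert strkwargs(opts)['exclude'] is False
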
--- a/hgext/split.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/split.py	Sun Oct 06 09:48:39 2019 -0400
@@ -40,17 +40,17 @@
 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
 # be specifying the version(s) of Mercurial they are tested with, or
 # leave the attribute unspecified.
-testedwith = 'ships-with-hg-core'
+testedwith = b'ships-with-hg-core'
 
 
 @command(
-    'split',
+    b'split',
     [
-        ('r', 'rev', '', _("revision to split"), _('REV')),
-        ('', 'rebase', True, _('rebase descendants after split')),
+        (b'r', b'rev', b'', _(b"revision to split"), _(b'REV')),
+        (b'', b'rebase', True, _(b'rebase descendants after split')),
     ]
     + cmdutil.commitopts2,
-    _('hg split [--no-rebase] [[-r] REV]'),
+    _(b'hg split [--no-rebase] [[-r] REV]'),
     helpcategory=command.CATEGORY_CHANGE_MANAGEMENT,
     helpbasic=True,
 )
@@ -67,21 +67,21 @@
     """
     opts = pycompat.byteskwargs(opts)
     revlist = []
-    if opts.get('rev'):
-        revlist.append(opts.get('rev'))
+    if opts.get(b'rev'):
+        revlist.append(opts.get(b'rev'))
     revlist.extend(revs)
-    with repo.wlock(), repo.lock(), repo.transaction('split') as tr:
-        revs = scmutil.revrange(repo, revlist or ['.'])
+    with repo.wlock(), repo.lock(), repo.transaction(b'split') as tr:
+        revs = scmutil.revrange(repo, revlist or [b'.'])
         if len(revs) > 1:
-            raise error.Abort(_('cannot split multiple revisions'))
+            raise error.Abort(_(b'cannot split multiple revisions'))
 
         rev = revs.first()
         ctx = repo[rev]
         if rev is None or ctx.node() == nullid:
-            ui.status(_('nothing to split\n'))
+            ui.status(_(b'nothing to split\n'))
             return 1
         if ctx.node() is None:
-            raise error.Abort(_('cannot split working directory'))
+            raise error.Abort(_(b'cannot split working directory'))
 
         # rewriteutil.precheck is not very useful here because:
         # 1. null check is done above and it's more friendly to return 1
@@ -92,33 +92,33 @@
         # So only "public" check is useful and it's checked directly here.
         if ctx.phase() == phases.public:
             raise error.Abort(
-                _('cannot split public changeset'),
-                hint=_("see 'hg help phases' for details"),
+                _(b'cannot split public changeset'),
+                hint=_(b"see 'hg help phases' for details"),
             )
 
-        descendants = list(repo.revs('(%d::) - (%d)', rev, rev))
+        descendants = list(repo.revs(b'(%d::) - (%d)', rev, rev))
         alloworphaned = obsolete.isenabled(repo, obsolete.allowunstableopt)
-        if opts.get('rebase'):
+        if opts.get(b'rebase'):
             # Skip obsoleted descendants and their descendants so the rebase
             # won't cause conflicts for sure.
             torebase = list(
                 repo.revs(
-                    '%ld - (%ld & obsolete())::', descendants, descendants
+                    b'%ld - (%ld & obsolete())::', descendants, descendants
                 )
             )
             if not alloworphaned and len(torebase) != len(descendants):
                 raise error.Abort(
-                    _('split would leave orphaned changesets ' 'behind')
+                    _(b'split would leave orphaned changesets ' b'behind')
                 )
         else:
             if not alloworphaned and descendants:
                 raise error.Abort(
-                    _('cannot split changeset with children without rebase')
+                    _(b'cannot split changeset with children without rebase')
                 )
             torebase = ()
 
         if len(ctx.parents()) > 1:
-            raise error.Abort(_('cannot split a merge changeset'))
+            raise error.Abort(_(b'cannot split a merge changeset'))
 
         cmdutil.bailifchanged(repo)
 
@@ -127,7 +127,7 @@
         if bname and repo._bookmarks[bname] != ctx.node():
             bookmarks.deactivate(repo)
 
-        wnode = repo['.'].node()
+        wnode = repo[b'.'].node()
         top = None
         try:
             top = dosplit(ui, repo, tr, ctx, opts)
@@ -158,37 +158,37 @@
     while incomplete(repo):
         if committed:
             header = _(
-                'HG: Splitting %s. So far it has been split into:\n'
+                b'HG: Splitting %s. So far it has been split into:\n'
             ) % short(ctx.node())
             for c in committed:
-                firstline = c.description().split('\n', 1)[0]
-                header += _('HG: - %s: %s\n') % (short(c.node()), firstline)
+                firstline = c.description().split(b'\n', 1)[0]
+                header += _(b'HG: - %s: %s\n') % (short(c.node()), firstline)
             header += _(
-                'HG: Write commit message for the next split ' 'changeset.\n'
+                b'HG: Write commit message for the next split ' b'changeset.\n'
             )
         else:
             header = _(
-                'HG: Splitting %s. Write commit message for the '
-                'first split changeset.\n'
+                b'HG: Splitting %s. Write commit message for the '
+                b'first split changeset.\n'
             ) % short(ctx.node())
         opts.update(
             {
-                'edit': True,
-                'interactive': True,
-                'message': header + ctx.description(),
+                b'edit': True,
+                b'interactive': True,
+                b'message': header + ctx.description(),
             }
         )
         commands.commit(ui, repo, **pycompat.strkwargs(opts))
-        newctx = repo['.']
+        newctx = repo[b'.']
         committed.append(newctx)
 
     if not committed:
-        raise error.Abort(_('cannot split an empty revision'))
+        raise error.Abort(_(b'cannot split an empty revision'))
 
     scmutil.cleanupnodes(
         repo,
         {ctx.node(): [c.node() for c in committed]},
-        operation='split',
+        operation=b'split',
         fixphase=True,
     )
 
@@ -199,6 +199,6 @@
     rebase.rebase(
         ui,
         repo,
-        rev=[revsetlang.formatspec('%ld', src)],
-        dest=revsetlang.formatspec('%d', destctx.rev()),
+        rev=[revsetlang.formatspec(b'%ld', src)],
+        dest=revsetlang.formatspec(b'%d', destctx.rev()),
     )
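
split.py also shows why the conversion has to be all-or-nothing: once a template such as repo.revs(b'(%d::) - (%d)', ...) or revsetlang.formatspec(b'%ld', src) is bytes, every %-interpolated operand must be bytes-compatible, because Python 3 refuses to mix bytes and str in percent-formatting. A self-contained check of that behavior:

    # bytes percent-formatting accepts integers and bytes (PEP 461)...
    assert b'%d:%s' % (5, b'abc123') == b'5:abc123'

    # ...but a stray native str raises TypeError, which is the class of
    # bug the mechanical byteification removes
    try:
        b'%s' % 'abc123'
    except TypeError:
        pass
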
--- a/hgext/sqlitestore.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/sqlitestore.py	Sun Oct 06 09:48:39 2019 -0400
@@ -89,9 +89,9 @@
 
 # experimental config: storage.sqlite.compression
 configitem(
-    'storage',
-    'sqlite.compression',
-    default='zstd' if zstd else 'zlib',
+    b'storage',
+    b'sqlite.compression',
+    default=b'zstd' if zstd else b'zlib',
     experimental=True,
 )
 
@@ -99,7 +99,7 @@
 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
 # be specifying the version(s) of Mercurial they are tested with, or
 # leave the attribute unspecified.
-testedwith = 'ships-with-hg-core'
+testedwith = b'ships-with-hg-core'
 
 REQUIREMENT = b'exp-sqlite-001'
 REQUIREMENT_ZSTD = b'exp-sqlite-comp-001=zstd'
@@ -224,7 +224,7 @@
             delta = zlib.decompress(delta)
         else:
             raise SQLiteStoreError(
-                'unhandled compression type: %d' % compression
+                b'unhandled compression type: %d' % compression
             )
 
         deltas.append(delta)
@@ -319,7 +319,7 @@
 
         self._compengine = compression
 
-        if compression == 'zstd':
+        if compression == b'zstd':
             self._cctx = zstd.ZstdCompressor(level=3)
             self._dctx = zstd.ZstdDecompressor()
         else:
@@ -358,7 +358,7 @@
 
             if i != rev:
                 raise SQLiteStoreError(
-                    _('sqlite database has inconsistent ' 'revision numbers')
+                    _(b'sqlite database has inconsistent ' b'revision numbers')
                 )
 
             if p1rev == nullrev:
@@ -411,7 +411,7 @@
             return nullid, nullid
 
         if node not in self._revisions:
-            raise error.LookupError(node, self._path, _('no node'))
+            raise error.LookupError(node, self._path, _(b'no node'))
 
         entry = self._revisions[node]
         return entry.p1node, entry.p2node
@@ -431,7 +431,7 @@
             return nullrev
 
         if node not in self._nodetorev:
-            raise error.LookupError(node, self._path, _('no node'))
+            raise error.LookupError(node, self._path, _(b'no node'))
 
         return self._nodetorev[node]
 
@@ -532,7 +532,7 @@
             node = self.node(node)
 
         if node not in self._nodetorev:
-            raise error.LookupError(node, self._path, _('no node'))
+            raise error.LookupError(node, self._path, _(b'no node'))
 
         if node in self._revisioncache:
             return self._revisioncache[node]
@@ -586,9 +586,9 @@
         assumehaveparentrevisions=False,
         deltamode=repository.CG_DELTAMODE_STD,
     ):
-        if nodesorder not in ('nodes', 'storage', 'linear', None):
+        if nodesorder not in (b'nodes', b'storage', b'linear', None):
             raise error.ProgrammingError(
-                'unhandled value for nodesorder: %s' % nodesorder
+                b'unhandled value for nodesorder: %s' % nodesorder
             )
 
         nodes = [n for n in nodes if n != nullid]
@@ -649,7 +649,7 @@
         cachedelta=None,
     ):
         if flags:
-            raise SQLiteStoreError(_('flags not supported on revisions'))
+            raise SQLiteStoreError(_(b'flags not supported on revisions'))
 
         validatehash = node is not None
         node = node or storageutil.hashrevisionsha1(revisiondata, p1, p2)
@@ -684,7 +684,7 @@
                 storeflags |= FLAG_CENSORED
 
             if wireflags & ~repository.REVISION_FLAG_CENSORED:
-                raise SQLiteStoreError('unhandled revision flag')
+                raise SQLiteStoreError(b'unhandled revision flag')
 
             if maybemissingparents:
                 if p1 != nullid and not self.hasnode(p1):
@@ -700,7 +700,7 @@
             # If base is censored, delta must be full replacement in a single
             # patch operation.
             if baserev != nullrev and self.iscensored(baserev):
-                hlen = struct.calcsize('>lll')
+                hlen = struct.calcsize(b'>lll')
                 oldlen = len(self.rawdata(deltabase, _verifyhash=False))
                 newlen = len(delta) - hlen
 
@@ -772,7 +772,7 @@
         # SQLite, since columns can be resized at will.
         if len(tombstone) > len(self.rawdata(censornode)):
             raise error.Abort(
-                _('censor tombstone must be no longer than ' 'censored data')
+                _(b'censor tombstone must be no longer than ' b'censored data')
             )
 
         # We need to replace the censored revision's data with the tombstone.
@@ -811,18 +811,18 @@
 
             deltahash = hashlib.sha1(fulltext).digest()
 
-            if self._compengine == 'zstd':
+            if self._compengine == b'zstd':
                 deltablob = self._cctx.compress(fulltext)
                 compression = COMPRESSION_ZSTD
-            elif self._compengine == 'zlib':
+            elif self._compengine == b'zlib':
                 deltablob = zlib.compress(fulltext)
                 compression = COMPRESSION_ZLIB
-            elif self._compengine == 'none':
+            elif self._compengine == b'none':
                 deltablob = fulltext
                 compression = COMPRESSION_NONE
             else:
                 raise error.ProgrammingError(
-                    'unhandled compression engine: %s' % self._compengine
+                    b'unhandled compression engine: %s' % self._compengine
                 )
 
             if len(deltablob) >= len(fulltext):
@@ -904,28 +904,28 @@
         d = {}
 
         if exclusivefiles:
-            d['exclusivefiles'] = []
+            d[b'exclusivefiles'] = []
 
         if sharedfiles:
             # TODO list sqlite file(s) here.
-            d['sharedfiles'] = []
+            d[b'sharedfiles'] = []
 
         if revisionscount:
-            d['revisionscount'] = len(self)
+            d[b'revisionscount'] = len(self)
 
         if trackedsize:
-            d['trackedsize'] = sum(
+            d[b'trackedsize'] = sum(
                 len(self.revision(node)) for node in self._nodetorev
             )
 
         if storedsize:
             # TODO implement this?
-            d['storedsize'] = None
+            d[b'storedsize'] = None
 
         return d
 
     def verifyintegrity(self, state):
-        state['skipread'] = set()
+        state[b'skipread'] = set()
 
         for rev in self:
             node = self.node(rev)
@@ -934,10 +934,10 @@
                 self.revision(node)
             except Exception as e:
                 yield sqliteproblem(
-                    error=_('unpacking %s: %s') % (short(node), e), node=node
+                    error=_(b'unpacking %s: %s') % (short(node), e), node=node
                 )
 
-                state['skipread'].add(node)
+                state[b'skipread'].add(node)
 
     # End of ifilestorage interface.
 
@@ -956,7 +956,7 @@
         if storageutil.iscensoredtext(fulltext):
             raise error.CensoredNodeError(self._path, node, fulltext)
 
-        raise SQLiteStoreError(_('integrity check failed on %s') % self._path)
+        raise SQLiteStoreError(_(b'integrity check failed on %s') % self._path)
 
     def _addrawrevision(
         self,
@@ -1008,18 +1008,18 @@
         # first.
         deltahash = hashlib.sha1(delta).digest()
 
-        if self._compengine == 'zstd':
+        if self._compengine == b'zstd':
             deltablob = self._cctx.compress(delta)
             compression = COMPRESSION_ZSTD
-        elif self._compengine == 'zlib':
+        elif self._compengine == b'zlib':
             deltablob = zlib.compress(delta)
             compression = COMPRESSION_ZLIB
-        elif self._compengine == 'none':
+        elif self._compengine == b'none':
             deltablob = delta
             compression = COMPRESSION_NONE
         else:
             raise error.ProgrammingError(
-                'unhandled compression engine: %s' % self._compengine
+                b'unhandled compression engine: %s' % self._compengine
             )
 
         # Don't store compressed data if it isn't practical.
@@ -1095,7 +1095,7 @@
         def committransaction(_):
             self._dbconn.commit()
 
-        tr.addfinalize('sqlitestore', committransaction)
+        tr.addfinalize(b'sqlitestore', committransaction)
 
         return tr
 
@@ -1110,7 +1110,7 @@
             if self._db[0] == tid:
                 return self._db[1]
 
-        db = makedb(self.svfs.join('db.sqlite'))
+        db = makedb(self.svfs.join(b'db.sqlite'))
         self._db = (tid, db)
 
         return db
@@ -1135,7 +1135,7 @@
         pass
 
     else:
-        raise error.Abort(_('sqlite database has unrecognized version'))
+        raise error.Abort(_(b'sqlite database has unrecognized version'))
 
     db.execute(r'PRAGMA journal_mode=WAL')
 
@@ -1155,65 +1155,65 @@
 
 
 def newreporequirements(orig, ui, createopts):
-    if createopts['backend'] != 'sqlite':
+    if createopts[b'backend'] != b'sqlite':
         return orig(ui, createopts)
 
     # This restriction can be lifted once we have more confidence.
-    if 'sharedrepo' in createopts:
+    if b'sharedrepo' in createopts:
         raise error.Abort(
-            _('shared repositories not supported with SQLite ' 'store')
+            _(b'shared repositories not supported with SQLite ' b'store')
         )
 
     # This filtering is out of an abundance of caution: we want to ensure
     # we honor creation options and we do that by annotating exactly the
     # creation options we recognize.
     known = {
-        'narrowfiles',
-        'backend',
-        'shallowfilestore',
+        b'narrowfiles',
+        b'backend',
+        b'shallowfilestore',
     }
 
     unsupported = set(createopts) - known
     if unsupported:
         raise error.Abort(
-            _('SQLite store does not support repo creation ' 'option: %s')
-            % ', '.join(sorted(unsupported))
+            _(b'SQLite store does not support repo creation ' b'option: %s')
+            % b', '.join(sorted(unsupported))
         )
 
     # Since we're a hybrid store that still relies on revlogs, we fall back
     # to using the revlogv1 backend's storage requirements then adding our
     # own requirement.
-    createopts['backend'] = 'revlogv1'
+    createopts[b'backend'] = b'revlogv1'
     requirements = orig(ui, createopts)
     requirements.add(REQUIREMENT)
 
-    compression = ui.config('storage', 'sqlite.compression')
+    compression = ui.config(b'storage', b'sqlite.compression')
 
-    if compression == 'zstd' and not zstd:
+    if compression == b'zstd' and not zstd:
         raise error.Abort(
             _(
-                'storage.sqlite.compression set to "zstd" but '
-                'zstandard compression not available to this '
-                'Mercurial install'
+                b'storage.sqlite.compression set to "zstd" but '
+                b'zstandard compression not available to this '
+                b'Mercurial install'
             )
         )
 
-    if compression == 'zstd':
+    if compression == b'zstd':
         requirements.add(REQUIREMENT_ZSTD)
-    elif compression == 'zlib':
+    elif compression == b'zlib':
         requirements.add(REQUIREMENT_ZLIB)
-    elif compression == 'none':
+    elif compression == b'none':
         requirements.add(REQUIREMENT_NONE)
     else:
         raise error.Abort(
             _(
-                'unknown compression engine defined in '
-                'storage.sqlite.compression: %s'
+                b'unknown compression engine defined in '
+                b'storage.sqlite.compression: %s'
             )
             % compression
         )
 
-    if createopts.get('shallowfilestore'):
+    if createopts.get(b'shallowfilestore'):
         requirements.add(REQUIREMENT_SHALLOW_FILES)
 
     return requirements
@@ -1228,16 +1228,16 @@
             path = path[1:]
 
         if REQUIREMENT_ZSTD in self.requirements:
-            compression = 'zstd'
+            compression = b'zstd'
         elif REQUIREMENT_ZLIB in self.requirements:
-            compression = 'zlib'
+            compression = b'zlib'
         elif REQUIREMENT_NONE in self.requirements:
-            compression = 'none'
+            compression = b'none'
         else:
             raise error.Abort(
                 _(
-                    'unable to determine what compression engine '
-                    'to use for SQLite storage'
+                    b'unable to determine what compression engine '
+                    b'to use for SQLite storage'
                 )
             )
 
@@ -1260,8 +1260,8 @@
         if REQUIREMENT_ZSTD in requirements and not zstd:
             raise error.Abort(
                 _(
-                    'repository uses zstandard compression, which '
-                    'is not available to this Mercurial install'
+                    b'repository uses zstandard compression, which '
+                    b'is not available to this Mercurial install'
                 )
             )
 
@@ -1281,11 +1281,11 @@
 def extsetup(ui):
     localrepo.featuresetupfuncs.add(featuresetup)
     extensions.wrapfunction(
-        localrepo, 'newreporequirements', newreporequirements
+        localrepo, b'newreporequirements', newreporequirements
     )
-    extensions.wrapfunction(localrepo, 'makefilestorage', makefilestorage)
-    extensions.wrapfunction(localrepo, 'makemain', makemain)
-    extensions.wrapfunction(verify.verifier, '__init__', verifierinit)
+    extensions.wrapfunction(localrepo, b'makefilestorage', makefilestorage)
+    extensions.wrapfunction(localrepo, b'makemain', makemain)
+    extensions.wrapfunction(verify.verifier, b'__init__', verifierinit)
 
 
 def reposetup(ui, repo):
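
sqlitestore.py compares the configured engine against bytes values (b'zstd', b'zlib', b'none') in two separate code paths; a str/bytes mismatch on either side would fall through to the 'unhandled compression engine' ProgrammingError instead of compressing anything. A reduced sketch of that dispatch under the same assumptions (the zstd branch is stubbed, since the python-zstandard bindings are optional):

    import zlib

    def compressblob(engine, fulltext):
        if engine == b'zstd':
            # optional python-zstandard bindings, not assumed in this sketch
            raise NotImplementedError('zstd not available in this sketch')
        elif engine == b'zlib':
            return zlib.compress(fulltext)
        elif engine == b'none':
            return fulltext
        raise ValueError('unhandled compression engine: %r' % engine)

    blob = compressblob(b'zlib', b'revision fulltext')
    assert zlib.decompress(blob) == b'revision fulltext'
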
--- a/hgext/strip.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/strip.py	Sun Oct 06 09:48:39 2019 -0400
@@ -30,7 +30,7 @@
 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
 # be specifying the version(s) of Mercurial they are tested with, or
 # leave the attribute unspecified.
-testedwith = 'ships-with-hg-core'
+testedwith = b'ships-with-hg-core'
 
 
 def checklocalchanges(repo, force=False):
@@ -48,14 +48,14 @@
     currentbranch = repo[None].branch()
 
     if (
-        util.safehasattr(repo, 'mq')
+        util.safehasattr(repo, b'mq')
         and p2 != nullid
         and p2 in [x.node for x in repo.mq.applied]
     ):
         unode = p2
     elif currentbranch != repo[unode].branch():
-        pwdir = 'parents(wdir())'
-        revset = 'max(((parents(%ln::%r) + %r) - %ln::%r) and branch(%s))'
+        pwdir = b'parents(wdir())'
+        revset = b'max(((parents(%ln::%r) + %r) - %ln::%r) and branch(%s))'
         branchtarget = repo.revs(
             revset, nodes, pwdir, pwdir, nodes, pwdir, currentbranch
         )
@@ -91,61 +91,66 @@
 
         repomarks = repo._bookmarks
         if bookmarks:
-            with repo.transaction('strip') as tr:
+            with repo.transaction(b'strip') as tr:
                 if repo._activebookmark in bookmarks:
                     bookmarksmod.deactivate(repo)
                 repomarks.applychanges(repo, tr, [(b, None) for b in bookmarks])
             for bookmark in sorted(bookmarks):
-                ui.write(_("bookmark '%s' deleted\n") % bookmark)
+                ui.write(_(b"bookmark '%s' deleted\n") % bookmark)
 
 
 @command(
-    "strip",
+    b"strip",
     [
         (
-            'r',
-            'rev',
+            b'r',
+            b'rev',
             [],
             _(
-                'strip specified revision (optional, '
-                'can specify revisions without this '
-                'option)'
+                b'strip specified revision (optional, '
+                b'can specify revisions without this '
+                b'option)'
             ),
-            _('REV'),
+            _(b'REV'),
         ),
         (
-            'f',
-            'force',
+            b'f',
+            b'force',
             None,
             _(
-                'force removal of changesets, discard '
-                'uncommitted changes (no backup)'
+                b'force removal of changesets, discard '
+                b'uncommitted changes (no backup)'
             ),
         ),
-        ('', 'no-backup', None, _('do not save backup bundle')),
-        ('', 'nobackup', None, _('do not save backup bundle ' '(DEPRECATED)')),
-        ('n', '', None, _('ignored  (DEPRECATED)')),
+        (b'', b'no-backup', None, _(b'do not save backup bundle')),
         (
-            'k',
-            'keep',
+            b'',
+            b'nobackup',
             None,
-            _("do not modify working directory during " "strip"),
+            _(b'do not save backup bundle ' b'(DEPRECATED)'),
+        ),
+        (b'n', b'', None, _(b'ignored  (DEPRECATED)')),
+        (
+            b'k',
+            b'keep',
+            None,
+            _(b"do not modify working directory during " b"strip"),
         ),
         (
-            'B',
-            'bookmark',
+            b'B',
+            b'bookmark',
             [],
-            _("remove revs only reachable from given" " bookmark"),
-            _('BOOKMARK'),
+            _(b"remove revs only reachable from given" b" bookmark"),
+            _(b'BOOKMARK'),
         ),
         (
-            '',
-            'soft',
+            b'',
+            b'soft',
             None,
-            _("simply drop changesets from visible history (EXPERIMENTAL)"),
+            _(b"simply drop changesets from visible history (EXPERIMENTAL)"),
         ),
     ],
-    _('hg strip [-k] [-f] [-B bookmark] [-r] REV...'),
+    _(b'hg strip [-k] [-f] [-B bookmark] [-r] REV...'),
     helpcategory=command.CATEGORY_MAINTENANCE,
 )
 def stripcmd(ui, repo, *revs, **opts):
@@ -179,21 +184,21 @@
     """
     opts = pycompat.byteskwargs(opts)
     backup = True
-    if opts.get('no_backup') or opts.get('nobackup'):
+    if opts.get(b'no_backup') or opts.get(b'nobackup'):
         backup = False
 
     cl = repo.changelog
-    revs = list(revs) + opts.get('rev')
+    revs = list(revs) + opts.get(b'rev')
     revs = set(scmutil.revrange(repo, revs))
 
     with repo.wlock():
-        bookmarks = set(opts.get('bookmark'))
+        bookmarks = set(opts.get(b'bookmark'))
         if bookmarks:
             repomarks = repo._bookmarks
             if not bookmarks.issubset(repomarks):
                 raise error.Abort(
-                    _("bookmark '%s' not found")
-                    % ','.join(sorted(bookmarks - set(repomarks.keys())))
+                    _(b"bookmark '%s' not found")
+                    % b','.join(sorted(bookmarks - set(repomarks.keys())))
                 )
 
             # If the requested bookmark is not the only one pointing to a
@@ -207,14 +212,14 @@
                     rsrevs = scmutil.bookmarkrevs(repo, marks[0])
                     revs.update(set(rsrevs))
             if not revs:
-                with repo.lock(), repo.transaction('bookmark') as tr:
+                with repo.lock(), repo.transaction(b'bookmark') as tr:
                     bmchanges = [(b, None) for b in bookmarks]
                     repomarks.applychanges(repo, tr, bmchanges)
                 for bookmark in sorted(bookmarks):
-                    ui.write(_("bookmark '%s' deleted\n") % bookmark)
+                    ui.write(_(b"bookmark '%s' deleted\n") % bookmark)
 
         if not revs:
-            raise error.Abort(_('empty revision set'))
+            raise error.Abort(_(b'empty revision set'))
 
         descendants = set(cl.descendants(revs))
         strippedrevs = revs.union(descendants)
@@ -233,7 +238,7 @@
         if q is not None and q.applied:
             # refresh queue state if we're about to strip
             # applied patches
-            if cl.rev(repo.lookup('qtip')) in strippedrevs:
+            if cl.rev(repo.lookup(b'qtip')) in strippedrevs:
                 q.applieddirty = True
                 start = 0
                 end = len(q.applied)
@@ -247,7 +252,7 @@
                 q.savedirty()
 
         revs = sorted(rootnodes)
-        if update and opts.get('keep'):
+        if update and opts.get(b'keep'):
             urev = _findupdatetarget(repo, revs)
             uctx = repo[urev]
 
@@ -261,14 +266,14 @@
 
             # reset files that only changed in the dirstate too
             dirstate = repo.dirstate
-            dirchanges = [f for f in dirstate if dirstate[f] != 'n']
+            dirchanges = [f for f in dirstate if dirstate[f] != b'n']
             changedfiles.extend(dirchanges)
 
             repo.dirstate.rebuild(urev, uctx.manifest(), changedfiles)
             repo.dirstate.write(repo.currenttransaction())
 
             # clear resolve state
-            merge.mergestate.clean(repo, repo['.'].node())
+            merge.mergestate.clean(repo, repo[b'.'].node())
 
             update = False
 
@@ -278,9 +283,9 @@
             revs,
             backup=backup,
             update=update,
-            force=opts.get('force'),
+            force=opts.get(b'force'),
             bookmarks=bookmarks,
-            soft=opts['soft'],
+            soft=opts[b'soft'],
         )
 
     return 0
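
strip.py reads shared mutable state such as opts[b'soft'] with bytes keys only, which is why a half-converted tree is worse than an unconverted one: 'soft' and b'soft' are distinct dictionary keys on Python 3, so a str-keyed writer and a bytes-keyed reader miss each other silently. A self-contained illustration:

    state = {}
    state[b'soft'] = None

    # the str-keyed lookup quietly sees nothing instead of failing loudly
    assert 'soft' not in state
    assert b'soft' in state
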
--- a/hgext/transplant.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/transplant.py	Sun Oct 06 09:48:39 2019 -0400
@@ -55,16 +55,16 @@
 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
 # be specifying the version(s) of Mercurial they are tested with, or
 # leave the attribute unspecified.
-testedwith = 'ships-with-hg-core'
+testedwith = b'ships-with-hg-core'
 
 configtable = {}
 configitem = registrar.configitem(configtable)
 
 configitem(
-    'transplant', 'filter', default=None,
+    b'transplant', b'filter', default=None,
 )
 configitem(
-    'transplant', 'log', default=None,
+    b'transplant', b'log', default=None,
 )
 
 
@@ -90,7 +90,7 @@
         abspath = os.path.join(self.path, self.transplantfile)
         if self.transplantfile and os.path.exists(abspath):
             for line in self.opener.read(self.transplantfile).splitlines():
-                lnode, rnode = map(revlog.bin, line.split(':'))
+                lnode, rnode = map(revlog.bin, line.split(b':'))
                 list = self.transplants.setdefault(rnode, [])
                 list.append(transplantentry(lnode, rnode))
 
@@ -98,11 +98,11 @@
         if self.dirty and self.transplantfile:
             if not os.path.isdir(self.path):
                 os.mkdir(self.path)
-            fp = self.opener(self.transplantfile, 'w')
+            fp = self.opener(self.transplantfile, b'w')
             for list in self.transplants.itervalues():
                 for t in list:
                     l, r = map(nodemod.hex, (t.lnode, t.rnode))
-                    fp.write(l + ':' + r + '\n')
+                    fp.write(l + b':' + r + b'\n')
             fp.close()
         self.dirty = False
 
@@ -124,14 +124,14 @@
 class transplanter(object):
     def __init__(self, ui, repo, opts):
         self.ui = ui
-        self.path = repo.vfs.join('transplant')
+        self.path = repo.vfs.join(b'transplant')
         self.opener = vfsmod.vfs(self.path)
         self.transplants = transplants(
-            self.path, 'transplants', opener=self.opener
+            self.path, b'transplants', opener=self.opener
         )
 
         def getcommiteditor():
-            editform = cmdutil.mergeeditform(repo[None], 'transplant')
+            editform = cmdutil.mergeeditform(repo[None], b'transplant')
             return cmdutil.getcommiteditor(
                 editform=editform, **pycompat.strkwargs(opts)
             )
@@ -175,19 +175,19 @@
         lock = tr = None
         try:
             lock = repo.lock()
-            tr = repo.transaction('transplant')
+            tr = repo.transaction(b'transplant')
             for rev in revs:
                 node = revmap[rev]
-                revstr = '%d:%s' % (rev, nodemod.short(node))
+                revstr = b'%d:%s' % (rev, nodemod.short(node))
 
                 if self.applied(repo, node, p1):
                     self.ui.warn(
-                        _('skipping already applied revision %s\n') % revstr
+                        _(b'skipping already applied revision %s\n') % revstr
                     )
                     continue
 
                 parents = source.changelog.parents(node)
-                if not (opts.get('filter') or opts.get('log')):
+                if not (opts.get(b'filter') or opts.get(b'log')):
                     # If the changeset parent is the same as the
                     # wdir's parent, just pull it.
                     if parents[0] == p1:
@@ -214,17 +214,17 @@
 
                 skipmerge = False
                 if parents[1] != revlog.nullid:
-                    if not opts.get('parent'):
+                    if not opts.get(b'parent'):
                         self.ui.note(
-                            _('skipping merge changeset %d:%s\n')
+                            _(b'skipping merge changeset %d:%s\n')
                             % (rev, nodemod.short(node))
                         )
                         skipmerge = True
                     else:
-                        parent = source.lookup(opts['parent'])
+                        parent = source.lookup(opts[b'parent'])
                         if parent not in parents:
                             raise error.Abort(
-                                _('%s is not a parent of %s')
+                                _(b'%s is not a parent of %s')
                                 % (nodemod.short(parent), nodemod.short(node))
                             )
                 else:
@@ -233,7 +233,7 @@
                 if skipmerge:
                     patchfile = None
                 else:
-                    fd, patchfile = pycompat.mkstemp(prefix='hg-transplant-')
+                    fd, patchfile = pycompat.mkstemp(prefix=b'hg-transplant-')
                     fp = os.fdopen(fd, r'wb')
                     gen = patch.diff(source, parent, node, opts=diffopts)
                     for chunk in gen:
@@ -250,8 +250,8 @@
                                 source.changelog.read(node),
                                 patchfile,
                                 merge=domerge,
-                                log=opts.get('log'),
-                                filter=opts.get('filter'),
+                                log=opts.get(b'log'),
+                                filter=opts.get(b'filter'),
                             )
                         except TransplantError:
                             # Do not rollback, it is up to the user to
@@ -260,12 +260,12 @@
                             raise
                         if n and domerge:
                             self.ui.status(
-                                _('%s merged at %s\n')
+                                _(b'%s merged at %s\n')
                                 % (revstr, nodemod.short(n))
                             )
                         elif n:
                             self.ui.status(
-                                _('%s transplanted to %s\n')
+                                _(b'%s transplanted to %s\n')
                                 % (nodemod.short(node), nodemod.short(n))
                             )
                     finally:
@@ -286,33 +286,33 @@
     def filter(self, filter, node, changelog, patchfile):
         '''arbitrarily rewrite changeset before applying it'''
 
-        self.ui.status(_('filtering %s\n') % patchfile)
+        self.ui.status(_(b'filtering %s\n') % patchfile)
         user, date, msg = (changelog[1], changelog[2], changelog[4])
-        fd, headerfile = pycompat.mkstemp(prefix='hg-transplant-')
+        fd, headerfile = pycompat.mkstemp(prefix=b'hg-transplant-')
         fp = os.fdopen(fd, r'wb')
-        fp.write("# HG changeset patch\n")
-        fp.write("# User %s\n" % user)
-        fp.write("# Date %d %d\n" % date)
-        fp.write(msg + '\n')
+        fp.write(b"# HG changeset patch\n")
+        fp.write(b"# User %s\n" % user)
+        fp.write(b"# Date %d %d\n" % date)
+        fp.write(msg + b'\n')
         fp.close()
 
         try:
             self.ui.system(
-                '%s %s %s'
+                b'%s %s %s'
                 % (
                     filter,
                     procutil.shellquote(headerfile),
                     procutil.shellquote(patchfile),
                 ),
                 environ={
-                    'HGUSER': changelog[1],
-                    'HGREVISION': nodemod.hex(node),
+                    b'HGUSER': changelog[1],
+                    b'HGREVISION': nodemod.hex(node),
                 },
                 onerr=error.Abort,
-                errprefix=_('filter failed'),
-                blockedtag='transplant_filter',
+                errprefix=_(b'filter failed'),
+                blockedtag=b'transplant_filter',
             )
-            user, date, msg = self.parselog(open(headerfile, 'rb'))[1:4]
+            user, date, msg = self.parselog(open(headerfile, b'rb'))[1:4]
         finally:
             os.unlink(headerfile)
 
@@ -323,37 +323,37 @@
     ):
         '''apply the patch in patchfile to the repository as a transplant'''
         (manifest, user, (time, timezone), files, message) = cl[:5]
-        date = "%d %d" % (time, timezone)
-        extra = {'transplant_source': node}
+        date = b"%d %d" % (time, timezone)
+        extra = {b'transplant_source': node}
         if filter:
             (user, date, message) = self.filter(filter, node, cl, patchfile)
 
         if log:
             # we don't translate messages inserted into commits
-            message += '\n(transplanted from %s)' % nodemod.hex(node)
+            message += b'\n(transplanted from %s)' % nodemod.hex(node)
 
-        self.ui.status(_('applying %s\n') % nodemod.short(node))
-        self.ui.note('%s %s\n%s\n' % (user, date, message))
+        self.ui.status(_(b'applying %s\n') % nodemod.short(node))
+        self.ui.note(b'%s %s\n%s\n' % (user, date, message))
 
         if not patchfile and not merge:
-            raise error.Abort(_('can only omit patchfile if merging'))
+            raise error.Abort(_(b'can only omit patchfile if merging'))
         if patchfile:
             try:
                 files = set()
                 patch.patch(self.ui, repo, patchfile, files=files, eolmode=None)
                 files = list(files)
             except Exception as inst:
-                seriespath = os.path.join(self.path, 'series')
+                seriespath = os.path.join(self.path, b'series')
                 if os.path.exists(seriespath):
                     os.unlink(seriespath)
                 p1 = repo.dirstate.p1()
                 p2 = node
                 self.log(user, date, message, p1, p2, merge=merge)
-                self.ui.write(stringutil.forcebytestr(inst) + '\n')
+                self.ui.write(stringutil.forcebytestr(inst) + b'\n')
                 raise TransplantError(
                     _(
-                        'fix up the working directory and run '
-                        'hg transplant --continue'
+                        b'fix up the working directory and run '
+                        b'hg transplant --continue'
                     )
                 )
         else:
@@ -375,7 +375,7 @@
         )
         if not n:
             self.ui.warn(
-                _('skipping emptied changeset %s\n') % nodemod.short(node)
+                _(b'skipping emptied changeset %s\n') % nodemod.short(node)
             )
             return None
         if not merge:
@@ -384,22 +384,23 @@
         return n
 
     def canresume(self):
-        return os.path.exists(os.path.join(self.path, 'journal'))
+        return os.path.exists(os.path.join(self.path, b'journal'))
 
     def resume(self, repo, source, opts):
         '''recover last transaction and apply remaining changesets'''
-        if os.path.exists(os.path.join(self.path, 'journal')):
+        if os.path.exists(os.path.join(self.path, b'journal')):
             n, node = self.recover(repo, source, opts)
             if n:
                 self.ui.status(
-                    _('%s transplanted as %s\n')
+                    _(b'%s transplanted as %s\n')
                     % (nodemod.short(node), nodemod.short(n))
                 )
             else:
                 self.ui.status(
-                    _('%s skipped due to empty diff\n') % (nodemod.short(node),)
+                    _(b'%s skipped due to empty diff\n')
+                    % (nodemod.short(node),)
                 )
-        seriespath = os.path.join(self.path, 'series')
+        seriespath = os.path.join(self.path, b'series')
         if not os.path.exists(seriespath):
             self.transplants.write()
             return
@@ -417,26 +418,26 @@
         merge = False
 
         if not user or not date or not message or not parents[0]:
-            raise error.Abort(_('transplant log file is corrupt'))
+            raise error.Abort(_(b'transplant log file is corrupt'))
 
         parent = parents[0]
         if len(parents) > 1:
-            if opts.get('parent'):
-                parent = source.lookup(opts['parent'])
+            if opts.get(b'parent'):
+                parent = source.lookup(opts[b'parent'])
                 if parent not in parents:
                     raise error.Abort(
-                        _('%s is not a parent of %s')
+                        _(b'%s is not a parent of %s')
                         % (nodemod.short(parent), nodemod.short(node))
                     )
             else:
                 merge = True
 
-        extra = {'transplant_source': node}
+        extra = {b'transplant_source': node}
         try:
             p1 = repo.dirstate.p1()
             if p1 != parent:
                 raise error.Abort(
-                    _('working directory not at transplant ' 'parent %s')
+                    _(b'working directory not at transplant ' b'parent %s')
                     % nodemod.hex(parent)
                 )
             if merge:
@@ -451,7 +452,7 @@
                     editor=self.getcommiteditor(),
                 )
                 if not n:
-                    raise error.Abort(_('commit failed'))
+                    raise error.Abort(_(b'commit failed'))
                 if not merge:
                     self.transplants.set(n, node)
             else:
@@ -467,11 +468,11 @@
     def stop(self, ui, repo):
         """logic to stop an interrupted transplant"""
         if self.canresume():
-            startctx = repo['.']
+            startctx = repo[b'.']
             hg.updaterepo(repo, startctx.node(), overwrite=True)
-            ui.status(_("stopped the interrupted transplant\n"))
+            ui.status(_(b"stopped the interrupted transplant\n"))
             ui.status(
-                _("working directory is now at %s\n") % startctx.hex()[:12]
+                _(b"working directory is now at %s\n") % startctx.hex()[:12]
             )
             self.unlog()
             return 0
@@ -480,8 +481,8 @@
         nodes = []
         merges = []
         cur = nodes
-        for line in self.opener.read('series').splitlines():
-            if line.startswith('# Merges'):
+        for line in self.opener.read(b'series').splitlines():
+            if line.startswith(b'# Merges'):
                 cur = merges
                 continue
             cur.append(revlog.bin(line))
@@ -494,13 +495,13 @@
 
         if not os.path.isdir(self.path):
             os.mkdir(self.path)
-        series = self.opener('series', 'w')
+        series = self.opener(b'series', b'w')
         for rev in sorted(revmap):
-            series.write(nodemod.hex(revmap[rev]) + '\n')
+            series.write(nodemod.hex(revmap[rev]) + b'\n')
         if merges:
-            series.write('# Merges\n')
+            series.write(b'# Merges\n')
             for m in merges:
-                series.write(nodemod.hex(m) + '\n')
+                series.write(nodemod.hex(m) + b'\n')
         series.close()
 
     def parselog(self, fp):
@@ -513,42 +514,44 @@
         for line in fp.read().splitlines():
             if inmsg:
                 message.append(line)
-            elif line.startswith('# User '):
+            elif line.startswith(b'# User '):
                 user = line[7:]
-            elif line.startswith('# Date '):
+            elif line.startswith(b'# Date '):
                 date = line[7:]
-            elif line.startswith('# Node ID '):
+            elif line.startswith(b'# Node ID '):
                 node = revlog.bin(line[10:])
-            elif line.startswith('# Parent '):
+            elif line.startswith(b'# Parent '):
                 parents.append(revlog.bin(line[9:]))
-            elif not line.startswith('# '):
+            elif not line.startswith(b'# '):
                 inmsg = True
                 message.append(line)
         if None in (user, date):
-            raise error.Abort(_("filter corrupted changeset (no user or date)"))
-        return (node, user, date, '\n'.join(message), parents)
+            raise error.Abort(
+                _(b"filter corrupted changeset (no user or date)")
+            )
+        return (node, user, date, b'\n'.join(message), parents)
 
     def log(self, user, date, message, p1, p2, merge=False):
         '''journal changelog metadata for later recover'''
 
         if not os.path.isdir(self.path):
             os.mkdir(self.path)
-        fp = self.opener('journal', 'w')
-        fp.write('# User %s\n' % user)
-        fp.write('# Date %s\n' % date)
-        fp.write('# Node ID %s\n' % nodemod.hex(p2))
-        fp.write('# Parent ' + nodemod.hex(p1) + '\n')
+        fp = self.opener(b'journal', b'w')
+        fp.write(b'# User %s\n' % user)
+        fp.write(b'# Date %s\n' % date)
+        fp.write(b'# Node ID %s\n' % nodemod.hex(p2))
+        fp.write(b'# Parent ' + nodemod.hex(p1) + b'\n')
         if merge:
-            fp.write('# Parent ' + nodemod.hex(p2) + '\n')
-        fp.write(message.rstrip() + '\n')
+            fp.write(b'# Parent ' + nodemod.hex(p2) + b'\n')
+        fp.write(message.rstrip() + b'\n')
         fp.close()
 
     def readlog(self):
-        return self.parselog(self.opener('journal'))
+        return self.parselog(self.opener(b'journal'))
 
     def unlog(self):
         '''remove changelog journal'''
-        absdst = os.path.join(self.path, 'journal')
+        absdst = os.path.join(self.path, b'journal')
         if os.path.exists(absdst):
             os.unlink(absdst)
 
@@ -559,7 +562,7 @@
             if source.changelog.parents(node)[1] != revlog.nullid:
                 return False
             extra = source.changelog.read(node)[5]
-            cnode = extra.get('transplant_source')
+            cnode = extra.get(b'transplant_source')
             if cnode and self.applied(repo, cnode, root):
                 return False
             return True
@@ -580,37 +583,37 @@
     transplants = []
     merges = []
     prompt = _(
-        'apply changeset? [ynmpcq?]:'
-        '$$ &yes, transplant this changeset'
-        '$$ &no, skip this changeset'
-        '$$ &merge at this changeset'
-        '$$ show &patch'
-        '$$ &commit selected changesets'
-        '$$ &quit and cancel transplant'
-        '$$ &? (show this help)'
+        b'apply changeset? [ynmpcq?]:'
+        b'$$ &yes, transplant this changeset'
+        b'$$ &no, skip this changeset'
+        b'$$ &merge at this changeset'
+        b'$$ show &patch'
+        b'$$ &commit selected changesets'
+        b'$$ &quit and cancel transplant'
+        b'$$ &? (show this help)'
     )
     for node in nodes:
         displayer.show(repo[node])
         action = None
         while not action:
             choice = ui.promptchoice(prompt)
-            action = 'ynmpcq?'[choice : choice + 1]
-            if action == '?':
+            action = b'ynmpcq?'[choice : choice + 1]
+            if action == b'?':
                 for c, t in ui.extractchoices(prompt)[1]:
-                    ui.write('%s: %s\n' % (c, t))
+                    ui.write(b'%s: %s\n' % (c, t))
                 action = None
-            elif action == 'p':
+            elif action == b'p':
                 parent = repo.changelog.parents(node)[0]
                 for chunk in patch.diff(repo, parent, node):
                     ui.write(chunk)
                 action = None
-        if action == 'y':
+        if action == b'y':
             transplants.append(node)
-        elif action == 'm':
+        elif action == b'm':
             merges.append(node)
-        elif action == 'c':
+        elif action == b'c':
             break
-        elif action == 'q':
+        elif action == b'q':
             transplants = ()
             merges = ()
             break
@@ -619,37 +622,58 @@
 
 
 @command(
-    'transplant',
+    b'transplant',
     [
-        ('s', 'source', '', _('transplant changesets from REPO'), _('REPO')),
-        ('b', 'branch', [], _('use this source changeset as head'), _('REV')),
+        (
+            b's',
+            b'source',
+            b'',
+            _(b'transplant changesets from REPO'),
+            _(b'REPO'),
+        ),
         (
-            'a',
-            'all',
+            b'b',
+            b'branch',
+            [],
+            _(b'use this source changeset as head'),
+            _(b'REV'),
+        ),
+        (
+            b'a',
+            b'all',
             None,
-            _('pull all changesets up to the --branch revisions'),
+            _(b'pull all changesets up to the --branch revisions'),
         ),
-        ('p', 'prune', [], _('skip over REV'), _('REV')),
-        ('m', 'merge', [], _('merge at REV'), _('REV')),
+        (b'p', b'prune', [], _(b'skip over REV'), _(b'REV')),
+        (b'm', b'merge', [], _(b'merge at REV'), _(b'REV')),
         (
-            '',
-            'parent',
-            '',
-            _('parent to choose when transplanting merge'),
-            _('REV'),
+            b'',
+            b'parent',
+            b'',
+            _(b'parent to choose when transplanting merge'),
+            _(b'REV'),
         ),
-        ('e', 'edit', False, _('invoke editor on commit messages')),
-        ('', 'log', None, _('append transplant info to log message')),
-        ('', 'stop', False, _('stop interrupted transplant')),
+        (b'e', b'edit', False, _(b'invoke editor on commit messages')),
+        (b'', b'log', None, _(b'append transplant info to log message')),
+        (b'', b'stop', False, _(b'stop interrupted transplant')),
+        (
+            b'c',
+            b'continue',
+            None,
+            _(b'continue last transplant session ' b'after fixing conflicts'),
+        ),
         (
-            'c',
-            'continue',
-            None,
-            _('continue last transplant session ' 'after fixing conflicts'),
+            b'',
+            b'filter',
+            b'',
+            _(b'filter changesets through command'),
+            _(b'CMD'),
         ),
-        ('', 'filter', '', _('filter changesets through command'), _('CMD')),
     ],
-    _('hg transplant [-s REPO] [-b BRANCH [-a]] [-p REV] ' '[-m REV] [REV]...'),
+    _(
+        b'hg transplant [-s REPO] [-b BRANCH [-a]] [-p REV] '
+        b'[-m REV] [REV]...'
+    ),
     helpcategory=command.CATEGORY_CHANGE_MANAGEMENT,
 )
 def transplant(ui, repo, *revs, **opts):
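
Each entry in the flag table registered above follows the
(shortname, longname, default, help, value label) layout of Mercurial's
option tuples; after byteification every string element must be bytes. A
small self-contained sketch, with a stand-in for the i18n helper _():

    def _(s):
        return s  # stand-in for mercurial.i18n._ in this sketch

    flag = (b's', b'source', b'', _(b'transplant changesets from REPO'), _(b'REPO'))
    assert all(isinstance(x, bytes) for x in flag)
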
@@ -728,74 +752,74 @@
                 yield node
 
     def checkopts(opts, revs):
-        if opts.get('continue'):
-            if opts.get('branch') or opts.get('all') or opts.get('merge'):
+        if opts.get(b'continue'):
+            if opts.get(b'branch') or opts.get(b'all') or opts.get(b'merge'):
                 raise error.Abort(
                     _(
-                        '--continue is incompatible with '
-                        '--branch, --all and --merge'
+                        b'--continue is incompatible with '
+                        b'--branch, --all and --merge'
                     )
                 )
             return
-        if opts.get('stop'):
-            if opts.get('branch') or opts.get('all') or opts.get('merge'):
+        if opts.get(b'stop'):
+            if opts.get(b'branch') or opts.get(b'all') or opts.get(b'merge'):
                 raise error.Abort(
                     _(
-                        '--stop is incompatible with '
-                        '--branch, --all and --merge'
+                        b'--stop is incompatible with '
+                        b'--branch, --all and --merge'
                     )
                 )
             return
         if not (
-            opts.get('source')
+            opts.get(b'source')
             or revs
-            or opts.get('merge')
-            or opts.get('branch')
+            or opts.get(b'merge')
+            or opts.get(b'branch')
         ):
             raise error.Abort(
                 _(
-                    'no source URL, branch revision, or revision '
-                    'list provided'
+                    b'no source URL, branch revision, or revision '
+                    b'list provided'
                 )
             )
-        if opts.get('all'):
-            if not opts.get('branch'):
-                raise error.Abort(_('--all requires a branch revision'))
+        if opts.get(b'all'):
+            if not opts.get(b'branch'):
+                raise error.Abort(_(b'--all requires a branch revision'))
             if revs:
                 raise error.Abort(
-                    _('--all is incompatible with a ' 'revision list')
+                    _(b'--all is incompatible with a ' b'revision list')
                 )
 
     opts = pycompat.byteskwargs(opts)
     checkopts(opts, revs)
 
-    if not opts.get('log'):
+    if not opts.get(b'log'):
         # deprecated config: transplant.log
-        opts['log'] = ui.config('transplant', 'log')
-    if not opts.get('filter'):
+        opts[b'log'] = ui.config(b'transplant', b'log')
+    if not opts.get(b'filter'):
         # deprecated config: transplant.filter
-        opts['filter'] = ui.config('transplant', 'filter')
+        opts[b'filter'] = ui.config(b'transplant', b'filter')
 
     tp = transplanter(ui, repo, opts)
 
     p1 = repo.dirstate.p1()
     if len(repo) > 0 and p1 == revlog.nullid:
-        raise error.Abort(_('no revision checked out'))
-    if opts.get('continue'):
+        raise error.Abort(_(b'no revision checked out'))
+    if opts.get(b'continue'):
         if not tp.canresume():
-            raise error.Abort(_('no transplant to continue'))
-    elif opts.get('stop'):
+            raise error.Abort(_(b'no transplant to continue'))
+    elif opts.get(b'stop'):
         if not tp.canresume():
-            raise error.Abort(_('no interrupted transplant found'))
+            raise error.Abort(_(b'no interrupted transplant found'))
         return tp.stop(ui, repo)
     else:
         cmdutil.checkunfinished(repo)
         cmdutil.bailifchanged(repo)
 
-    sourcerepo = opts.get('source')
+    sourcerepo = opts.get(b'source')
     if sourcerepo:
         peer = hg.peer(repo, opts, ui.expandpath(sourcerepo))
-        heads = pycompat.maplist(peer.lookup, opts.get('branch', ()))
+        heads = pycompat.maplist(peer.lookup, opts.get(b'branch', ()))
         target = set(heads)
         for r in revs:
             try:
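
The opts = pycompat.byteskwargs(opts) call in the hunk above exists because
**opts arrives with native str keys on Python 3, while the byteified lookups
use keys like b'continue'. Roughly what the helper does, shown here as a
simplified stand-in (the real implementation lives in mercurial.pycompat):

    def byteskwargs_sketch(dic):
        # re-key a **kwargs dict from native str to bytes (latin-1 round-trip)
        return {k.encode('latin-1'): v for k, v in dic.items()}

    opts = byteskwargs_sketch({'log': None, 'filter': '', 'continue': False})
    assert b'continue' in opts and 'continue' not in opts
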
@@ -807,36 +831,36 @@
         )
     else:
         source = repo
-        heads = pycompat.maplist(source.lookup, opts.get('branch', ()))
+        heads = pycompat.maplist(source.lookup, opts.get(b'branch', ()))
         cleanupfn = None
 
     try:
-        if opts.get('continue'):
+        if opts.get(b'continue'):
             tp.resume(repo, source, opts)
             return
 
         tf = tp.transplantfilter(repo, source, p1)
-        if opts.get('prune'):
+        if opts.get(b'prune'):
             prune = set(
                 source[r].node()
-                for r in scmutil.revrange(source, opts.get('prune'))
+                for r in scmutil.revrange(source, opts.get(b'prune'))
             )
             matchfn = lambda x: tf(x) and x not in prune
         else:
             matchfn = tf
-        merges = pycompat.maplist(source.lookup, opts.get('merge', ()))
+        merges = pycompat.maplist(source.lookup, opts.get(b'merge', ()))
         revmap = {}
         if revs:
             for r in scmutil.revrange(source, revs):
                 revmap[int(r)] = source[r].node()
-        elif opts.get('all') or not merges:
+        elif opts.get(b'all') or not merges:
             if source != repo:
                 alltransplants = incwalk(source, csets, match=matchfn)
             else:
                 alltransplants = transplantwalk(
                     source, p1, heads, match=matchfn
                 )
-            if opts.get('all'):
+            if opts.get(b'all'):
                 revs = alltransplants
             else:
                 revs, newmerges = browserevs(ui, source, alltransplants, opts)
@@ -863,7 +887,7 @@
 revsetpredicate = registrar.revsetpredicate()
 
 
-@revsetpredicate('transplanted([set])')
+@revsetpredicate(b'transplanted([set])')
 def revsettransplanted(repo, subset, x):
     """Transplanted changesets in set, or all transplanted changesets.
     """
@@ -872,33 +896,33 @@
     else:
         s = subset
     return smartset.baseset(
-        [r for r in s if repo[r].extra().get('transplant_source')]
+        [r for r in s if repo[r].extra().get(b'transplant_source')]
     )
 
 
 templatekeyword = registrar.templatekeyword()
 
 
-@templatekeyword('transplanted', requires={'ctx'})
+@templatekeyword(b'transplanted', requires={b'ctx'})
 def kwtransplanted(context, mapping):
     """String. The node identifier of the transplanted
     changeset if any."""
-    ctx = context.resource(mapping, 'ctx')
-    n = ctx.extra().get('transplant_source')
-    return n and nodemod.hex(n) or ''
+    ctx = context.resource(mapping, b'ctx')
+    n = ctx.extra().get(b'transplant_source')
+    return n and nodemod.hex(n) or b''
 
 
 def extsetup(ui):
     statemod.addunfinished(
-        'transplant',
-        fname='transplant/journal',
+        b'transplant',
+        fname=b'transplant/journal',
         clearable=True,
         continuefunc=continuecmd,
         statushint=_(
-            'To continue:    hg transplant --continue\n'
-            'To stop:        hg transplant --stop'
+            b'To continue:    hg transplant --continue\n'
+            b'To stop:        hg transplant --stop'
         ),
-        cmdhint=_("use 'hg transplant --continue' or 'hg transplant --stop'"),
+        cmdhint=_(b"use 'hg transplant --continue' or 'hg transplant --stop'"),
     )
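
In the kwtransplanted hunk above, the template keyword now returns bytes in
both branches: a hex node id or b''. Since nodemod.hex is essentially
binascii.hexlify, the idiom can be sketched in isolation (the extra dicts
below are made up for illustration):

    from binascii import hexlify

    def kwtransplanted_sketch(extra):
        n = extra.get(b'transplant_source')
        return n and hexlify(n) or b''

    assert kwtransplanted_sketch({}) == b''
    assert kwtransplanted_sketch({b'transplant_source': b'\x12\xab'}) == b'12ab'
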
 
 
--- a/hgext/uncommit.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/uncommit.py	Sun Oct 06 09:48:39 2019 -0400
@@ -43,17 +43,17 @@
 configitem = registrar.configitem(configtable)
 
 configitem(
-    'experimental', 'uncommitondirtywdir', default=False,
+    b'experimental', b'uncommitondirtywdir', default=False,
 )
 configitem(
-    'experimental', 'uncommit.keep', default=False,
+    b'experimental', b'uncommit.keep', default=False,
 )
 
 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
 # be specifying the version(s) of Mercurial they are tested with, or
 # leave the attribute unspecified.
-testedwith = 'ships-with-hg-core'
+testedwith = b'ships-with-hg-core'
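
Both halves of each configitem registration above become bytes, and later
reads such as ui.configbool(b'experimental', b'uncommit.keep') must use the
same bytes keys. A tiny stand-in for that pairing (the registry dict is
hypothetical, not Mercurial's real config machinery):

    registry = {}

    def configitem_sketch(section, name, default=None):
        registry[(section, name)] = default

    configitem_sketch(b'experimental', b'uncommit.keep', default=False)
    assert registry[(b'experimental', b'uncommit.keep')] is False
    assert ('experimental', 'uncommit.keep') not in registry  # str keys miss on Python 3
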
 
 
 def _commitfiltered(
@@ -96,7 +96,7 @@
         return mctx
 
     if not files:
-        repo.ui.status(_("note: keeping empty commit\n"))
+        repo.ui.status(_(b"note: keeping empty commit\n"))
 
     if message is None:
         message = ctx.description()
@@ -119,14 +119,14 @@
 
 
 @command(
-    'uncommit',
+    b'uncommit',
     [
-        ('', 'keep', None, _('allow an empty commit after uncommitting')),
+        (b'', b'keep', None, _(b'allow an empty commit after uncommitting')),
         (
-            '',
-            'allow-dirty-working-copy',
+            b'',
+            b'allow-dirty-working-copy',
             False,
-            _('allow uncommit with outstanding changes'),
+            _(b'allow uncommit with outstanding changes'),
         ),
         (b'n', b'note', b'', _(b'store a note on uncommit'), _(b'TEXT')),
     ]
@@ -134,7 +134,7 @@
     + commands.commitopts
     + commands.commitopts2
     + commands.commitopts3,
-    _('[OPTION]... [FILE]...'),
+    _(b'[OPTION]... [FILE]...'),
     helpcategory=command.CATEGORY_CHANGE_MANAGEMENT,
 )
 def uncommit(ui, repo, *pats, **opts):
@@ -158,17 +158,17 @@
         m, a, r, d = repo.status()[:4]
         isdirtypath = any(set(m + a + r + d) & set(pats))
         allowdirtywcopy = opts[
-            'allow_dirty_working_copy'
-        ] or repo.ui.configbool('experimental', 'uncommitondirtywdir')
+            b'allow_dirty_working_copy'
+        ] or repo.ui.configbool(b'experimental', b'uncommitondirtywdir')
         if not allowdirtywcopy and (not pats or isdirtypath):
             cmdutil.bailifchanged(
                 repo,
-                hint=_('requires ' '--allow-dirty-working-copy to uncommit'),
+                hint=_(b'requires ' b'--allow-dirty-working-copy to uncommit'),
             )
-        old = repo['.']
-        rewriteutil.precheck(repo, [old.rev()], 'uncommit')
+        old = repo[b'.']
+        rewriteutil.precheck(repo, [old.rev()], b'uncommit')
         if len(old.parents()) > 1:
-            raise error.Abort(_("cannot uncommit merge changeset"))
+            raise error.Abort(_(b"cannot uncommit merge changeset"))
 
         match = scmutil.match(old, pats, opts)
 
@@ -200,17 +200,19 @@
                     hint=hint,
                 )
 
-        with repo.transaction('uncommit'):
+        with repo.transaction(b'uncommit'):
             if not (opts[b'message'] or opts[b'logfile']):
                 opts[b'message'] = old.description()
             message = cmdutil.logmessage(ui, opts)
 
             keepcommit = pats
             if not keepcommit:
-                if opts.get('keep') is not None:
-                    keepcommit = opts.get('keep')
+                if opts.get(b'keep') is not None:
+                    keepcommit = opts.get(b'keep')
                 else:
-                    keepcommit = ui.configbool('experimental', 'uncommit.keep')
+                    keepcommit = ui.configbool(
+                        b'experimental', b'uncommit.keep'
+                    )
             newid = _commitfiltered(
                 repo,
                 old,
@@ -221,7 +223,7 @@
                 date=opts.get(b'date'),
             )
             if newid is None:
-                ui.status(_("nothing to uncommit\n"))
+                ui.status(_(b"nothing to uncommit\n"))
                 return 1
 
             mapping = {}
@@ -235,7 +237,7 @@
             with repo.dirstate.parentchange():
                 scmutil.movedirstate(repo, repo[newid], match)
 
-            scmutil.cleanupnodes(repo, mapping, 'uncommit', fixphase=True)
+            scmutil.cleanupnodes(repo, mapping, b'uncommit', fixphase=True)
 
 
 def predecessormarkers(ctx):
@@ -245,7 +247,7 @@
 
 
 @command(
-    'unamend',
+    b'unamend',
     [],
     helpcategory=command.CATEGORY_CHANGE_MANAGEMENT,
     helpbasic=True,
@@ -260,17 +262,17 @@
     """
 
     unfi = repo.unfiltered()
-    with repo.wlock(), repo.lock(), repo.transaction('unamend'):
+    with repo.wlock(), repo.lock(), repo.transaction(b'unamend'):
 
         # identify the commit from which to unamend
-        curctx = repo['.']
+        curctx = repo[b'.']
 
-        rewriteutil.precheck(repo, [curctx.rev()], 'unamend')
+        rewriteutil.precheck(repo, [curctx.rev()], b'unamend')
 
         # identify the commit to which to unamend
         markers = list(predecessormarkers(curctx))
         if len(markers) != 1:
-            e = _("changeset must have one predecessor, found %i predecessors")
+            e = _(b"changeset must have one predecessor, found %i predecessors")
             raise error.Abort(e % len(markers))
 
         prednode = markers[0].prednode()
@@ -279,7 +281,7 @@
         # add an extra so that we get a new hash
         # note: allowing unamend to undo an unamend is an intentional feature
         extras = predctx.extra()
-        extras['unamend_source'] = curctx.hex()
+        extras[b'unamend_source'] = curctx.hex()
 
         def filectxfn(repo, ctx_, path):
             try:
@@ -306,4 +308,4 @@
             scmutil.movedirstate(repo, newpredctx)
 
         mapping = {curctx.node(): (newprednode,)}
-        scmutil.cleanupnodes(repo, mapping, 'unamend', fixphase=True)
+        scmutil.cleanupnodes(repo, mapping, b'unamend', fixphase=True)
--- a/hgext/win32mbcs.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/win32mbcs.py	Sun Oct 06 09:48:39 2019 -0400
@@ -61,7 +61,7 @@
 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
 # be specifying the version(s) of Mercurial they are tested with, or
 # leave the attribute unspecified.
-testedwith = 'ships-with-hg-core'
+testedwith = b'ships-with-hg-core'
 
 configtable = {}
 configitem = registrar.configitem(configtable)
@@ -69,7 +69,7 @@
 # Encoding.encoding may be updated by the --encoding option.
 # Use a lambda to delay the resolution.
 configitem(
-    'win32mbcs', 'encoding', default=lambda: encoding.encoding,
+    b'win32mbcs', b'encoding', default=lambda: encoding.encoding,
 )
 
 _encoding = None  # see extsetup
@@ -80,7 +80,7 @@
         uarg = arg.decode(_encoding)
         if arg == uarg.encode(_encoding):
             return uarg
-        raise UnicodeError("Not local encoding")
+        raise UnicodeError(b"Not local encoding")
     elif isinstance(arg, tuple):
         return tuple(map(decode, arg))
     elif isinstance(arg, list):
@@ -110,7 +110,7 @@
         us = decode(s)
     except UnicodeError:
         us = s
-    if us and us[-1] not in ':/\\':
+    if us and us[-1] not in b':/\\':
         s += pycompat.ossep
     return s
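
One subtlety in appendsep above: when decode() failed, us is still bytes, so
on Python 3 us[-1] is an integer, and int-in-bytes membership checks byte
values. When decode() succeeded, us is a unicode str, and str-in-bytes raises
TypeError on Python 3, so this path effectively remains Python 2 territory.
Illustration:

    s = b'C:'
    assert s[-1] == 0x3A           # indexing bytes yields the byte value
    assert s[-1] in b':/\\'        # int-in-bytes membership works
    try:
        ':' in b':/\\'             # str-in-bytes raises on Python 3
    except TypeError:
        pass
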
 
@@ -127,7 +127,7 @@
         return enc(func(*dec(args), **dec(kwds)))
     except UnicodeError:
         raise error.Abort(
-            _("[win32mbcs] filename conversion failed with" " %s encoding\n")
+            _(b"[win32mbcs] filename conversion failed with" b" %s encoding\n")
             % _encoding
         )
 
@@ -146,13 +146,13 @@
     if args:
         args = list(args)
         args[0] = appendsep(args[0])
-    if 'path' in kwds:
-        kwds['path'] = appendsep(kwds['path'])
+    if b'path' in kwds:
+        kwds[b'path'] = appendsep(kwds[b'path'])
     return func(*args, **kwds)
 
 
 def wrapname(name, wrapper):
-    module, name = name.rsplit('.', 1)
+    module, name = name.rsplit(b'.', 1)
     module = sys.modules[module]
     func = getattr(module, name)
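
wrapname above now splits on b'.', so module is bytes; sys.modules, however,
is keyed by native str, and getattr() on Python 3 also insists on str
attribute names. The byteified form therefore only resolves names under
Python 2, a hedged observation illustrated here:

    import sys

    assert 'sys' in sys.modules
    assert b'sys' not in sys.modules  # bytes keys miss the str-keyed dict on Python 3
    try:
        getattr(sys, b'argv')         # Python 3: attribute name must be str
    except TypeError:
        pass
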
 
@@ -166,7 +166,7 @@
 # List of functions to be wrapped.
 # NOTE: os.path.dirname() and os.path.basename() are safe because
 #       they use the result of os.path.split()
-funcs = '''os.path.join os.path.split os.path.splitext
+funcs = b'''os.path.join os.path.split os.path.splitext
  os.path.normpath os.makedirs mercurial.util.endswithsep
  mercurial.util.splitpath mercurial.util.fscasesensitive
  mercurial.util.fspath mercurial.util.pconvert mercurial.util.normpath
@@ -176,14 +176,14 @@
 # These functions are required to be called with local encoded strings,
 # because they expect local encoded string arguments and misbehave when
 # given unicode strings.
-rfuncs = '''mercurial.encoding.upper mercurial.encoding.lower
+rfuncs = b'''mercurial.encoding.upper mercurial.encoding.lower
  mercurial.util._filenamebytestr'''
 
 # List of Windows specific functions to be wrapped.
-winfuncs = '''os.path.splitunc'''
+winfuncs = b'''os.path.splitunc'''
 
 # codec and alias names of sjis and big5 to be faked.
-problematic_encodings = '''big5 big5-tw csbig5 big5hkscs big5-hkscs
+problematic_encodings = b'''big5 big5-tw csbig5 big5hkscs big5-hkscs
  hkscs cp932 932 ms932 mskanji ms-kanji shift_jis csshiftjis shiftjis
  sjis s_jis shift_jis_2004 shiftjis2004 sjis_2004 sjis2004
  shift_jisx0213 shiftjisx0213 sjisx0213 s_jisx0213 950 cp950 ms950 '''
@@ -192,13 +192,13 @@
 def extsetup(ui):
     # TODO: decide use of config section for this extension
     if (not os.path.supports_unicode_filenames) and (
-        pycompat.sysplatform != 'cygwin'
+        pycompat.sysplatform != b'cygwin'
     ):
-        ui.warn(_("[win32mbcs] cannot activate on this platform.\n"))
+        ui.warn(_(b"[win32mbcs] cannot activate on this platform.\n"))
         return
     # determine encoding for filename
     global _encoding
-    _encoding = ui.config('win32mbcs', 'encoding')
+    _encoding = ui.config(b'win32mbcs', b'encoding')
     # faking is only needed for the relevant environment.
     if _encoding.lower() in problematic_encodings.split():
         for f in funcs.split():
@@ -206,13 +206,13 @@
         if pycompat.iswindows:
             for f in winfuncs.split():
                 wrapname(f, wrapper)
-        wrapname("mercurial.util.listdir", wrapperforlistdir)
-        wrapname("mercurial.windows.listdir", wrapperforlistdir)
+        wrapname(b"mercurial.util.listdir", wrapperforlistdir)
+        wrapname(b"mercurial.windows.listdir", wrapperforlistdir)
         # wrap functions to be called with local byte string arguments
         for f in rfuncs.split():
             wrapname(f, reversewrapper)
         # Check sys.argv manually instead of using ui.debug() because
         # command line options are not yet applied when
         # extensions.loadall() is called.
-        if '--debug' in sys.argv:
-            ui.write("[win32mbcs] activated with encoding: %s\n" % _encoding)
+        if b'--debug' in sys.argv:
+            ui.write(b"[win32mbcs] activated with encoding: %s\n" % _encoding)
--- a/hgext/win32text.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/win32text.py	Sun Oct 06 09:48:39 2019 -0400
@@ -56,20 +56,20 @@
 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
 # be specifying the version(s) of Mercurial they are tested with, or
 # leave the attribute unspecified.
-testedwith = 'ships-with-hg-core'
+testedwith = b'ships-with-hg-core'
 
 configtable = {}
 configitem = registrar.configitem(configtable)
 
 configitem(
-    'win32text', 'warn', default=True,
+    b'win32text', b'warn', default=True,
 )
 
 # regexp for a single LF without a preceding CR.
-re_single_lf = re.compile('(^|[^\r])\n', re.MULTILINE)
+re_single_lf = re.compile(b'(^|[^\r])\n', re.MULTILINE)
 
-newlinestr = {'\r\n': 'CRLF', '\r': 'CR'}
-filterstr = {'\r\n': 'clever', '\r': 'mac'}
+newlinestr = {b'\r\n': b'CRLF', b'\r': b'CR'}
+filterstr = {b'\r\n': b'clever', b'\r': b'mac'}
 
 
 def checknewline(s, newline, ui=None, repo=None, filename=None):
@@ -80,32 +80,32 @@
     if newline in s and ui and filename and repo:
         ui.warn(
             _(
-                'WARNING: %s already has %s line endings\n'
-                'and does not need EOL conversion by the win32text plugin.\n'
-                'Before your next commit, please reconsider your '
-                'encode/decode settings in \nMercurial.ini or %s.\n'
+                b'WARNING: %s already has %s line endings\n'
+                b'and does not need EOL conversion by the win32text plugin.\n'
+                b'Before your next commit, please reconsider your '
+                b'encode/decode settings in \nMercurial.ini or %s.\n'
             )
-            % (filename, newlinestr[newline], repo.vfs.join('hgrc'))
+            % (filename, newlinestr[newline], repo.vfs.join(b'hgrc'))
         )
 
 
 def dumbdecode(s, cmd, **kwargs):
-    checknewline(s, '\r\n', **kwargs)
+    checknewline(s, b'\r\n', **kwargs)
     # replace single LF to CRLF
-    return re_single_lf.sub('\\1\r\n', s)
+    return re_single_lf.sub(b'\\1\r\n', s)
 
 
 def dumbencode(s, cmd):
-    return s.replace('\r\n', '\n')
+    return s.replace(b'\r\n', b'\n')
 
 
 def macdumbdecode(s, cmd, **kwargs):
-    checknewline(s, '\r', **kwargs)
-    return s.replace('\n', '\r')
+    checknewline(s, b'\r', **kwargs)
+    return s.replace(b'\n', b'\r')
 
 
 def macdumbencode(s, cmd):
-    return s.replace('\r', '\n')
+    return s.replace(b'\r', b'\n')
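
The codecs above now operate purely on bytes: re_single_lf pairs a bytes
pattern with bytes input (mixing str and bytes in the re module raises
TypeError on Python 3), and dumbdecode's substitution inserts a CR only
where a bare LF appears. In isolation:

    import re

    re_single_lf = re.compile(b'(^|[^\r])\n', re.MULTILINE)

    def dumbdecode_sketch(s):
        return re_single_lf.sub(b'\\1\r\n', s)

    assert dumbdecode_sketch(b'a\nb\r\nc\n') == b'a\r\nb\r\nc\r\n'
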
 
 
 def cleverdecode(s, cmd, **kwargs):
@@ -133,14 +133,14 @@
 
 
 _filters = {
-    'dumbdecode:': dumbdecode,
-    'dumbencode:': dumbencode,
-    'cleverdecode:': cleverdecode,
-    'cleverencode:': cleverencode,
-    'macdumbdecode:': macdumbdecode,
-    'macdumbencode:': macdumbencode,
-    'macdecode:': macdecode,
-    'macencode:': macencode,
+    b'dumbdecode:': dumbdecode,
+    b'dumbencode:': dumbencode,
+    b'cleverdecode:': cleverdecode,
+    b'cleverencode:': cleverencode,
+    b'macdumbdecode:': macdumbdecode,
+    b'macdumbencode:': macdumbencode,
+    b'macdecode:': macdecode,
+    b'macencode:': macencode,
 }
 
 
@@ -152,7 +152,7 @@
     # newest version as canonical. this prevents us from blocking a
     # changegroup that contains an unacceptable commit followed later
     # by a commit that fixes the problem.
-    tip = repo['tip']
+    tip = repo[b'tip']
     for rev in pycompat.xrange(
         repo.changelog.tiprev(), repo[node].rev() - 1, -1
     ):
@@ -166,32 +166,32 @@
                 if not halt:
                     ui.warn(
                         _(
-                            'attempt to commit or push text file(s) '
-                            'using %s line endings\n'
+                            b'attempt to commit or push text file(s) '
+                            b'using %s line endings\n'
                         )
                         % newlinestr[newline]
                     )
-                ui.warn(_('in %s: %s\n') % (short(c.node()), f))
+                ui.warn(_(b'in %s: %s\n') % (short(c.node()), f))
                 halt = True
-    if halt and hooktype == 'pretxnchangegroup':
+    if halt and hooktype == b'pretxnchangegroup':
         crlf = newlinestr[newline].lower()
         filter = filterstr[newline]
         ui.warn(
             _(
-                '\nTo prevent this mistake in your local repository,\n'
-                'add to Mercurial.ini or .hg/hgrc:\n'
-                '\n'
-                '[hooks]\n'
-                'pretxncommit.%s = python:hgext.win32text.forbid%s\n'
-                '\n'
-                'and also consider adding:\n'
-                '\n'
-                '[extensions]\n'
-                'win32text =\n'
-                '[encode]\n'
-                '** = %sencode:\n'
-                '[decode]\n'
-                '** = %sdecode:\n'
+                b'\nTo prevent this mistake in your local repository,\n'
+                b'add to Mercurial.ini or .hg/hgrc:\n'
+                b'\n'
+                b'[hooks]\n'
+                b'pretxncommit.%s = python:hgext.win32text.forbid%s\n'
+                b'\n'
+                b'and also consider adding:\n'
+                b'\n'
+                b'[extensions]\n'
+                b'win32text =\n'
+                b'[encode]\n'
+                b'** = %sencode:\n'
+                b'[decode]\n'
+                b'** = %sdecode:\n'
             )
             % (crlf, crlf, filter, filter)
         )
@@ -199,11 +199,11 @@
 
 
 def forbidcrlf(ui, repo, hooktype, node, **kwargs):
-    return forbidnewline(ui, repo, hooktype, node, '\r\n', **kwargs)
+    return forbidnewline(ui, repo, hooktype, node, b'\r\n', **kwargs)
 
 
 def forbidcr(ui, repo, hooktype, node, **kwargs):
-    return forbidnewline(ui, repo, hooktype, node, '\r', **kwargs)
+    return forbidnewline(ui, repo, hooktype, node, b'\r', **kwargs)
 
 
 def reposetup(ui, repo):
@@ -215,10 +215,10 @@
 
 def extsetup(ui):
     # deprecated config: win32text.warn
-    if ui.configbool('win32text', 'warn'):
+    if ui.configbool(b'win32text', b'warn'):
         ui.warn(
             _(
-                "win32text is deprecated: "
-                "https://mercurial-scm.org/wiki/Win32TextExtension\n"
+                b"win32text is deprecated: "
+                b"https://mercurial-scm.org/wiki/Win32TextExtension\n"
             )
         )
--- a/hgext/zeroconf/Zeroconf.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/zeroconf/Zeroconf.py	Sun Oct 06 09:48:39 2019 -0400
@@ -76,9 +76,9 @@
                  ensure names end in '.local.'
                  timeout on receiving socket for clean shutdown"""
 
-__author__ = "Paul Scott-Murphy"
-__email__ = "paul at scott dash murphy dot com"
-__version__ = "0.12"
+__author__ = b"Paul Scott-Murphy"
+__email__ = b"paul at scott dash murphy dot com"
+__version__ = b"0.12"
 
 import errno
 import itertools
@@ -91,11 +91,11 @@
 
 from mercurial import pycompat
 
-__all__ = ["Zeroconf", "ServiceInfo", "ServiceBrowser"]
+__all__ = [b"Zeroconf", b"ServiceInfo", b"ServiceBrowser"]
 
 # hook for threads
 
-globals()['_GLOBAL_DONE'] = 0
+globals()[b'_GLOBAL_DONE'] = 0
 
 # Some timing constants
 
@@ -160,34 +160,34 @@
 # Mapping constants to names
 
 _CLASSES = {
-    _CLASS_IN: "in",
-    _CLASS_CS: "cs",
-    _CLASS_CH: "ch",
-    _CLASS_HS: "hs",
-    _CLASS_NONE: "none",
-    _CLASS_ANY: "any",
+    _CLASS_IN: b"in",
+    _CLASS_CS: b"cs",
+    _CLASS_CH: b"ch",
+    _CLASS_HS: b"hs",
+    _CLASS_NONE: b"none",
+    _CLASS_ANY: b"any",
 }
 
 _TYPES = {
-    _TYPE_A: "a",
-    _TYPE_NS: "ns",
-    _TYPE_MD: "md",
-    _TYPE_MF: "mf",
-    _TYPE_CNAME: "cname",
-    _TYPE_SOA: "soa",
-    _TYPE_MB: "mb",
-    _TYPE_MG: "mg",
-    _TYPE_MR: "mr",
-    _TYPE_NULL: "null",
-    _TYPE_WKS: "wks",
-    _TYPE_PTR: "ptr",
-    _TYPE_HINFO: "hinfo",
-    _TYPE_MINFO: "minfo",
-    _TYPE_MX: "mx",
-    _TYPE_TXT: "txt",
-    _TYPE_AAAA: "quada",
-    _TYPE_SRV: "srv",
-    _TYPE_ANY: "any",
+    _TYPE_A: b"a",
+    _TYPE_NS: b"ns",
+    _TYPE_MD: b"md",
+    _TYPE_MF: b"mf",
+    _TYPE_CNAME: b"cname",
+    _TYPE_SOA: b"soa",
+    _TYPE_MB: b"mb",
+    _TYPE_MG: b"mg",
+    _TYPE_MR: b"mr",
+    _TYPE_NULL: b"null",
+    _TYPE_WKS: b"wks",
+    _TYPE_PTR: b"ptr",
+    _TYPE_HINFO: b"hinfo",
+    _TYPE_MINFO: b"minfo",
+    _TYPE_MX: b"mx",
+    _TYPE_TXT: b"txt",
+    _TYPE_AAAA: b"quada",
+    _TYPE_SRV: b"srv",
+    _TYPE_ANY: b"any",
 }
 
 # utility functions
@@ -223,7 +223,7 @@
 
 class BadDomainName(Exception):
     def __init__(self, pos):
-        Exception.__init__(self, "at position %s" % pos)
+        Exception.__init__(self, b"at position %s" % pos)
 
 
 class BadDomainNameCircular(BadDomainName):
@@ -262,31 +262,31 @@
         try:
             return _CLASSES[clazz]
         except KeyError:
-            return "?(%s)" % clazz
+            return b"?(%s)" % clazz
 
     def getType(self, type):
         """Type accessor"""
         try:
             return _TYPES[type]
         except KeyError:
-            return "?(%s)" % type
+            return b"?(%s)" % type
 
     def toString(self, hdr, other):
         """String representation with additional information"""
-        result = "%s[%s,%s" % (
+        result = b"%s[%s,%s" % (
             hdr,
             self.getType(self.type),
             self.getClazz(self.clazz),
         )
         if self.unique:
-            result += "-unique,"
+            result += b"-unique,"
         else:
-            result += ","
+            result += b","
         result += self.name
         if other is not None:
-            result += ",%s]" % other
+            result += b",%s]" % other
         else:
-            result += "]"
+            result += b"]"
         return result
 
 
@@ -296,7 +296,7 @@
     def __init__(self, name, type, clazz):
         if pycompat.ispy3 and isinstance(name, str):
             name = name.encode('ascii')
-        if not name.endswith(".local."):
+        if not name.endswith(b".local."):
             raise NonLocalNameException(name)
         DNSEntry.__init__(self, name, type, clazz)
 
@@ -310,7 +310,7 @@
 
     def __repr__(self):
         """String representation"""
-        return DNSEntry.toString(self, "question", None)
+        return DNSEntry.toString(self, b"question", None)
 
 
 class DNSRecord(DNSEntry):
@@ -371,12 +371,12 @@
 
     def toString(self, other):
         """String representation with additional information"""
-        arg = "%s/%s,%s" % (
+        arg = b"%s/%s,%s" % (
             self.ttl,
             self.getRemainingTTL(currentTimeMillis()),
             other,
         )
-        return DNSEntry.toString(self, "record", arg)
+        return DNSEntry.toString(self, b"record", arg)
 
 
 class DNSAddress(DNSRecord):
@@ -425,7 +425,7 @@
 
     def __repr__(self):
         """String representation"""
-        return self.cpu + " " + self.os
+        return self.cpu + b" " + self.os
 
 
 class DNSPointer(DNSRecord):
@@ -470,7 +470,7 @@
     def __repr__(self):
         """String representation"""
         if len(self.text) > 10:
-            return self.toString(self.text[:7] + "...")
+            return self.toString(self.text[:7] + b"...")
         else:
             return self.toString(self.text)
 
@@ -505,7 +505,7 @@
 
     def __repr__(self):
         """String representation"""
-        return self.toString("%s:%s" % (self.server, self.port))
+        return self.toString(b"%s:%s" % (self.server, self.port))
 
 
 class DNSIncoming(object):
@@ -528,7 +528,7 @@
 
     def readHeader(self):
         """Reads header portion of packet"""
-        format = '!HHHHHH'
+        format = b'!HHHHHH'
         length = struct.calcsize(format)
         info = struct.unpack(
             format, self.data[self.offset : self.offset + length]
@@ -544,7 +544,7 @@
 
     def readQuestions(self):
         """Reads questions section of packet"""
-        format = '!HH'
+        format = b'!HH'
         length = struct.calcsize(format)
         for i in range(0, self.numquestions):
             name = self.readName()
@@ -561,7 +561,7 @@
 
     def readInt(self):
         """Reads an integer from the packet"""
-        format = '!I'
+        format = b'!I'
         length = struct.calcsize(format)
         info = struct.unpack(
             format, self.data[self.offset : self.offset + length]
@@ -577,7 +577,7 @@
 
     def readString(self, len):
         """Reads a string of a given length from the packet"""
-        format = '!%ds' % len
+        format = b'!%ds' % len
         length = struct.calcsize(format)
         info = struct.unpack(
             format, self.data[self.offset : self.offset + length]
@@ -587,7 +587,7 @@
 
     def readUnsignedShort(self):
         """Reads an unsigned short from the packet"""
-        format = '!H'
+        format = b'!H'
         length = struct.calcsize(format)
         info = struct.unpack(
             format, self.data[self.offset : self.offset + length]
@@ -597,7 +597,7 @@
 
     def readOthers(self):
         """Reads answers, authorities and additionals section of the packet"""
-        format = '!HHiH'
+        format = b'!HHiH'
         length = struct.calcsize(format)
         n = self.numanswers + self.numauthorities + self.numadditionals
         for i in range(0, n):
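
The format strings handed to struct above can safely stay bytes: on
Python 3, struct.calcsize() and struct.unpack() accept both str and bytes
formats, so this part of the byteification is behavior-preserving. For
example:

    import struct

    fmt = b'!HHiH'
    assert struct.calcsize(fmt) == 10
    assert struct.unpack(b'!H', b'\x00\x2a') == (42,)
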
@@ -746,31 +746,31 @@
 
     def writeByte(self, value):
         """Writes a single byte to the packet"""
-        format = '!c'
+        format = b'!c'
         self.data.append(struct.pack(format, chr(value)))
         self.size += 1
 
     def insertShort(self, index, value):
         """Inserts an unsigned short in a certain position in the packet"""
-        format = '!H'
+        format = b'!H'
         self.data.insert(index, struct.pack(format, value))
         self.size += 2
 
     def writeShort(self, value):
         """Writes an unsigned short to the packet"""
-        format = '!H'
+        format = b'!H'
         self.data.append(struct.pack(format, value))
         self.size += 2
 
     def writeInt(self, value):
         """Writes an unsigned integer to the packet"""
-        format = '!I'
+        format = b'!I'
         self.data.append(struct.pack(format, int(value)))
         self.size += 4
 
     def writeString(self, value, length):
         """Writes a string to the packet"""
-        format = '!' + str(length) + 's'
+        format = b'!' + str(length) + b's'
         self.data.append(struct.pack(format, value))
         self.size += length
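
writeString's byteified format, b'!' + str(length) + b's', deserves a
caveat: on Python 3, concatenating bytes with the str returned by str()
raises TypeError, so the line only runs under Python 2 (zeroconf was still
a Python 2 extension at this point). A bytes-only formulation would
sidestep that; a hedged sketch:

    import struct

    length = 5
    try:
        fmt = b'!' + str(length) + b's'  # TypeError on Python 3: bytes + str
    except TypeError:
        fmt = b'!%ds' % length           # bytes %-formatting, Python 3.5+
    assert struct.pack(fmt, b'hello') == b'hello'
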
 
@@ -796,8 +796,8 @@
             # for future pointers to it.
             #
             self.names[name] = self.size
-            parts = name.split('.')
-            if parts[-1] == '':
+            parts = name.split(b'.')
+            if parts[-1] == b'':
                 parts = parts[:-1]
             for part in parts:
                 self.writeUTF(part)
@@ -835,7 +835,7 @@
         record.write(self)
         self.size -= 2
 
-        length = len(''.join(self.data[index:]))
+        length = len(b''.join(self.data[index:]))
         self.insertShort(index, length)  # Here is the short we adjusted for
 
     def packet(self):
@@ -863,7 +863,7 @@
                 self.insertShort(0, 0)
             else:
                 self.insertShort(0, self.id)
-        return ''.join(self.data)
+        return b''.join(self.data)
 
 
 class DNSCache(object):
@@ -939,7 +939,7 @@
         self.start()
 
     def run(self):
-        while not globals()['_GLOBAL_DONE']:
+        while not globals()[b'_GLOBAL_DONE']:
             rs = self.getReaders()
             if len(rs) == 0:
                 # No sockets to manage, but we wait for the timeout
@@ -955,7 +955,7 @@
                         try:
                             self.readers[sock].handle_read()
                         except Exception:
-                            if not globals()['_GLOBAL_DONE']:
+                            if not globals()[b'_GLOBAL_DONE']:
                                 traceback.print_exc()
                 except Exception:
                     pass
@@ -1035,7 +1035,7 @@
     def run(self):
         while True:
             self.zeroconf.wait(10 * 1000)
-            if globals()['_GLOBAL_DONE']:
+            if globals()[b'_GLOBAL_DONE']:
                 return
             now = currentTimeMillis()
             for record in self.zeroconf.cache.entries():
@@ -1108,7 +1108,7 @@
             now = currentTimeMillis()
             if len(self.list) == 0 and self.nexttime > now:
                 self.zeroconf.wait(self.nexttime - now)
-            if globals()['_GLOBAL_DONE'] or self.done:
+            if globals()[b'_GLOBAL_DONE'] or self.done:
                 return
             now = currentTimeMillis()
 
@@ -1174,24 +1174,24 @@
         if isinstance(properties, dict):
             self.properties = properties
             list = []
-            result = ''
+            result = b''
             for key in properties:
                 value = properties[key]
                 if value is None:
-                    suffix = ''
+                    suffix = b''
                 elif isinstance(value, str):
                     suffix = value
                 elif isinstance(value, int):
                     if value:
-                        suffix = 'true'
+                        suffix = b'true'
                     else:
-                        suffix = 'false'
+                        suffix = b'false'
                 else:
-                    suffix = ''
-                list.append('='.join((key, suffix)))
+                    suffix = b''
+                list.append(b'='.join((key, suffix)))
             for item in list:
-                result = ''.join(
-                    (result, struct.pack('!c', chr(len(item))), item)
+                result = b''.join(
+                    (result, struct.pack(b'!c', chr(len(item))), item)
                 )
             self.text = result
         else:
@@ -1212,7 +1212,7 @@
                 index += length
 
             for s in strs:
-                eindex = s.find('=')
+                eindex = s.find(b'=')
                 if eindex == -1:
                     # No equals sign at all
                     key = s
@@ -1220,9 +1220,9 @@
                 else:
                     key = s[:eindex]
                     value = s[eindex + 1 :]
-                    if value == 'true':
+                    if value == b'true':
                         value = 1
-                    elif value == 'false' or not value:
+                    elif value == b'false' or not value:
                         value = 0
 
                 # Only update non-existent properties
@@ -1240,7 +1240,7 @@
 
     def getName(self):
         """Name accessor"""
-        if self.type is not None and self.name.endswith("." + self.type):
+        if self.type is not None and self.name.endswith(b"." + self.type):
             return self.name[: len(self.name) - len(self.type) - 1]
         return self.name
 
@@ -1368,19 +1368,19 @@
 
     def __repr__(self):
         """String representation"""
-        result = "service[%s,%s:%s," % (
+        result = b"service[%s,%s:%s," % (
             self.name,
             socket.inet_ntoa(self.getAddress()),
             self.port,
         )
         if self.text is None:
-            result += "None"
+            result += b"None"
         else:
             if len(self.text) < 20:
                 result += self.text
             else:
-                result += self.text[:17] + "..."
-        result += "]"
+                result += self.text[:17] + b"..."
+        result += b"]"
         return result
 
 
@@ -1393,12 +1393,12 @@
     def __init__(self, bindaddress=None):
         """Creates an instance of the Zeroconf class, establishing
         multicast communications, listening and reaping threads."""
-        globals()['_GLOBAL_DONE'] = 0
+        globals()[b'_GLOBAL_DONE'] = 0
         if bindaddress is None:
             self.intf = socket.gethostbyname(socket.gethostname())
         else:
             self.intf = bindaddress
-        self.group = ('', _MDNS_PORT)
+        self.group = (b'', _MDNS_PORT)
         self.socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
         try:
             self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
@@ -1414,8 +1414,8 @@
             # work as expected.
             #
             pass
-        self.socket.setsockopt(socket.SOL_IP, socket.IP_MULTICAST_TTL, "\xff")
-        self.socket.setsockopt(socket.SOL_IP, socket.IP_MULTICAST_LOOP, "\x01")
+        self.socket.setsockopt(socket.SOL_IP, socket.IP_MULTICAST_TTL, b"\xff")
+        self.socket.setsockopt(socket.SOL_IP, socket.IP_MULTICAST_LOOP, b"\x01")
         try:
             self.socket.bind(self.group)
         except Exception:
@@ -1442,10 +1442,10 @@
         self.reaper = Reaper(self)
 
     def isLoopback(self):
-        return self.intf.startswith("127.0.0.1")
+        return self.intf.startswith(b"127.0.0.1")
 
     def isLinklocal(self):
-        return self.intf.startswith("169.254.")
+        return self.intf.startswith(b"169.254.")
 
     def wait(self, timeout):
         """Calling thread waits for a given number of milliseconds or
@@ -1642,8 +1642,8 @@
                     and not record.isExpired(now)
                     and record.alias == info.name
                 ):
-                    if info.name.find('.') < 0:
-                        info.name = "%w.[%s:%d].%s" % (
+                    if info.name.find(b'.') < 0:
+                        info.name = b"%w.[%s:%d].%s" % (
                             info.name,
                             info.address,
                             info.port,
@@ -1726,14 +1726,14 @@
 
         for question in msg.questions:
             if question.type == _TYPE_PTR:
-                if question.name == "_services._dns-sd._udp.local.":
+                if question.name == b"_services._dns-sd._udp.local.":
                     for stype in self.servicetypes.keys():
                         if out is None:
                             out = DNSOutgoing(_FLAGS_QR_RESPONSE | _FLAGS_AA)
                         out.addAnswer(
                             msg,
                             DNSPointer(
-                                "_services._dns-sd._udp.local.",
+                                b"_services._dns-sd._udp.local.",
                                 _TYPE_PTR,
                                 _CLASS_IN,
                                 _DNS_TTL,
@@ -1833,8 +1833,8 @@
     def close(self):
         """Ends the background threads, and prevent this instance from
         servicing further queries."""
-        if globals()['_GLOBAL_DONE'] == 0:
-            globals()['_GLOBAL_DONE'] = 1
+        if globals()[b'_GLOBAL_DONE'] == 0:
+            globals()[b'_GLOBAL_DONE'] = 1
             self.notifyAll()
             self.engine.notify()
             self.unregisterAllServices()
@@ -1850,39 +1850,39 @@
 # query (for Zoe), and service unregistration.
 
 if __name__ == '__main__':
-    print("Multicast DNS Service Discovery for Python, version", __version__)
+    print(b"Multicast DNS Service Discovery for Python, version", __version__)
     r = Zeroconf()
-    print("1. Testing registration of a service...")
-    desc = {'version': '0.10', 'a': 'test value', 'b': 'another value'}
+    print(b"1. Testing registration of a service...")
+    desc = {b'version': b'0.10', b'a': b'test value', b'b': b'another value'}
     info = ServiceInfo(
-        "_http._tcp.local.",
-        "My Service Name._http._tcp.local.",
-        socket.inet_aton("127.0.0.1"),
+        b"_http._tcp.local.",
+        b"My Service Name._http._tcp.local.",
+        socket.inet_aton(b"127.0.0.1"),
         1234,
         0,
         0,
         desc,
     )
-    print("   Registering service...")
+    print(b"   Registering service...")
     r.registerService(info)
-    print("   Registration done.")
-    print("2. Testing query of service information...")
+    print(b"   Registration done.")
+    print(b"2. Testing query of service information...")
     print(
-        "   Getting ZOE service:",
-        str(r.getServiceInfo("_http._tcp.local.", "ZOE._http._tcp.local.")),
+        b"   Getting ZOE service:",
+        str(r.getServiceInfo(b"_http._tcp.local.", b"ZOE._http._tcp.local.")),
     )
-    print("   Query done.")
-    print("3. Testing query of own service...")
+    print(b"   Query done.")
+    print(b"3. Testing query of own service...")
     print(
-        "   Getting self:",
+        b"   Getting self:",
         str(
             r.getServiceInfo(
-                "_http._tcp.local.", "My Service Name._http._tcp.local."
+                b"_http._tcp.local.", b"My Service Name._http._tcp.local."
             )
         ),
     )
-    print("   Query done.")
-    print("4. Testing unregister of service information...")
+    print(b"   Query done.")
+    print(b"4. Testing unregister of service information...")
     r.unregisterService(info)
-    print("   Unregister done.")
+    print(b"   Unregister done.")
     r.close()
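
One Python 3 caveat about the self-test block above: print() renders bytes
through repr(), so these byteified literals would display with a b'...'
wrapper; the demo output is really Python 2 oriented. For illustration:

    import io

    buf = io.StringIO()
    print(b"   Query done.", file=buf)
    assert buf.getvalue() == "b'   Query done.'\n"
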
--- a/hgext/zeroconf/__init__.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/hgext/zeroconf/__init__.py	Sun Oct 06 09:48:39 2019 -0400
@@ -43,7 +43,7 @@
 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
 # be specifying the version(s) of Mercurial they are tested with, or
 # leave the attribute unspecified.
-testedwith = 'ships-with-hg-core'
+testedwith = b'ships-with-hg-core'
 
 # publish
 
@@ -99,11 +99,11 @@
 
     # advertise to browsers
     svc = Zeroconf.ServiceInfo(
-        '_http._tcp.local.',
+        b'_http._tcp.local.',
         pycompat.bytestr(name + r'._http._tcp.local.'),
         server=host,
         port=port,
-        properties={'description': desc, 'path': "/" + path},
+        properties={b'description': desc, b'path': b"/" + path},
         address=localip,
         weight=0,
         priority=0,
@@ -112,11 +112,11 @@
 
     # advertise to Mercurial clients
     svc = Zeroconf.ServiceInfo(
-        '_hg._tcp.local.',
+        b'_hg._tcp.local.',
         pycompat.bytestr(name + r'._hg._tcp.local.'),
         server=host,
         port=port,
-        properties={'description': desc, 'path': "/" + path},
+        properties={b'description': desc, b'path': b"/" + path},
         address=localip,
         weight=0,
         priority=0,
@@ -134,20 +134,20 @@
         # single repo
         with app._obtainrepo() as repo:
             name = app.reponame or os.path.basename(repo.root)
-            path = repo.ui.config("web", "prefix", "").strip('/')
-            desc = repo.ui.config("web", "description")
+            path = repo.ui.config(b"web", b"prefix", b"").strip(b'/')
+            desc = repo.ui.config(b"web", b"description")
             if not desc:
                 desc = name
         publish(name, desc, path, port)
     else:
         # webdir
-        prefix = app.ui.config("web", "prefix", "").strip('/') + '/'
+        prefix = app.ui.config(b"web", b"prefix", b"").strip(b'/') + b'/'
         for repo, path in repos:
             u = app.ui.copy()
-            u.readconfig(os.path.join(path, '.hg', 'hgrc'))
+            u.readconfig(os.path.join(path, b'.hg', b'hgrc'))
             name = os.path.basename(repo)
-            path = (prefix + repo).strip('/')
-            desc = u.config('web', 'description')
+            path = (prefix + repo).strip(b'/')
+            desc = u.config(b'web', b'description')
             if not desc:
                 desc = name
             publish(name, desc, path, port)
@@ -175,7 +175,7 @@
         return
     server = Zeroconf.Zeroconf(ip)
     l = listener()
-    Zeroconf.ServiceBrowser(server, "_hg._tcp.local.", l)
+    Zeroconf.ServiceBrowser(server, b"_hg._tcp.local.", l)
     time.sleep(1)
     server.close()
     for value in l.found.values():
@@ -189,7 +189,7 @@
 
 
 def config(orig, self, section, key, *args, **kwargs):
-    if section == "paths" and key.startswith("zc-"):
+    if section == b"paths" and key.startswith(b"zc-"):
         for name, path in getzcpaths():
             if name == key:
                 return path
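
Two details keep the wrapper above working only because both sides are
bytes: on Python 3, bytes and str never compare equal (section == 'paths'
would be silently False), and bytes.startswith() requires a bytes prefix.
Illustration:

    section, key = b'paths', b'zc-myrepo'
    assert section == b'paths'
    assert not section == 'paths'     # bytes vs str compare unequal on Python 3
    assert key.startswith(b'zc-')
    try:
        key.startswith('zc-')         # mixed types raise TypeError on Python 3
    except TypeError:
        pass
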
@@ -198,14 +198,14 @@
 
 def configitems(orig, self, section, *args, **kwargs):
     repos = orig(self, section, *args, **kwargs)
-    if section == "paths":
+    if section == b"paths":
         repos += getzcpaths()
     return repos
 
 
 def configsuboptions(orig, self, section, name, *args, **kwargs):
     opt, sub = orig(self, section, name, *args, **kwargs)
-    if section == "paths" and name.startswith("zc-"):
+    if section == b"paths" and name.startswith(b"zc-"):
         # We have to find the URL in the zeroconf paths.  We can't cons up any
         # suboptions, so we use any that we found in the original config.
         for zcname, zcurl in getzcpaths():
@@ -232,10 +232,10 @@
             server.close()
 
 
-extensions.wrapfunction(dispatch, '_runcommand', cleanupafterdispatch)
+extensions.wrapfunction(dispatch, b'_runcommand', cleanupafterdispatch)
 
-extensions.wrapfunction(uimod.ui, 'config', config)
-extensions.wrapfunction(uimod.ui, 'configitems', configitems)
-extensions.wrapfunction(uimod.ui, 'configsuboptions', configsuboptions)
-extensions.wrapfunction(hg, 'defaultdest', defaultdest)
-extensions.wrapfunction(servermod, 'create_server', zc_create_server)
+extensions.wrapfunction(uimod.ui, b'config', config)
+extensions.wrapfunction(uimod.ui, b'configitems', configitems)
+extensions.wrapfunction(uimod.ui, b'configsuboptions', configsuboptions)
+extensions.wrapfunction(hg, b'defaultdest', defaultdest)
+extensions.wrapfunction(servermod, b'create_server', zc_create_server)
--- a/mercurial/archival.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/archival.py	Sun Oct 06 09:48:39 2019 -0400
@@ -43,7 +43,7 @@
         prefix = util.normpath(prefix)
     else:
         if not isinstance(dest, bytes):
-            raise ValueError('dest must be string if no prefix')
+            raise ValueError(b'dest must be string if no prefix')
         prefix = os.path.basename(dest)
         lower = prefix.lower()
         for sfx in exts.get(kind, []):
@@ -52,23 +52,23 @@
                 break
     lpfx = os.path.normpath(util.localpath(prefix))
     prefix = util.pconvert(lpfx)
-    if not prefix.endswith('/'):
-        prefix += '/'
+    if not prefix.endswith(b'/'):
+        prefix += b'/'
     # Drop the leading '.' path component if present, so Windows can read the
     # zip files (issue4634)
-    if prefix.startswith('./'):
+    if prefix.startswith(b'./'):
         prefix = prefix[2:]
-    if prefix.startswith('../') or os.path.isabs(lpfx) or '/../' in prefix:
-        raise error.Abort(_('archive prefix contains illegal components'))
+    if prefix.startswith(b'../') or os.path.isabs(lpfx) or b'/../' in prefix:
+        raise error.Abort(_(b'archive prefix contains illegal components'))
     return prefix
 
 
 exts = {
-    'tar': ['.tar'],
-    'tbz2': ['.tbz2', '.tar.bz2'],
-    'tgz': ['.tgz', '.tar.gz'],
-    'zip': ['.zip'],
-    'txz': ['.txz', '.tar.xz'],
+    b'tar': [b'.tar'],
+    b'tbz2': [b'.tbz2', b'.tar.bz2'],
+    b'tgz': [b'.tgz', b'.tar.gz'],
+    b'zip': [b'.zip'],
+    b'txz': [b'.txz', b'.tar.xz'],
 }
 
 
@@ -108,22 +108,22 @@
     repo = ctx.repo()
 
     opts = {
-        'template': repo.ui.config(
-            'experimental', 'archivemetatemplate', _defaultmetatemplate
+        b'template': repo.ui.config(
+            b'experimental', b'archivemetatemplate', _defaultmetatemplate
         )
     }
 
     out = util.stringio()
 
-    fm = formatter.formatter(repo.ui, out, 'archive', opts)
+    fm = formatter.formatter(repo.ui, out, b'archive', opts)
     fm.startitem()
     fm.context(ctx=ctx)
     fm.data(root=_rootctx(repo).hex())
 
     if ctx.rev() is None:
-        dirty = ''
+        dirty = b''
         if ctx.dirty(missing=True):
-            dirty = '+'
+            dirty = b'+'
         fm.data(dirty=dirty)
     fm.end()
 
@@ -146,33 +146,33 @@
             gzip.GzipFile.__init__(self, *args, **kw)
 
         def _write_gzip_header(self):
-            self.fileobj.write('\037\213')  # magic header
-            self.fileobj.write('\010')  # compression method
+            self.fileobj.write(b'\037\213')  # magic header
+            self.fileobj.write(b'\010')  # compression method
             fname = self.name
-            if fname and fname.endswith('.gz'):
+            if fname and fname.endswith(b'.gz'):
                 fname = fname[:-3]
             flags = 0
             if fname:
                 flags = gzip.FNAME
             self.fileobj.write(pycompat.bytechr(flags))
             gzip.write32u(self.fileobj, int(self.timestamp))
-            self.fileobj.write('\002')
-            self.fileobj.write('\377')
+            self.fileobj.write(b'\002')
+            self.fileobj.write(b'\377')
             if fname:
-                self.fileobj.write(fname + '\000')
+                self.fileobj.write(fname + b'\000')
 
-    def __init__(self, dest, mtime, kind=''):
+    def __init__(self, dest, mtime, kind=b''):
         self.mtime = mtime
         self.fileobj = None
 
-        def taropen(mode, name='', fileobj=None):
-            if kind == 'gz':
+        def taropen(mode, name=b'', fileobj=None):
+            if kind == b'gz':
                 mode = mode[0:1]
                 if not fileobj:
-                    fileobj = open(name, mode + 'b')
+                    fileobj = open(name, mode + b'b')
                 gzfileobj = self.GzipFileWithTime(
                     name,
-                    pycompat.sysstr(mode + 'b'),
+                    pycompat.sysstr(mode + b'b'),
                     zlib.Z_BEST_COMPRESSION,
                     fileobj,
                     timestamp=mtime,
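
taropen above mixes worlds: the Mercurial side now carries bytes (mode,
kind), while tarfile and GzipFile want native str, hence the
pycompat.sysstr(mode + b'b') conversions. Roughly, sysstr is an identity on
str and a latin-1 decode on bytes under Python 3; a simplified stand-in:

    def sysstr_sketch(s):
        # approximates mercurial.pycompat.sysstr on Python 3
        return s.decode('latin-1') if isinstance(s, bytes) else s

    mode, kind = b'w:', b'gz'
    assert sysstr_sketch(mode + kind) == 'w:gz'
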
@@ -185,9 +185,9 @@
                 return tarfile.open(name, pycompat.sysstr(mode + kind), fileobj)
 
         if isinstance(dest, bytes):
-            self.z = taropen('w:', name=dest)
+            self.z = taropen(b'w:', name=dest)
         else:
-            self.z = taropen('w|', fileobj=dest)
+            self.z = taropen(b'w|', fileobj=dest)
 
     def addfile(self, name, mode, islink, data):
         name = pycompat.fsdecode(name)
@@ -246,7 +246,7 @@
         # without this will be extracted with unexpected timestamp,
         # if TZ is not configured as GMT
         i.extra += struct.pack(
-            '<hhBl',
+            b'<hhBl',
             0x5455,  # block type: "extended-timestamp"
             1 + 4,  # size of this block
             1,  # "modification time is present"
@@ -270,7 +270,7 @@
         if islink:
             self.opener.symlink(data, name)
             return
-        f = self.opener(name, "w", atomictemp=False)
+        f = self.opener(name, b"w", atomictemp=False)
         f.write(data)
         f.close()
         destfile = os.path.join(self.basedir, name)
@@ -283,13 +283,13 @@
 
 
 archivers = {
-    'files': fileit,
-    'tar': tarit,
-    'tbz2': lambda name, mtime: tarit(name, mtime, 'bz2'),
-    'tgz': lambda name, mtime: tarit(name, mtime, 'gz'),
-    'txz': lambda name, mtime: tarit(name, mtime, 'xz'),
-    'uzip': lambda name, mtime: zipit(name, mtime, False),
-    'zip': zipit,
+    b'files': fileit,
+    b'tar': tarit,
+    b'tbz2': lambda name, mtime: tarit(name, mtime, b'bz2'),
+    b'tgz': lambda name, mtime: tarit(name, mtime, b'gz'),
+    b'txz': lambda name, mtime: tarit(name, mtime, b'xz'),
+    b'uzip': lambda name, mtime: zipit(name, mtime, False),
+    b'zip': zipit,
 }
 
 
@@ -300,7 +300,7 @@
     kind,
     decode=True,
     match=None,
-    prefix='',
+    prefix=b'',
     mtime=None,
     subrepos=False,
 ):
@@ -323,12 +323,12 @@
     subrepos tells whether to include subrepos.
     '''
 
-    if kind == 'txz' and not pycompat.ispy3:
-        raise error.Abort(_('xz compression is only available in Python 3'))
+    if kind == b'txz' and not pycompat.ispy3:
+        raise error.Abort(_(b'xz compression is only available in Python 3'))
 
-    if kind == 'files':
+    if kind == b'files':
         if prefix:
-            raise error.Abort(_('cannot give prefix when archiving to files'))
+            raise error.Abort(_(b'cannot give prefix when archiving to files'))
     else:
         prefix = tidyprefix(dest, kind, prefix)
 
@@ -339,7 +339,7 @@
         archiver.addfile(prefix + name, mode, islink, data)
 
     if kind not in archivers:
-        raise error.Abort(_("unknown archive type '%s'") % kind)
+        raise error.Abort(_(b"unknown archive type '%s'") % kind)
 
     ctx = repo[node]
     archiver = archivers[kind](dest, mtime or ctx.date()[0])
@@ -347,8 +347,8 @@
     if not match:
         match = scmutil.matchall(repo)
 
-    if repo.ui.configbool("ui", "archivemeta"):
-        name = '.hg_archival.txt'
+    if repo.ui.configbool(b"ui", b"archivemeta"):
+        name = b'.hg_archival.txt'
         if match(name):
             write(name, 0o644, False, lambda: buildmetadata(ctx))
 
@@ -360,12 +360,12 @@
             repo, [ctx.rev()], scmutil.matchfiles(repo, files)
         )
         progress = repo.ui.makeprogress(
-            _('archiving'), unit=_('files'), total=total
+            _(b'archiving'), unit=_(b'files'), total=total
         )
         progress.update(0)
         for f in files:
             ff = ctx.flags(f)
-            write(f, 'x' in ff and 0o755 or 0o644, 'l' in ff, ctx[f].data)
+            write(f, b'x' in ff and 0o755 or 0o644, b'l' in ff, ctx[f].data)
             progress.increment(item=f)
         progress.complete()
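
The write() call above derives each archive member's mode from Mercurial's flag
string, which (after this change) is bytes containing b'x' for executable and
b'l' for symlink. A sketch of that mapping, with a hypothetical helper name:

    def member_mode(flags):
        # mirrors the "b'x' in ff and 0o755 or 0o644" expression above
        return 0o755 if b'x' in flags else 0o644

    assert member_mode(b'x') == 0o755
    assert member_mode(b'') == 0o644
    # b'l' in ff is tested separately and passed to write() as islink
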
 
@@ -373,11 +373,11 @@
         for subpath in sorted(ctx.substate):
             sub = ctx.workingsub(subpath)
             submatch = matchmod.subdirmatcher(subpath, match)
-            subprefix = prefix + subpath + '/'
+            subprefix = prefix + subpath + b'/'
             total += sub.archive(archiver, subprefix, submatch, decode)
 
     if total == 0:
-        raise error.Abort(_('no files match the archive pattern'))
+        raise error.Abort(_(b'no files match the archive pattern'))
 
     archiver.done()
     return total
--- a/mercurial/bookmarks.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/bookmarks.py	Sun Oct 06 09:48:39 2019 -0400
@@ -31,9 +31,9 @@
 # until 3.5, bookmarks.current was the advertised name, not
 # bookmarks.active, so we must use both to avoid breaking old
 # custom styles
-activebookmarklabel = 'bookmarks.active bookmarks.current'
+activebookmarklabel = b'bookmarks.active bookmarks.current'
 
-BOOKMARKS_IN_STORE_REQUIREMENT = 'bookmarksinstore'
+BOOKMARKS_IN_STORE_REQUIREMENT = b'bookmarksinstore'
 
 
 def bookmarksinstore(repo):
@@ -51,7 +51,9 @@
     bookmarks or the committed ones. Other extensions (like share)
     may need to tweak this behavior further.
     """
-    fp, pending = txnutil.trypending(repo.root, bookmarksvfs(repo), 'bookmarks')
+    fp, pending = txnutil.trypending(
+        repo.root, bookmarksvfs(repo), b'bookmarks'
+    )
     return fp
 
 
@@ -84,7 +86,7 @@
                     if not line:
                         continue
                     try:
-                        sha, refspec = line.split(' ', 1)
+                        sha, refspec = line.split(b' ', 1)
                         node = tonode(sha)
                         if node in nm:
                             refspec = encoding.tolocal(refspec)
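
The split here undoes the on-disk bookmarks format, one "<40-hex-node> <name>"
line per bookmark (the matching writer is _write() further down in this file). A
round-trip sketch using binascii in place of Mercurial's hex()/bin() helpers:

    from binascii import hexlify, unhexlify

    node = b'\x11' * 20
    line = b'%s %s\n' % (hexlify(node), b'my-bookmark')
    sha, refspec = line.rstrip(b'\n').split(b' ', 1)
    assert unhexlify(sha) == node and refspec == b'my-bookmark'
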
@@ -103,11 +105,11 @@
                         # ValueError:
                         # - node in nm, for non-20-bytes entry
                         # - split(...), for string without ' '
-                        bookmarkspath = '.hg/bookmarks'
+                        bookmarkspath = b'.hg/bookmarks'
                         if bookmarksinstore(repo):
-                            bookmarkspath = '.hg/store/bookmarks'
+                            bookmarkspath = b'.hg/store/bookmarks'
                         repo.ui.warn(
-                            _('malformed line in %s: %r\n')
+                            _(b'malformed line in %s: %r\n')
                             % (bookmarkspath, pycompat.bytestr(line))
                         )
         except IOError as inst:
@@ -122,7 +124,7 @@
     @active.setter
     def active(self, mark):
         if mark is not None and mark not in self._refmap:
-            raise AssertionError('bookmark %s does not exist!' % mark)
+            raise AssertionError(b'bookmark %s does not exist!' % mark)
 
         self._active = mark
         self._aclean = False
@@ -186,7 +188,7 @@
     def applychanges(self, repo, tr, changes):
         """Apply a list of changes to bookmarks
         """
-        bmchanges = tr.changes.get('bookmarks')
+        bmchanges = tr.changes.get(b'bookmarks')
         for name, node in changes:
             old = self._refmap.get(name)
             if node is None:
@@ -205,11 +207,11 @@
         """record that bookmarks have been changed in a transaction
 
         The transaction is then responsible for updating the file content."""
-        location = '' if bookmarksinstore(self._repo) else 'plain'
+        location = b'' if bookmarksinstore(self._repo) else b'plain'
         tr.addfilegenerator(
-            'bookmarks', ('bookmarks',), self._write, location=location
+            b'bookmarks', (b'bookmarks',), self._write, location=location
         )
-        tr.hookargs['bookmark_moved'] = '1'
+        tr.hookargs[b'bookmark_moved'] = b'1'
 
     def _writerepo(self, repo):
         """Factored out for extensibility"""
@@ -225,7 +227,7 @@
             vfs = repo.vfs
             lock = repo.wlock()
         with lock:
-            with vfs('bookmarks', 'w', atomictemp=True, checkambig=True) as f:
+            with vfs(b'bookmarks', b'w', atomictemp=True, checkambig=True) as f:
                 self._write(f)
 
     def _writeactive(self):
@@ -234,25 +236,25 @@
         with self._repo.wlock():
             if self._active is not None:
                 with self._repo.vfs(
-                    'bookmarks.current', 'w', atomictemp=True, checkambig=True
+                    b'bookmarks.current', b'w', atomictemp=True, checkambig=True
                 ) as f:
                     f.write(encoding.fromlocal(self._active))
             else:
-                self._repo.vfs.tryunlink('bookmarks.current')
+                self._repo.vfs.tryunlink(b'bookmarks.current')
         self._aclean = True
 
     def _write(self, fp):
         for name, node in sorted(self._refmap.iteritems()):
-            fp.write("%s %s\n" % (hex(node), encoding.fromlocal(name)))
+            fp.write(b"%s %s\n" % (hex(node), encoding.fromlocal(name)))
         self._clean = True
         self._repo.invalidatevolatilesets()
 
     def expandname(self, bname):
-        if bname == '.':
+        if bname == b'.':
             if self.active:
                 return self.active
             else:
-                raise error.RepoLookupError(_("no active bookmark"))
+                raise error.RepoLookupError(_(b"no active bookmark"))
         return bname
 
     def checkconflict(self, mark, force=False, target=None):
@@ -267,7 +269,7 @@
 
         If divergent bookmarks are to be deleted, they will be returned as a list.
         """
-        cur = self._repo['.'].node()
+        cur = self._repo[b'.'].node()
         if mark in self._refmap and not force:
             if target:
                 if self._refmap[mark] == target and target == cur:
@@ -279,7 +281,7 @@
                 divs = [
                     self._refmap[b]
                     for b in self._refmap
-                    if b.split('@', 1)[0] == mark.split('@', 1)[0]
+                    if b.split(b'@', 1)[0] == mark.split(b'@', 1)[0]
                 ]
 
                 # allow resolving a single divergent bookmark even if moving
@@ -294,19 +296,19 @@
                 delbms = divergent2delete(self._repo, deletefrom, mark)
                 if validdest(self._repo, bmctx, self._repo[target]):
                     self._repo.ui.status(
-                        _("moving bookmark '%s' forward from %s\n")
+                        _(b"moving bookmark '%s' forward from %s\n")
                         % (mark, short(bmctx.node()))
                     )
                     return delbms
             raise error.Abort(
-                _("bookmark '%s' already exists " "(use -f to force)") % mark
+                _(b"bookmark '%s' already exists " b"(use -f to force)") % mark
             )
         if (
             mark in self._repo.branchmap()
             or mark == self._repo.dirstate.branch()
         ) and not force:
             raise error.Abort(
-                _("a bookmark cannot have the name of an existing branch")
+                _(b"a bookmark cannot have the name of an existing branch")
             )
         if len(mark) > 3 and not force:
             try:
@@ -316,9 +318,9 @@
             if shadowhash:
                 self._repo.ui.warn(
                     _(
-                        "bookmark %s matches a changeset hash\n"
-                        "(did you leave a -r out of an 'hg bookmark' "
-                        "command?)\n"
+                        b"bookmark %s matches a changeset hash\n"
+                        b"(did you leave a -r out of an 'hg bookmark' "
+                        b"command?)\n"
                     )
                     % mark
                 )
@@ -333,9 +335,9 @@
     """
     # No readline() in osutil.posixfile, reading everything is
     # cheap.
-    content = repo.vfs.tryread('bookmarks.current')
-    mark = encoding.tolocal((content.splitlines() or [''])[0])
-    if mark == '' or mark not in marks:
+    content = repo.vfs.tryread(b'bookmarks.current')
+    mark = encoding.tolocal((content.splitlines() or [b''])[0])
+    if mark == b'' or mark not in marks:
         mark = None
     return mark
 
@@ -379,9 +381,11 @@
     the list of bookmarks to delete."""
     todelete = []
     marks = repo._bookmarks
-    divergent = [b for b in marks if b.split('@', 1)[0] == bm.split('@', 1)[0]]
+    divergent = [
+        b for b in marks if b.split(b'@', 1)[0] == bm.split(b'@', 1)[0]
+    ]
     for mark in divergent:
-        if mark == '@' or '@' not in mark:
+        if mark == b'@' or b'@' not in mark:
             # can't be divergent by definition
             continue
         if mark and marks[mark] in deletefrom:
@@ -404,12 +408,12 @@
     """
     if not repo._activebookmark:
         raise ValueError(
-            'headsforactive() only makes sense with an active bookmark'
+            b'headsforactive() only makes sense with an active bookmark'
         )
-    name = repo._activebookmark.split('@', 1)[0]
+    name = repo._activebookmark.split(b'@', 1)[0]
     heads = []
     for mark, n in repo._bookmarks.iteritems():
-        if mark.split('@', 1)[0] == name:
+        if mark.split(b'@', 1)[0] == name:
             heads.append(n)
     return heads
 
@@ -420,9 +424,9 @@
     checkout, movemarkfrom = None, None
     activemark = repo._activebookmark
     if isactivewdirparent(repo):
-        movemarkfrom = repo['.'].node()
+        movemarkfrom = repo[b'.'].node()
     elif activemark:
-        ui.status(_("updating to active bookmark %s\n") % activemark)
+        ui.status(_(b"updating to active bookmark %s\n") % activemark)
         checkout = activemark
     return (checkout, movemarkfrom)
 
@@ -440,7 +444,7 @@
         divs = [
             repo[marks[b]]
             for b in marks
-            if b.split('@', 1)[0] == active.split('@', 1)[0]
+            if b.split(b'@', 1)[0] == active.split(b'@', 1)[0]
         ]
         anc = repo.changelog.ancestors([new.rev()])
         deletefrom = [b.node() for b in divs if b.rev() in anc or b == new]
@@ -451,7 +455,7 @@
         bmchanges.append((bm, None))
 
     if bmchanges:
-        with repo.lock(), repo.transaction('bookmark') as tr:
+        with repo.lock(), repo.transaction(b'bookmark') as tr:
             marks.applychanges(repo, tr, bmchanges)
     return bool(bmchanges)
 
@@ -464,7 +468,7 @@
     hasnode = repo.changelog.hasnode
     for k, v in marks.iteritems():
         # don't expose local divergent bookmarks
-        if hasnode(v) and ('@' not in k or k.endswith('@')):
+        if hasnode(v) and (b'@' not in k or k.endswith(b'@')):
             yield k, v
 
 
@@ -480,12 +484,12 @@
         wlock = util.nullcontextmanager()
     else:
         wlock = repo.wlock()
-    with wlock, repo.lock(), repo.transaction('bookmarks') as tr:
+    with wlock, repo.lock(), repo.transaction(b'bookmarks') as tr:
         marks = repo._bookmarks
-        existing = hex(marks.get(key, ''))
+        existing = hex(marks.get(key, b''))
         if existing != old and existing != new:
             return False
-        if new == '':
+        if new == b'':
             changes = [(key, None)]
         else:
             if new not in repo:
@@ -580,21 +584,21 @@
     This reuses an already existing one with an "@number" suffix, if it
     refers to ``remotenode``.
     '''
-    if b == '@':
-        b = ''
+    if b == b'@':
+        b = b''
     # try to use an @pathalias suffix
     # if an @pathalias already exists, we overwrite (update) it
-    if path.startswith("file:"):
+    if path.startswith(b"file:"):
         path = util.url(path).path
-    for p, u in ui.configitems("paths"):
-        if u.startswith("file:"):
+    for p, u in ui.configitems(b"paths"):
+        if u.startswith(b"file:"):
             u = util.url(u).path
         if path == u:
-            return '%s@%s' % (b, p)
+            return b'%s@%s' % (b, p)
 
     # assign a unique "@number" suffix newly
     for x in range(1, 100):
-        n = '%s@%d' % (b, x)
+        n = b'%s@%d' % (b, x)
         if n not in localmarks or localmarks[n] == remotenode:
             return n
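
A compact, runnable restatement of the allocation loop above: the first
b'<mark>@<n>' that is either unused or already pointing at the remote node wins
(the assumption here is that exhausting the range yields None, as the implicit
fall-through does):

    def divergentname(mark, localmarks, remotenode):
        for x in range(1, 100):
            n = b'%s@%d' % (mark, x)
            if n not in localmarks or localmarks[n] == remotenode:
                return n
        return None

    marks = {b'foo@1': b'\xaa' * 20}
    assert divergentname(b'foo', marks, b'\xbb' * 20) == b'foo@2'
    assert divergentname(b'foo', marks, b'\xaa' * 20) == b'foo@1'
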
 
@@ -608,7 +612,7 @@
     return binremotemarks
 
 
-_binaryentry = struct.Struct('>20sH')
+_binaryentry = struct.Struct(b'>20sH')
 
 
 def binaryencode(bookmarks):
@@ -630,7 +634,7 @@
             node = wdirid
         binarydata.append(_binaryentry.pack(node, len(book)))
         binarydata.append(book)
-    return ''.join(binarydata)
+    return b''.join(binarydata)
 
 
 def binarydecode(stream):
@@ -652,13 +656,13 @@
         entry = stream.read(entrysize)
         if len(entry) < entrysize:
             if entry:
-                raise error.Abort(_('bad bookmark stream'))
+                raise error.Abort(_(b'bad bookmark stream'))
             break
         node, length = _binaryentry.unpack(entry)
         bookmark = stream.read(length)
         if len(bookmark) < length:
             if entry:
-                raise error.Abort(_('bad bookmark stream'))
+                raise error.Abort(_(b'bad bookmark stream'))
         if node == wdirid:
             node = None
         books.append((bookmark, node))
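
binarydecode() is the inverse of binaryencode() above; both share the
_binaryentry layout: a 20-byte node, a big-endian unsigned short name length,
then the name itself. A self-contained round-trip sketch:

    import struct

    entry = struct.Struct(b'>20sH')  # 20-byte node + 2-byte name length

    def encode_one(node, name):
        return entry.pack(node, len(name)) + name

    def decode_one(data):
        node, length = entry.unpack(data[:entry.size])
        return node, data[entry.size:entry.size + length]

    node = b'\x42' * 20
    assert decode_one(encode_one(node, b'feature-x')) == (node, b'feature-x')
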
@@ -666,7 +670,7 @@
 
 
 def updatefromremote(ui, repo, remotemarks, path, trfunc, explicit=()):
-    ui.debug("checking for updated bookmarks\n")
+    ui.debug(b"checking for updated bookmarks\n")
     localmarks = repo._bookmarks
     (
         addsrc,
@@ -681,7 +685,7 @@
 
     status = ui.status
     warn = ui.warn
-    if ui.configbool('ui', 'quietbookmarkmove'):
+    if ui.configbool(b'ui', b'quietbookmarkmove'):
         status = warn = ui.debug
 
     explicit = set(explicit)
@@ -689,24 +693,24 @@
     for b, scid, dcid in addsrc:
         if scid in repo:  # add remote bookmarks for changes we already have
             changed.append(
-                (b, scid, status, _("adding remote bookmark %s\n") % b)
+                (b, scid, status, _(b"adding remote bookmark %s\n") % b)
             )
         elif b in explicit:
             explicit.remove(b)
             ui.warn(
-                _("remote bookmark %s points to locally missing %s\n")
+                _(b"remote bookmark %s points to locally missing %s\n")
                 % (b, hex(scid)[:12])
             )
 
     for b, scid, dcid in advsrc:
-        changed.append((b, scid, status, _("updating bookmark %s\n") % b))
+        changed.append((b, scid, status, _(b"updating bookmark %s\n") % b))
     # remove normal movement from explicit set
     explicit.difference_update(d[0] for d in changed)
 
     for b, scid, dcid in diverge:
         if b in explicit:
             explicit.discard(b)
-            changed.append((b, scid, status, _("importing bookmark %s\n") % b))
+            changed.append((b, scid, status, _(b"importing bookmark %s\n") % b))
         else:
             db = _diverge(ui, b, path, localmarks, scid)
             if db:
@@ -715,26 +719,26 @@
                         db,
                         scid,
                         warn,
-                        _("divergent bookmark %s stored as %s\n") % (b, db),
+                        _(b"divergent bookmark %s stored as %s\n") % (b, db),
                     )
                 )
             else:
                 warn(
                     _(
-                        "warning: failed to assign numbered name "
-                        "to divergent bookmark %s\n"
+                        b"warning: failed to assign numbered name "
+                        b"to divergent bookmark %s\n"
                     )
                     % b
                 )
     for b, scid, dcid in adddst + advdst:
         if b in explicit:
             explicit.discard(b)
-            changed.append((b, scid, status, _("importing bookmark %s\n") % b))
+            changed.append((b, scid, status, _(b"importing bookmark %s\n") % b))
     for b, scid, dcid in differ:
         if b in explicit:
             explicit.remove(b)
             ui.warn(
-                _("remote bookmark %s points to locally missing %s\n")
+                _(b"remote bookmark %s points to locally missing %s\n")
                 % (b, hex(scid)[:12])
             )
 
@@ -750,11 +754,11 @@
 def incoming(ui, repo, peer):
     '''Show bookmarks incoming from other to repo
     '''
-    ui.status(_("searching for changed bookmarks\n"))
+    ui.status(_(b"searching for changed bookmarks\n"))
 
     with peer.commandexecutor() as e:
         remotemarks = unhexlifybookmarks(
-            e.callcommand('listkeys', {'namespace': 'bookmarks',}).result()
+            e.callcommand(b'listkeys', {b'namespace': b'bookmarks',}).result()
         )
 
     r = comparebookmarks(repo, remotemarks, repo._bookmarks)
@@ -768,28 +772,28 @@
     if ui.verbose:
 
         def add(b, id, st):
-            incomings.append("   %-25s %s %s\n" % (b, getid(id), st))
+            incomings.append(b"   %-25s %s %s\n" % (b, getid(id), st))
 
     else:
 
         def add(b, id, st):
-            incomings.append("   %-25s %s\n" % (b, getid(id)))
+            incomings.append(b"   %-25s %s\n" % (b, getid(id)))
 
     for b, scid, dcid in addsrc:
         # i18n: "added" refers to a bookmark
-        add(b, hex(scid), _('added'))
+        add(b, hex(scid), _(b'added'))
     for b, scid, dcid in advsrc:
         # i18n: "advanced" refers to a bookmark
-        add(b, hex(scid), _('advanced'))
+        add(b, hex(scid), _(b'advanced'))
     for b, scid, dcid in diverge:
         # i18n: "diverged" refers to a bookmark
-        add(b, hex(scid), _('diverged'))
+        add(b, hex(scid), _(b'diverged'))
     for b, scid, dcid in differ:
         # i18n: "changed" refers to a bookmark
-        add(b, hex(scid), _('changed'))
+        add(b, hex(scid), _(b'changed'))
 
     if not incomings:
-        ui.status(_("no changed bookmarks found\n"))
+        ui.status(_(b"no changed bookmarks found\n"))
         return 1
 
     for s in sorted(incomings):
@@ -801,9 +805,9 @@
 def outgoing(ui, repo, other):
     '''Show bookmarks outgoing from repo to other
     '''
-    ui.status(_("searching for changed bookmarks\n"))
+    ui.status(_(b"searching for changed bookmarks\n"))
 
-    remotemarks = unhexlifybookmarks(other.listkeys('bookmarks'))
+    remotemarks = unhexlifybookmarks(other.listkeys(b'bookmarks'))
     r = comparebookmarks(repo, repo._bookmarks, remotemarks)
     addsrc, adddst, advsrc, advdst, diverge, differ, invalid, same = r
 
@@ -815,31 +819,31 @@
     if ui.verbose:
 
         def add(b, id, st):
-            outgoings.append("   %-25s %s %s\n" % (b, getid(id), st))
+            outgoings.append(b"   %-25s %s %s\n" % (b, getid(id), st))
 
     else:
 
         def add(b, id, st):
-            outgoings.append("   %-25s %s\n" % (b, getid(id)))
+            outgoings.append(b"   %-25s %s\n" % (b, getid(id)))
 
     for b, scid, dcid in addsrc:
         # i18n: "added refers to a bookmark
-        add(b, hex(scid), _('added'))
+        add(b, hex(scid), _(b'added'))
     for b, scid, dcid in adddst:
         # i18n: "deleted" refers to a bookmark
-        add(b, ' ' * 40, _('deleted'))
+        add(b, b' ' * 40, _(b'deleted'))
     for b, scid, dcid in advsrc:
         # i18n: "advanced" refers to a bookmark
-        add(b, hex(scid), _('advanced'))
+        add(b, hex(scid), _(b'advanced'))
     for b, scid, dcid in diverge:
         # i18n: "diverged" refers to a bookmark
-        add(b, hex(scid), _('diverged'))
+        add(b, hex(scid), _(b'diverged'))
     for b, scid, dcid in differ:
         # i18n: "changed" refers to a bookmark
-        add(b, hex(scid), _('changed'))
+        add(b, hex(scid), _(b'changed'))
 
     if not outgoings:
-        ui.status(_("no changed bookmarks found\n"))
+        ui.status(_(b"no changed bookmarks found\n"))
         return 1
 
     for s in sorted(outgoings):
@@ -855,7 +859,7 @@
     '''
     with peer.commandexecutor() as e:
         remotemarks = unhexlifybookmarks(
-            e.callcommand('listkeys', {'namespace': 'bookmarks',}).result()
+            e.callcommand(b'listkeys', {b'namespace': b'bookmarks',}).result()
         )
 
     r = comparebookmarks(repo, remotemarks, repo._bookmarks)
@@ -888,9 +892,9 @@
     mark = mark.strip()
     if not mark:
         raise error.Abort(
-            _("bookmark names cannot consist entirely of " "whitespace")
+            _(b"bookmark names cannot consist entirely of " b"whitespace")
         )
-    scmutil.checknewlabel(repo, mark, 'bookmark')
+    scmutil.checknewlabel(repo, mark, b'bookmark')
     return mark
 
 
@@ -903,7 +907,7 @@
     changes = []
     for mark in names:
         if mark not in marks:
-            raise error.Abort(_("bookmark '%s' does not exist") % mark)
+            raise error.Abort(_(b"bookmark '%s' does not exist") % mark)
         if mark == repo._activebookmark:
             deactivate(repo)
         changes.append((mark, None))
@@ -923,7 +927,7 @@
     marks = repo._bookmarks
     mark = checkformat(repo, new)
     if old not in marks:
-        raise error.Abort(_("bookmark '%s' does not exist") % old)
+        raise error.Abort(_(b"bookmark '%s' does not exist") % old)
     changes = []
     for bm in marks.checkconflict(mark, force):
         changes.append((bm, None))
@@ -945,14 +949,14 @@
     Raises an abort error if old is not in the bookmark store.
     """
     marks = repo._bookmarks
-    cur = repo['.'].node()
+    cur = repo[b'.'].node()
     newact = None
     changes = []
     hiddenrev = None
 
     # unhide revs if any
     if rev:
-        repo = scmutil.unhidehashlikerevs(repo, [rev], 'nowarn')
+        repo = scmutil.unhidehashlikerevs(repo, [rev], b'nowarn')
 
     for mark in names:
         mark = checkformat(repo, mark)
@@ -972,11 +976,11 @@
         changes.append((mark, tgt))
 
     if hiddenrev:
-        repo.ui.warn(_("bookmarking hidden changeset %s\n") % hiddenrev)
+        repo.ui.warn(_(b"bookmarking hidden changeset %s\n") % hiddenrev)
 
         if ctx.obsolete():
-            msg = obsutil._getfilteredreason(repo, "%s" % hiddenrev, ctx)
-            repo.ui.warn("(%s)\n" % msg)
+            msg = obsutil._getfilteredreason(repo, b"%s" % hiddenrev, ctx)
+            repo.ui.warn(b"(%s)\n" % msg)
 
     marks.applychanges(repo, tr, changes)
     if not inactive and cur == marks[newact] and not rev:
@@ -993,24 +997,24 @@
     """
     hexfn = fm.hexfunc
     if len(bmarks) == 0 and fm.isplain():
-        ui.status(_("no bookmarks set\n"))
+        ui.status(_(b"no bookmarks set\n"))
     for bmark, (n, prefix, label) in sorted(bmarks.iteritems()):
         fm.startitem()
         fm.context(repo=repo)
         if not ui.quiet:
-            fm.plain(' %s ' % prefix, label=label)
-        fm.write('bookmark', '%s', bmark, label=label)
-        pad = " " * (25 - encoding.colwidth(bmark))
+            fm.plain(b' %s ' % prefix, label=label)
+        fm.write(b'bookmark', b'%s', bmark, label=label)
+        pad = b" " * (25 - encoding.colwidth(bmark))
         fm.condwrite(
             not ui.quiet,
-            'rev node',
-            pad + ' %d:%s',
+            b'rev node',
+            pad + b' %d:%s',
             repo.changelog.rev(n),
             hexfn(n),
             label=label,
         )
         fm.data(active=(activebookmarklabel in label))
-        fm.plain('\n')
+        fm.plain(b'\n')
 
 
 def printbookmarks(ui, repo, fm, names=None):
@@ -1022,12 +1026,12 @@
     bmarks = {}
     for bmark in names or marks:
         if bmark not in marks:
-            raise error.Abort(_("bookmark '%s' does not exist") % bmark)
+            raise error.Abort(_(b"bookmark '%s' does not exist") % bmark)
         active = repo._activebookmark
         if bmark == active:
-            prefix, label = '*', activebookmarklabel
+            prefix, label = b'*', activebookmarklabel
         else:
-            prefix, label = ' ', ''
+            prefix, label = b' ', b''
 
         bmarks[bmark] = (marks[bmark], prefix, label)
     _printbookmarks(ui, repo, fm, bmarks)
@@ -1035,7 +1039,7 @@
 
 def preparehookargs(name, old, new):
     if new is None:
-        new = ''
+        new = b''
     if old is None:
-        old = ''
-    return {'bookmark': name, 'node': hex(new), 'oldnode': hex(old)}
+        old = b''
+    return {b'bookmark': name, b'node': hex(new), b'oldnode': hex(old)}
--- a/mercurial/branchmap.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/branchmap.py	Sun Oct 06 09:48:39 2019 -0400
@@ -110,7 +110,7 @@
 
             # Try to stick it as low as possible
             # filters above served are unlikely to be fetched from a clone
-            for candidate in ('base', 'immutable', 'served'):
+            for candidate in (b'base', b'immutable', b'served'):
                 rview = repo.filtered(candidate)
                 if cache.validfor(rview):
                     self._per_filter[candidate] = cache
@@ -129,9 +129,9 @@
 
 def _branchcachedesc(repo):
     if repo.filtername is not None:
-        return 'branch cache (%s)' % repo.filtername
+        return b'branch cache (%s)' % repo.filtername
     else:
-        return 'branch cache'
+        return b'branch cache'
 
 
 class branchcache(object):
@@ -245,7 +245,7 @@
         try:
             f = repo.cachevfs(cls._filename(repo))
             lineiter = iter(f)
-            cachekey = next(lineiter).rstrip('\n').split(" ", 2)
+            cachekey = next(lineiter).rstrip(b'\n').split(b" ", 2)
             last, lrev = cachekey[:2]
             last, lrev = bin(last), int(lrev)
             filteredhash = None
@@ -267,7 +267,7 @@
 
         except Exception as inst:
             if repo.ui.debugflag:
-                msg = 'invalid %s: %s\n'
+                msg = b'invalid %s: %s\n'
                 repo.ui.debug(
                     msg % (_branchcachedesc(repo), pycompat.bytestr(inst))
                 )
@@ -283,24 +283,24 @@
         """ fully loads the branchcache by reading from the file using the line
         iterator passed"""
         for line in lineiter:
-            line = line.rstrip('\n')
+            line = line.rstrip(b'\n')
             if not line:
                 continue
-            node, state, label = line.split(" ", 2)
-            if state not in 'oc':
+            node, state, label = line.split(b" ", 2)
+            if state not in b'oc':
                 raise ValueError(r'invalid branch state')
             label = encoding.tolocal(label.strip())
             node = bin(node)
             self._entries.setdefault(label, []).append(node)
-            if state == 'c':
+            if state == b'c':
                 self._closednodes.add(node)
 
     @staticmethod
     def _filename(repo):
         """name of a branchcache file for a given repo or repoview"""
-        filename = "branch2"
+        filename = b"branch2"
         if repo.filtername:
-            filename = '%s-%s' % (filename, repo.filtername)
+            filename = b'%s-%s' % (filename, repo.filtername)
         return filename
 
     def validfor(self, repo):
@@ -364,25 +364,25 @@
 
     def write(self, repo):
         try:
-            f = repo.cachevfs(self._filename(repo), "w", atomictemp=True)
-            cachekey = [hex(self.tipnode), '%d' % self.tiprev]
+            f = repo.cachevfs(self._filename(repo), b"w", atomictemp=True)
+            cachekey = [hex(self.tipnode), b'%d' % self.tiprev]
             if self.filteredhash is not None:
                 cachekey.append(hex(self.filteredhash))
-            f.write(" ".join(cachekey) + '\n')
+            f.write(b" ".join(cachekey) + b'\n')
             nodecount = 0
             for label, nodes in sorted(self._entries.iteritems()):
                 label = encoding.fromlocal(label)
                 for node in nodes:
                     nodecount += 1
                     if node in self._closednodes:
-                        state = 'c'
+                        state = b'c'
                     else:
-                        state = 'o'
-                    f.write("%s %s %s\n" % (hex(node), state, label))
+                        state = b'o'
+                    f.write(b"%s %s %s\n" % (hex(node), state, label))
             f.close()
             repo.ui.log(
-                'branchcache',
-                'wrote %s with %d labels and %d nodes\n',
+                b'branchcache',
+                b'wrote %s with %d labels and %d nodes\n',
                 _branchcachedesc(repo),
                 len(self._entries),
                 nodecount,
@@ -390,7 +390,7 @@
         except (IOError, OSError, error.Abort) as inst:
             # Abort may be raised by read only opener, so log and continue
             repo.ui.debug(
-                "couldn't write branch cache: %s\n"
+                b"couldn't write branch cache: %s\n"
                 % stringutil.forcebytestr(inst)
             )
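
For reference, the branch2 cache file this method writes is line-oriented: a
"<tip-hex> <tiprev>[ <filteredhash>]" header, then one "<node-hex> <state> <label>"
line per head, with state b'o' (open) or b'c' (closed). A parsing sketch over
hand-built example lines:

    data = [
        b'11' * 20 + b' 5\n',          # cache key: tip node + tip rev
        b'aa' * 20 + b' o default\n',  # an open head on branch "default"
        b'bb' * 20 + b' c stable\n',   # a closed head on branch "stable"
    ]
    cachekey = data[0].rstrip(b'\n').split(b' ', 2)
    entries, closed = {}, set()
    for line in data[1:]:
        node, state, label = line.rstrip(b'\n').split(b' ', 2)
        entries.setdefault(label, []).append(node)
        if state == b'c':
            closed.add(node)
    assert cachekey[1] == b'5' and sorted(entries) == [b'default', b'stable']
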
 
@@ -460,8 +460,8 @@
 
         duration = util.timer() - starttime
         repo.ui.log(
-            'branchcache',
-            'updated %s in %.4f seconds\n',
+            b'branchcache',
+            b'updated %s in %.4f seconds\n',
             _branchcachedesc(repo),
             duration,
         )
@@ -478,11 +478,11 @@
 
 # Revision branch info cache
 
-_rbcversion = '-v1'
-_rbcnames = 'rbc-names' + _rbcversion
-_rbcrevs = 'rbc-revs' + _rbcversion
+_rbcversion = b'-v1'
+_rbcnames = b'rbc-names' + _rbcversion
+_rbcrevs = b'rbc-revs' + _rbcversion
 # [4 byte hash prefix][4 byte branch name number with sign bit indicating open]
-_rbcrecfmt = '>4sI'
+_rbcrecfmt = b'>4sI'
 _rbcrecsize = calcsize(_rbcrecfmt)
 _rbcnodelen = 4
 _rbcbranchidxmask = 0x7FFFFFFF
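
Each rev-branch-cache record is eight bytes: a 4-byte node-hash prefix plus a
4-byte branch-name index whose sign bit flags a closed branch. A round-trip
sketch (the _rbccloseflag value used below is an assumption, taken as the
complement of the mask shown above):

    from struct import pack, unpack

    _rbcrecfmt = b'>4sI'
    _rbcbranchidxmask = 0x7FFFFFFF
    _rbccloseflag = 0x80000000  # assumed value, not shown in this hunk

    rec = pack(_rbcrecfmt, b'\xde\xad\xbe\xef', 7 | _rbccloseflag)
    node, branchidx = unpack(_rbcrecfmt, rec)
    assert node == b'\xde\xad\xbe\xef'
    assert branchidx & _rbcbranchidxmask == 7  # branch name index
    assert bool(branchidx & _rbccloseflag)     # branch is closed
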
@@ -523,7 +523,7 @@
             self._rbcsnameslen = len(bndata)  # for verification before writing
             if bndata:
                 self._names = [
-                    encoding.tolocal(bn) for bn in bndata.split('\0')
+                    encoding.tolocal(bn) for bn in bndata.split(b'\0')
                 ]
         except (IOError, OSError):
             if readonly:
@@ -536,7 +536,7 @@
                 self._rbcrevs[:] = data
             except (IOError, OSError) as inst:
                 repo.ui.debug(
-                    "couldn't read revision branch cache: %s\n"
+                    b"couldn't read revision branch cache: %s\n"
                     % stringutil.forcebytestr(inst)
                 )
         # remember number of good records on disk
@@ -554,7 +554,7 @@
         self._rbcnamescount = 0
         self._rbcrevslen = len(self._repo.changelog)
         self._rbcrevs = bytearray(self._rbcrevslen * _rbcrecsize)
-        util.clearcachedproperty(self, '_namesreverse')
+        util.clearcachedproperty(self, b'_namesreverse')
 
     @util.propertycache
     def _namesreverse(self):
@@ -582,7 +582,7 @@
         close = bool(branchidx & _rbccloseflag)
         if close:
             branchidx &= _rbcbranchidxmask
-        if cachenode == '\0\0\0\0':
+        if cachenode == b'\0\0\0\0':
             pass
         elif cachenode == reponode:
             try:
@@ -590,15 +590,15 @@
             except IndexError:
                 # recover from invalid reference to unknown branch
                 self._repo.ui.debug(
-                    "referenced branch names not found"
-                    " - rebuilding revision branch cache from scratch\n"
+                    b"referenced branch names not found"
+                    b" - rebuilding revision branch cache from scratch\n"
                 )
                 self._clear()
         else:
             # rev/node map has changed, invalidate the cache from here up
             self._repo.ui.debug(
-                "history modification detected - truncating "
-                "revision branch cache to revision %d\n" % rev
+                b"history modification detected - truncating "
+                b"revision branch cache to revision %d\n" % rev
             )
             truncate = rbcrevidx + _rbcrecsize
             del self._rbcrevs[truncate:]
@@ -650,7 +650,7 @@
         rbcrevidx = rev * _rbcrecsize
         if len(self._rbcrevs) < rbcrevidx + _rbcrecsize:
             self._rbcrevs.extend(
-                '\0'
+                b'\0'
                 * (len(self._repo.changelog) * _rbcrecsize - len(self._rbcrevs))
             )
         pack_into(_rbcrecfmt, self._rbcrevs, rbcrevidx, node, branchidx)
@@ -658,31 +658,31 @@
 
         tr = self._repo.currenttransaction()
         if tr:
-            tr.addfinalize('write-revbranchcache', self.write)
+            tr.addfinalize(b'write-revbranchcache', self.write)
 
     def write(self, tr=None):
         """Save branch cache if it is dirty."""
         repo = self._repo
         wlock = None
-        step = ''
+        step = b''
         try:
             # write the new names
             if self._rbcnamescount < len(self._names):
                 wlock = repo.wlock(wait=False)
-                step = ' names'
+                step = b' names'
                 self._writenames(repo)
 
             # write the new revs
             start = self._rbcrevslen * _rbcrecsize
             if start != len(self._rbcrevs):
-                step = ''
+                step = b''
                 if wlock is None:
                     wlock = repo.wlock(wait=False)
                 self._writerevs(repo, start)
 
         except (IOError, OSError, error.Abort, error.LockError) as inst:
             repo.ui.debug(
-                "couldn't write revision branch cache%s: %s\n"
+                b"couldn't write revision branch cache%s: %s\n"
                 % (step, stringutil.forcebytestr(inst))
             )
         finally:
@@ -692,20 +692,20 @@
     def _writenames(self, repo):
         """ write the new branch names to revbranchcache """
         if self._rbcnamescount != 0:
-            f = repo.cachevfs.open(_rbcnames, 'ab')
+            f = repo.cachevfs.open(_rbcnames, b'ab')
             if f.tell() == self._rbcsnameslen:
-                f.write('\0')
+                f.write(b'\0')
             else:
                 f.close()
-                repo.ui.debug("%s changed - rewriting it\n" % _rbcnames)
+                repo.ui.debug(b"%s changed - rewriting it\n" % _rbcnames)
                 self._rbcnamescount = 0
                 self._rbcrevslen = 0
         if self._rbcnamescount == 0:
             # before rewriting names, make sure references are removed
             repo.cachevfs.unlinkpath(_rbcrevs, ignoremissing=True)
-            f = repo.cachevfs.open(_rbcnames, 'wb')
+            f = repo.cachevfs.open(_rbcnames, b'wb')
         f.write(
-            '\0'.join(
+            b'\0'.join(
                 encoding.fromlocal(b)
                 for b in self._names[self._rbcnamescount :]
             )
@@ -717,9 +717,11 @@
     def _writerevs(self, repo, start):
         """ write the new revs to revbranchcache """
         revs = min(len(repo.changelog), len(self._rbcrevs) // _rbcrecsize)
-        with repo.cachevfs.open(_rbcrevs, 'ab') as f:
+        with repo.cachevfs.open(_rbcrevs, b'ab') as f:
             if f.tell() != start:
-                repo.ui.debug("truncating cache/%s to %d\n" % (_rbcrevs, start))
+                repo.ui.debug(
+                    b"truncating cache/%s to %d\n" % (_rbcrevs, start)
+                )
                 f.seek(start)
                 if f.tell() != start:
                     start = 0
--- a/mercurial/bundle2.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/bundle2.py	Sun Oct 06 09:48:39 2019 -0400
@@ -179,28 +179,28 @@
 _pack = struct.pack
 _unpack = struct.unpack
 
-_fstreamparamsize = '>i'
-_fpartheadersize = '>i'
-_fparttypesize = '>B'
-_fpartid = '>I'
-_fpayloadsize = '>i'
-_fpartparamcount = '>BB'
+_fstreamparamsize = b'>i'
+_fpartheadersize = b'>i'
+_fparttypesize = b'>B'
+_fpartid = b'>I'
+_fpayloadsize = b'>i'
+_fpartparamcount = b'>BB'
 
 preferedchunksize = 32768
 
-_parttypeforbidden = re.compile('[^a-zA-Z0-9_:-]')
+_parttypeforbidden = re.compile(b'[^a-zA-Z0-9_:-]')
 
 
 def outdebug(ui, message):
     """debug regarding output stream (bundling)"""
-    if ui.configbool('devel', 'bundle2.debug'):
-        ui.debug('bundle2-output: %s\n' % message)
+    if ui.configbool(b'devel', b'bundle2.debug'):
+        ui.debug(b'bundle2-output: %s\n' % message)
 
 
 def indebug(ui, message):
     """debug on input stream (unbundling)"""
-    if ui.configbool('devel', 'bundle2.debug'):
-        ui.debug('bundle2-input: %s\n' % message)
+    if ui.configbool(b'devel', b'bundle2.debug'):
+        ui.debug(b'bundle2-input: %s\n' % message)
 
 
 def validateparttype(parttype):
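
The struct formats at the top of this hunk frame every bundle2 stream: sizes are
signed big-endian ints, with negative values reserved for protocol errors and
interrupt chunks. A minimal sketch of emitting and reading one length-prefixed
block:

    import struct

    _fstreamparamsize = b'>i'  # signed on purpose: < 0 is rejected

    params = b'Compression=GZ'
    stream = struct.pack(_fstreamparamsize, len(params)) + params

    (size,) = struct.unpack(_fstreamparamsize, stream[:4])
    assert size >= 0 and stream[4:4 + size] == params
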
@@ -215,7 +215,7 @@
     The number parameters is variable so we need to build that format
     dynamically.
     """
-    return '>' + ('BB' * nbparams)
+    return b'>' + (b'BB' * nbparams)
 
 
 parthandlermapping = {}
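
_makefpartparamsizes() builds its struct format at runtime: every part parameter
contributes two unsigned bytes (name length, value length), hence
b'>' + b'BB' * nbparams. A quick standalone check of that shape:

    import struct

    def makefpartparamsizes(nbparams):
        return b'>' + (b'BB' * nbparams)

    packed = struct.pack(makefpartparamsizes(2), 3, 5, 7, 0)
    assert len(packed) == 4
    assert struct.unpack(makefpartparamsizes(2), packed) == (3, 5, 7, 0)
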
@@ -307,7 +307,7 @@
     * a way to construct a bundle response when applicable.
     """
 
-    def __init__(self, repo, transactiongetter, captureoutput=True, source=''):
+    def __init__(self, repo, transactiongetter, captureoutput=True, source=b''):
         self.repo = repo
         self.ui = repo.ui
         self.records = unbundlerecords()
@@ -337,8 +337,8 @@
     def addhookargs(self, hookargs):
         if self.hookargs is None:
             raise error.ProgrammingError(
-                'attempted to add hookargs to '
-                'operation after transaction started'
+                b'attempted to add hookargs to '
+                b'operation after transaction started'
             )
         self.hookargs.update(hookargs)
 
@@ -358,11 +358,11 @@
 def applybundle(repo, unbundler, tr, source, url=None, **kwargs):
     # transform me into unbundler.apply() as soon as the freeze is lifted
     if isinstance(unbundler, unbundle20):
-        tr.hookargs['bundle2'] = '1'
-        if source is not None and 'source' not in tr.hookargs:
-            tr.hookargs['source'] = source
-        if url is not None and 'url' not in tr.hookargs:
-            tr.hookargs['url'] = url
+        tr.hookargs[b'bundle2'] = b'1'
+        if source is not None and b'source' not in tr.hookargs:
+            tr.hookargs[b'source'] = source
+        if url is not None and b'url' not in tr.hookargs:
+            tr.hookargs[b'url'] = url
         return processbundle(repo, unbundler, lambda: tr, source=source)
     else:
         # the transactiongetter won't be used, but we might as well set it
@@ -438,11 +438,11 @@
                 raise exc
 
         self.repo.ui.debug(
-            'bundle2-input-bundle: %i parts total\n' % self.count
+            b'bundle2-input-bundle: %i parts total\n' % self.count
         )
 
 
-def processbundle(repo, unbundler, transactiongetter=None, op=None, source=''):
+def processbundle(repo, unbundler, transactiongetter=None, op=None, source=b''):
     """This function process a bundle, apply effect to/from a repo
 
     It iterates over each part then searches for and uses the proper handling
@@ -464,15 +464,15 @@
     # - exception catching
     unbundler.params
     if repo.ui.debugflag:
-        msg = ['bundle2-input-bundle:']
+        msg = [b'bundle2-input-bundle:']
         if unbundler.params:
-            msg.append(' %i params' % len(unbundler.params))
+            msg.append(b' %i params' % len(unbundler.params))
         if op._gettransaction is None or op._gettransaction is _notransaction:
-            msg.append(' no-transaction')
+            msg.append(b' no-transaction')
         else:
-            msg.append(' with-transaction')
-        msg.append('\n')
-        repo.ui.debug(''.join(msg))
+            msg.append(b' with-transaction')
+        msg.append(b'\n')
+        repo.ui.debug(b''.join(msg))
 
     processparts(repo, op, unbundler)
 
@@ -487,48 +487,48 @@
 
 def _processchangegroup(op, cg, tr, source, url, **kwargs):
     ret = cg.apply(op.repo, tr, source, url, **kwargs)
-    op.records.add('changegroup', {'return': ret,})
+    op.records.add(b'changegroup', {b'return': ret,})
     return ret
 
 
 def _gethandler(op, part):
-    status = 'unknown'  # used by debug output
+    status = b'unknown'  # used by debug output
     try:
         handler = parthandlermapping.get(part.type)
         if handler is None:
-            status = 'unsupported-type'
+            status = b'unsupported-type'
             raise error.BundleUnknownFeatureError(parttype=part.type)
-        indebug(op.ui, 'found a handler for part %s' % part.type)
+        indebug(op.ui, b'found a handler for part %s' % part.type)
         unknownparams = part.mandatorykeys - handler.params
         if unknownparams:
             unknownparams = list(unknownparams)
             unknownparams.sort()
-            status = 'unsupported-params (%s)' % ', '.join(unknownparams)
+            status = b'unsupported-params (%s)' % b', '.join(unknownparams)
             raise error.BundleUnknownFeatureError(
                 parttype=part.type, params=unknownparams
             )
-        status = 'supported'
+        status = b'supported'
     except error.BundleUnknownFeatureError as exc:
         if part.mandatory:  # mandatory parts
             raise
-        indebug(op.ui, 'ignoring unsupported advisory part %s' % exc)
+        indebug(op.ui, b'ignoring unsupported advisory part %s' % exc)
         return  # skip to part processing
     finally:
         if op.ui.debugflag:
-            msg = ['bundle2-input-part: "%s"' % part.type]
+            msg = [b'bundle2-input-part: "%s"' % part.type]
             if not part.mandatory:
-                msg.append(' (advisory)')
+                msg.append(b' (advisory)')
             nbmp = len(part.mandatorykeys)
             nbap = len(part.params) - nbmp
             if nbmp or nbap:
-                msg.append(' (params:')
+                msg.append(b' (params:')
                 if nbmp:
-                    msg.append(' %i mandatory' % nbmp)
+                    msg.append(b' %i mandatory' % nbmp)
                 if nbap:
-                    msg.append(' %i advisory' % nbmp)
-                msg.append(')')
-            msg.append(' %s\n' % status)
-            op.ui.debug(''.join(msg))
+                    msg.append(b' %i advisory' % nbmp)
+                msg.append(b')')
+            msg.append(b' %s\n' % status)
+            op.ui.debug(b''.join(msg))
 
     return handler
 
@@ -549,16 +549,16 @@
     output = None
     if op.captureoutput and op.reply is not None:
         op.ui.pushbuffer(error=True, subproc=True)
-        output = ''
+        output = b''
     try:
         handler(op, part)
     finally:
         if output is not None:
             output = op.ui.popbuffer()
         if output:
-            outpart = op.reply.newpart('output', data=output, mandatory=False)
+            outpart = op.reply.newpart(b'output', data=output, mandatory=False)
             outpart.addparam(
-                'in-reply-to', pycompat.bytestr(part.id), mandatory=False
+                b'in-reply-to', pycompat.bytestr(part.id), mandatory=False
             )
 
 
@@ -575,11 +575,11 @@
     for line in blob.splitlines():
         if not line:
             continue
-        if '=' not in line:
+        if b'=' not in line:
             key, vals = line, ()
         else:
-            key, vals = line.split('=', 1)
-            vals = vals.split(',')
+            key, vals = line.split(b'=', 1)
+            vals = vals.split(b',')
         key = urlreq.unquote(key)
         vals = [urlreq.unquote(v) for v in vals]
         caps[key] = vals
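
decodecaps() above and encodecaps() below share a simple text format: one
URL-quoted "key=v1,v2" line per capability, with comma-separated, optional
values. A round-trip sketch (using urllib and str for brevity, where the real
code goes through the urlreq compatibility layer on bytes):

    from urllib.parse import quote

    def encodecaps(caps):
        chunks = []
        for ca in sorted(caps):
            vals = ','.join(quote(v) for v in caps[ca])
            chunks.append('%s=%s' % (quote(ca), vals) if vals else quote(ca))
        return '\n'.join(chunks)

    blob = encodecaps({'changegroup': ['01', '02'], 'digests': []})
    assert blob.splitlines() == ['changegroup=01,02', 'digests']
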
@@ -594,23 +594,23 @@
         ca = urlreq.quote(ca)
         vals = [urlreq.quote(v) for v in vals]
         if vals:
-            ca = "%s=%s" % (ca, ','.join(vals))
+            ca = b"%s=%s" % (ca, b','.join(vals))
         chunks.append(ca)
-    return '\n'.join(chunks)
+    return b'\n'.join(chunks)
 
 
 bundletypes = {
-    "": ("", 'UN'),  # only when using unbundle on ssh and old http servers
+    b"": (b"", b'UN'),  # only when using unbundle on ssh and old http servers
     # since the unification ssh accepts a header but there
     # is no capability signaling it.
-    "HG20": (),  # special-cased below
-    "HG10UN": ("HG10UN", 'UN'),
-    "HG10BZ": ("HG10", 'BZ'),
-    "HG10GZ": ("HG10GZ", 'GZ'),
+    b"HG20": (),  # special-cased below
+    b"HG10UN": (b"HG10UN", b'UN'),
+    b"HG10BZ": (b"HG10", b'BZ'),
+    b"HG10GZ": (b"HG10GZ", b'GZ'),
 }
 
 # hgweb uses this list to communicate its preferred type
-bundlepriority = ['HG10GZ', 'HG10BZ', 'HG10UN']
+bundlepriority = [b'HG10GZ', b'HG10BZ', b'HG10UN']
 
 
 class bundle20(object):
@@ -620,14 +620,14 @@
     populate it. Then call `getchunks` to retrieve all the binary chunks of
     data that compose the bundle2 container."""
 
-    _magicstring = 'HG20'
+    _magicstring = b'HG20'
 
     def __init__(self, ui, capabilities=()):
         self.ui = ui
         self._params = []
         self._parts = []
         self.capabilities = dict(capabilities)
-        self._compengine = util.compengines.forbundletype('UN')
+        self._compengine = util.compengines.forbundletype(b'UN')
         self._compopts = None
         # If compression is being handled by a consumer of the raw
         # data (e.g. the wire protocol), unsetting this flag tells
@@ -636,10 +636,10 @@
 
     def setcompression(self, alg, compopts=None):
         """setup core part compression to <alg>"""
-        if alg in (None, 'UN'):
+        if alg in (None, b'UN'):
             return
-        assert not any(n.lower() == 'compression' for n, v in self._params)
-        self.addparam('Compression', alg)
+        assert not any(n.lower() == b'compression' for n, v in self._params)
+        self.addparam(b'Compression', alg)
         self._compengine = util.compengines.forbundletype(alg)
         self._compopts = compopts
 
@@ -683,15 +683,15 @@
     # methods used to generate the bundle2 stream
     def getchunks(self):
         if self.ui.debugflag:
-            msg = ['bundle2-output-bundle: "%s",' % self._magicstring]
+            msg = [b'bundle2-output-bundle: "%s",' % self._magicstring]
             if self._params:
-                msg.append(' (%i params)' % len(self._params))
-            msg.append(' %i parts total\n' % len(self._parts))
-            self.ui.debug(''.join(msg))
-        outdebug(self.ui, 'start emission of %s stream' % self._magicstring)
+                msg.append(b' (%i params)' % len(self._params))
+            msg.append(b' %i parts total\n' % len(self._parts))
+            self.ui.debug(b''.join(msg))
+        outdebug(self.ui, b'start emission of %s stream' % self._magicstring)
         yield self._magicstring
         param = self._paramchunk()
-        outdebug(self.ui, 'bundle parameter: %s' % param)
+        outdebug(self.ui, b'bundle parameter: %s' % param)
         yield _pack(_fstreamparamsize, len(param))
         if param:
             yield param
@@ -707,20 +707,20 @@
             par = urlreq.quote(par)
             if value is not None:
                 value = urlreq.quote(value)
-                par = '%s=%s' % (par, value)
+                par = b'%s=%s' % (par, value)
             blocks.append(par)
-        return ' '.join(blocks)
+        return b' '.join(blocks)
 
     def _getcorechunk(self):
         """yield chunk for the core part of the bundle
 
         (all but headers and parameters)"""
-        outdebug(self.ui, 'start of parts')
+        outdebug(self.ui, b'start of parts')
         for part in self._parts:
-            outdebug(self.ui, 'bundle part: "%s"' % part.type)
+            outdebug(self.ui, b'bundle part: "%s"' % part.type)
             for chunk in part.getchunks(ui=self.ui):
                 yield chunk
-        outdebug(self.ui, 'end of bundle')
+        outdebug(self.ui, b'end of bundle')
         yield _pack(_fpartheadersize, 0)
 
     def salvageoutput(self):
@@ -730,7 +730,7 @@
         server output"""
         salvaged = []
         for part in self._parts:
-            if part.type.startswith('output'):
+            if part.type.startswith(b'output'):
                 salvaged.append(part.copy())
         return salvaged
 
@@ -768,17 +768,17 @@
     if magicstring is None:
         magicstring = changegroup.readexactly(fp, 4)
     magic, version = magicstring[0:2], magicstring[2:4]
-    if magic != 'HG':
+    if magic != b'HG':
         ui.debug(
-            "error: invalid magic: %r (version %r), should be 'HG'\n"
+            b"error: invalid magic: %r (version %r), should be 'HG'\n"
             % (magic, version)
         )
-        raise error.Abort(_('not a Mercurial bundle'))
+        raise error.Abort(_(b'not a Mercurial bundle'))
     unbundlerclass = formatmap.get(version)
     if unbundlerclass is None:
-        raise error.Abort(_('unknown bundle version %s') % version)
+        raise error.Abort(_(b'unknown bundle version %s') % version)
     unbundler = unbundlerclass(ui, fp)
-    indebug(ui, 'start processing of %s stream' % magicstring)
+    indebug(ui, b'start processing of %s stream' % magicstring)
     return unbundler
 
 
@@ -788,24 +788,24 @@
     This class is fed with a binary stream and yields parts through its
     `iterparts` method."""
 
-    _magicstring = 'HG20'
+    _magicstring = b'HG20'
 
     def __init__(self, ui, fp):
         """If header is specified, we do not read it out of the stream."""
         self.ui = ui
-        self._compengine = util.compengines.forbundletype('UN')
+        self._compengine = util.compengines.forbundletype(b'UN')
         self._compressed = None
         super(unbundle20, self).__init__(fp)
 
     @util.propertycache
     def params(self):
         """dictionary of stream level parameters"""
-        indebug(self.ui, 'reading bundle2 stream parameters')
+        indebug(self.ui, b'reading bundle2 stream parameters')
         params = {}
         paramssize = self._unpack(_fstreamparamsize)[0]
         if paramssize < 0:
             raise error.BundleValueError(
-                'negative bundle param size: %i' % paramssize
+                b'negative bundle param size: %i' % paramssize
             )
         if paramssize:
             params = self._readexact(paramssize)
@@ -815,8 +815,8 @@
     def _processallparams(self, paramsblock):
         """"""
         params = util.sortdict()
-        for p in paramsblock.split(' '):
-            p = p.split('=', 1)
+        for p in paramsblock.split(b' '):
+            p = p.split(b'=', 1)
             p = [urlreq.unquote(i) for i in p]
             if len(p) < 2:
                 p.append(None)
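
The parsing above treats the stream-parameter block as space-separated,
URL-quoted name=value pairs, where a bare name gets a None value. The same
logic, standalone:

    from urllib.parse import unquote_to_bytes

    block = b'Compression=GZ obsmarkers'
    params = {}
    for p in block.split(b' '):
        p = [unquote_to_bytes(i) for i in p.split(b'=', 1)]
        if len(p) < 2:
            p.append(None)
        params[p[0]] = p[1]
    assert params == {b'Compression': b'GZ', b'obsmarkers': None}
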
@@ -842,7 +842,7 @@
             handler = b2streamparamsmap[name.lower()]
         except KeyError:
             if name[0:1].islower():
-                indebug(self.ui, "ignoring unknown parameter %s" % name)
+                indebug(self.ui, b"ignoring unknown parameter %s" % name)
             else:
                 raise error.BundleUnknownFeatureError(params=(name,))
         else:
@@ -857,11 +857,11 @@
         needed to move forward to get general delta enabled.
         """
         yield self._magicstring
-        assert 'params' not in vars(self)
+        assert b'params' not in vars(self)
         paramssize = self._unpack(_fstreamparamsize)[0]
         if paramssize < 0:
             raise error.BundleValueError(
-                'negative bundle param size: %i' % paramssize
+                b'negative bundle param size: %i' % paramssize
             )
         if paramssize:
             params = self._readexact(paramssize)
@@ -869,11 +869,11 @@
             # The payload itself is decompressed below, so drop
             # the compression parameter passed down to compensate.
             outparams = []
-            for p in params.split(' '):
-                k, v = p.split('=', 1)
-                if k.lower() != 'compression':
+            for p in params.split(b' '):
+                k, v = p.split(b'=', 1)
+                if k.lower() != b'compression':
                     outparams.append(p)
-            outparams = ' '.join(outparams)
+            outparams = b' '.join(outparams)
             yield _pack(_fstreamparamsize, len(outparams))
             yield outparams
         else:
@@ -894,7 +894,7 @@
             if size == flaginterrupt:
                 continue
             elif size < 0:
-                raise error.BundleValueError('negative chunk size: %i')
+                raise error.BundleValueError(b'negative chunk size: %i' % size)
             yield self._readexact(size)
 
     def iterparts(self, seekable=False):
@@ -904,7 +904,7 @@
         self.params
         # From there, the payload needs to be decompressed
         self._fp = self._compengine.decompressorreader(self._fp)
-        indebug(self.ui, 'start extraction of bundle2 parts')
+        indebug(self.ui, b'start extraction of bundle2 parts')
         headerblock = self._readpartheader()
         while headerblock is not None:
             part = cls(self.ui, headerblock, self._fp)
@@ -914,7 +914,7 @@
             part.consume()
 
             headerblock = self._readpartheader()
-        indebug(self.ui, 'end of bundle2 stream')
+        indebug(self.ui, b'end of bundle2 stream')
 
     def _readpartheader(self):
         """reads a part header size and return the bytes blob
@@ -923,9 +923,9 @@
         headersize = self._unpack(_fpartheadersize)[0]
         if headersize < 0:
             raise error.BundleValueError(
-                'negative part header size: %i' % headersize
+                b'negative part header size: %i' % headersize
             )
-        indebug(self.ui, 'part header size: %i' % headersize)
+        indebug(self.ui, b'part header size: %i' % headersize)
         if headersize:
             return self._readexact(headersize)
         return None
@@ -936,11 +936,11 @@
 
     def close(self):
         """close underlying file"""
-        if util.safehasattr(self._fp, 'close'):
+        if util.safehasattr(self._fp, b'close'):
             return self._fp.close()
 
 
-formatmap = {'20': unbundle20}
+formatmap = {b'20': unbundle20}
 
 b2streamparamsmap = {}
 
@@ -956,7 +956,7 @@
     return decorator
 
 
-@b2streamparamhandler('compression')
+@b2streamparamhandler(b'compression')
 def processcompression(unbundler, param, value):
     """read compression parameter and install payload decompression"""
     if value not in util.compengines.supportedbundletypes:
@@ -987,7 +987,7 @@
         parttype,
         mandatoryparams=(),
         advisoryparams=(),
-        data='',
+        data=b'',
         mandatory=True,
     ):
         validateparttype(parttype)
@@ -1000,7 +1000,7 @@
         self._seenparams = set()
         for pname, __ in self._mandatoryparams + self._advisoryparams:
             if pname in self._seenparams:
-                raise error.ProgrammingError('duplicated params: %s' % pname)
+                raise error.ProgrammingError(b'duplicated params: %s' % pname)
             self._seenparams.add(pname)
         # status of the part's generation:
         # - None: not started,
@@ -1010,8 +1010,8 @@
         self.mandatory = mandatory
 
     def __repr__(self):
-        cls = "%s.%s" % (self.__class__.__module__, self.__class__.__name__)
-        return '<%s object at %x; id: %s; type: %s; mandatory: %s>' % (
+        cls = b"%s.%s" % (self.__class__.__module__, self.__class__.__name__)
+        return b'<%s object at %x; id: %s; type: %s; mandatory: %s>' % (
             cls,
             id(self),
             self.id,
@@ -1024,7 +1024,7 @@
 
         The new part has the very same content but no partid assigned yet.
         Parts with generated data cannot be copied."""
-        assert not util.safehasattr(self.data, 'next')
+        assert not util.safehasattr(self.data, b'next')
         return self.__class__(
             self.type,
             self._mandatoryparams,
@@ -1041,7 +1041,7 @@
     @data.setter
     def data(self, data):
         if self._generated is not None:
-            raise error.ReadOnlyPartError('part is being generated')
+            raise error.ReadOnlyPartError(b'part is being generated')
         self._data = data
 
     @property
@@ -1054,7 +1054,7 @@
         # make it an immutable tuple to force people through ``addparam``
         return tuple(self._advisoryparams)
 
-    def addparam(self, name, value='', mandatory=True):
+    def addparam(self, name, value=b'', mandatory=True):
         """add a parameter to the part
 
         If 'mandatory' is set to True, the remote handler must claim support
@@ -1063,9 +1063,9 @@
         The 'name' and 'value' cannot exceed 255 bytes each.
         """
         if self._generated is not None:
-            raise error.ReadOnlyPartError('part is being generated')
+            raise error.ReadOnlyPartError(b'part is being generated')
         if name in self._seenparams:
-            raise ValueError('duplicated params: %s' % name)
+            raise ValueError(b'duplicated params: %s' % name)
         self._seenparams.add(name)
         params = self._advisoryparams
         if mandatory:
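
A usage sketch, assuming it runs with this module's namespace in scope (the
part type and parameter values are illustrative):

    part = bundlepart(b'changegroup', data=b'')
    part.addparam(b'version', b'02')                     # mandatory by default
    part.addparam(b'nbchanges', b'42', mandatory=False)  # advisory hint only
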
@@ -1075,39 +1075,39 @@
     # methods used to generate the bundle2 stream
     def getchunks(self, ui):
         if self._generated is not None:
-            raise error.ProgrammingError('part can only be consumed once')
+            raise error.ProgrammingError(b'part can only be consumed once')
         self._generated = False
 
         if ui.debugflag:
-            msg = ['bundle2-output-part: "%s"' % self.type]
+            msg = [b'bundle2-output-part: "%s"' % self.type]
             if not self.mandatory:
-                msg.append(' (advisory)')
+                msg.append(b' (advisory)')
             nbmp = len(self.mandatoryparams)
             nbap = len(self.advisoryparams)
             if nbmp or nbap:
-                msg.append(' (params:')
+                msg.append(b' (params:')
                 if nbmp:
-                    msg.append(' %i mandatory' % nbmp)
+                    msg.append(b' %i mandatory' % nbmp)
                 if nbap:
-                    msg.append(' %i advisory' % nbmp)
-                msg.append(')')
+                    msg.append(b' %i advisory' % nbap)
+                msg.append(b')')
             if not self.data:
-                msg.append(' empty payload')
-            elif util.safehasattr(self.data, 'next') or util.safehasattr(
-                self.data, '__next__'
+                msg.append(b' empty payload')
+            elif util.safehasattr(self.data, b'next') or util.safehasattr(
+                self.data, b'__next__'
             ):
-                msg.append(' streamed payload')
+                msg.append(b' streamed payload')
             else:
-                msg.append(' %i bytes payload' % len(self.data))
-            msg.append('\n')
-            ui.debug(''.join(msg))
+                msg.append(b' %i bytes payload' % len(self.data))
+            msg.append(b'\n')
+            ui.debug(b''.join(msg))
 
         #### header
         if self.mandatory:
             parttype = self.type.upper()
         else:
             parttype = self.type.lower()
-        outdebug(ui, 'part %s: "%s"' % (pycompat.bytestr(self.id), parttype))
+        outdebug(ui, b'part %s: "%s"' % (pycompat.bytestr(self.id), parttype))
         ## parttype
         header = [
             _pack(_fparttypesize, len(parttype)),
@@ -1138,48 +1138,48 @@
             header.append(value)
         ## finalize header
         try:
-            headerchunk = ''.join(header)
+            headerchunk = b''.join(header)
         except TypeError:
             raise TypeError(
                 r'Found a non-bytes trying to '
                 r'build bundle part header: %r' % header
             )
-        outdebug(ui, 'header chunk size: %i' % len(headerchunk))
+        outdebug(ui, b'header chunk size: %i' % len(headerchunk))
         yield _pack(_fpartheadersize, len(headerchunk))
         yield headerchunk
         ## payload
         try:
             for chunk in self._payloadchunks():
-                outdebug(ui, 'payload chunk size: %i' % len(chunk))
+                outdebug(ui, b'payload chunk size: %i' % len(chunk))
                 yield _pack(_fpayloadsize, len(chunk))
                 yield chunk
         except GeneratorExit:
             # GeneratorExit means that nobody is listening for our
             # results anyway, so just bail quickly rather than trying
             # to produce an error part.
-            ui.debug('bundle2-generatorexit\n')
+            ui.debug(b'bundle2-generatorexit\n')
             raise
         except BaseException as exc:
             bexc = stringutil.forcebytestr(exc)
             # backup exception data for later
             ui.debug(
-                'bundle2-input-stream-interrupt: encoding exception %s' % bexc
+                b'bundle2-input-stream-interrupt: encoding exception %s' % bexc
             )
             tb = sys.exc_info()[2]
-            msg = 'unexpected error: %s' % bexc
+            msg = b'unexpected error: %s' % bexc
             interpart = bundlepart(
-                'error:abort', [('message', msg)], mandatory=False
+                b'error:abort', [(b'message', msg)], mandatory=False
             )
             interpart.id = 0
             yield _pack(_fpayloadsize, -1)
             for chunk in interpart.getchunks(ui=ui):
                 yield chunk
-            outdebug(ui, 'closing payload chunk')
+            outdebug(ui, b'closing payload chunk')
             # abort current part payload
             yield _pack(_fpayloadsize, 0)
             pycompat.raisewithtb(exc, tb)
         # end of payload
-        outdebug(ui, 'closing payload chunk')
+        outdebug(ui, b'closing payload chunk')
         yield _pack(_fpayloadsize, 0)
         self._generated = True
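
So the wire format produced by `getchunks` is: a size-prefixed header chunk,
then a run of size-prefixed payload chunks, closed by a zero-length chunk; a
negative size flags an out-of-band interrupt part. A standalone sketch of just
the payload framing, assuming the signed '>i' format used for `_fpayloadsize`:

    import struct

    def framepayload(chunks):
        """yield bundle2-style framed payload chunks plus the 0 terminator"""
        for chunk in chunks:
            yield struct.pack('>i', len(chunk))
            yield chunk
        yield struct.pack('>i', 0)  # end-of-payload marker
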
 
@@ -1189,8 +1189,8 @@
         Exists to handle the different methods to provide data to a part."""
         # we only support fixed size data now.
         # This will be improved in the future.
-        if util.safehasattr(self.data, 'next') or util.safehasattr(
-            self.data, '__next__'
+        if util.safehasattr(self.data, b'next') or util.safehasattr(
+            self.data, b'__next__'
         ):
             buff = util.chunkbuffer(self.data)
             chunk = buff.read(preferedchunksize)
@@ -1223,9 +1223,9 @@
         headersize = self._unpack(_fpartheadersize)[0]
         if headersize < 0:
             raise error.BundleValueError(
-                'negative part header size: %i' % headersize
+                b'negative part header size: %i' % headersize
             )
-        indebug(self.ui, 'part header size: %i\n' % headersize)
+        indebug(self.ui, b'part header size: %i\n' % headersize)
         if headersize:
             return self._readexact(headersize)
         return None
@@ -1233,12 +1233,12 @@
     def __call__(self):
 
         self.ui.debug(
-            'bundle2-input-stream-interrupt:' ' opening out of band context\n'
+            b'bundle2-input-stream-interrupt:' b' opening out of band context\n'
         )
-        indebug(self.ui, 'bundle2 stream interruption, looking for a part.')
+        indebug(self.ui, b'bundle2 stream interruption, looking for a part.')
         headerblock = self._readpartheader()
         if headerblock is None:
-            indebug(self.ui, 'no part found during interruption.')
+            indebug(self.ui, b'no part found during interruption.')
             return
         part = unbundlepart(self.ui, headerblock, self._fp)
         op = interruptoperation(self.ui)
@@ -1252,7 +1252,7 @@
             if not hardabort:
                 part.consume()
         self.ui.debug(
-            'bundle2-input-stream-interrupt:' ' closing out of band context\n'
+            b'bundle2-input-stream-interrupt:' b' closing out of band context\n'
         )
 
 
@@ -1269,10 +1269,10 @@
 
     @property
     def repo(self):
-        raise error.ProgrammingError('no repo access from stream interruption')
+        raise error.ProgrammingError(b'no repo access from stream interruption')
 
     def gettransaction(self):
-        raise TransactionUnavailable('no repo access from stream interruption')
+        raise TransactionUnavailable(b'no repo access from stream interruption')
 
 
 def decodepayloadchunks(ui, fh):
@@ -1281,7 +1281,7 @@
     Part payload data consists of framed chunks. This function takes
     a file handle and emits those chunks.
     """
-    dolog = ui.configbool('devel', 'bundle2.debug')
+    dolog = ui.configbool(b'devel', b'bundle2.debug')
     debug = ui.debug
 
     headerstruct = struct.Struct(_fpayloadsize)
@@ -1292,7 +1292,7 @@
     read = fh.read
 
     chunksize = unpack(readexactly(fh, headersize))[0]
-    indebug(ui, 'payload chunk size: %i' % chunksize)
+    indebug(ui, b'payload chunk size: %i' % chunksize)
 
     # changegroup.readexactly() is inlined below for performance.
     while chunksize:
@@ -1301,8 +1301,8 @@
             if len(s) < chunksize:
                 raise error.Abort(
                     _(
-                        'stream ended unexpectedly '
-                        ' (got %d bytes, expected %d)'
+                        b'stream ended unexpectedly '
+                        b'(got %d bytes, expected %d)'
                     )
                     % (len(s), chunksize)
                 )
@@ -1314,13 +1314,13 @@
             interrupthandler(ui, fh)()
         else:
             raise error.BundleValueError(
-                'negative payload chunk size: %s' % chunksize
+                b'negative payload chunk size: %s' % chunksize
             )
 
         s = read(headersize)
         if len(s) < headersize:
             raise error.Abort(
-                _('stream ended unexpectedly ' ' (got %d bytes, expected %d)')
+                _(b'stream ended unexpectedly ' b'(got %d bytes, expected %d)')
                % (len(s), headersize)
             )
 
@@ -1328,7 +1328,7 @@
 
         # indebug() inlined for performance.
         if dolog:
-            debug('bundle2-input: payload chunk size: %i\n' % chunksize)
+            debug(b'bundle2-input: payload chunk size: %i\n' % chunksize)
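
The read side of that framing, as a standalone sketch (interrupt dispatch is
elided; the real loop hands a negative size to `interrupthandler`):

    import struct

    def iterpayload(fh):
        """yield payload chunks until the zero-size terminator"""
        sizer = struct.Struct('>i')
        while True:
            size = sizer.unpack(fh.read(sizer.size))[0]
            if size == 0:
                return                       # normal end of payload
            if size < 0:
                raise NotImplementedError()  # interrupt part, elided here
            data = fh.read(size)
            if len(data) < size:
                raise EOFError('stream ended unexpectedly')
            yield data
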
 
 
 class unbundlepart(unpackermixin):
@@ -1336,8 +1336,8 @@
 
     def __init__(self, ui, header, fp):
         super(unbundlepart, self).__init__(fp)
-        self._seekable = util.safehasattr(fp, 'seek') and util.safehasattr(
-            fp, 'tell'
+        self._seekable = util.safehasattr(fp, b'seek') and util.safehasattr(
+            fp, b'tell'
         )
         self.ui = ui
         # unbundle state attr
@@ -1384,16 +1384,16 @@
         """read the header and setup the object"""
         typesize = self._unpackheader(_fparttypesize)[0]
         self.type = self._fromheader(typesize)
-        indebug(self.ui, 'part type: "%s"' % self.type)
+        indebug(self.ui, b'part type: "%s"' % self.type)
         self.id = self._unpackheader(_fpartid)[0]
-        indebug(self.ui, 'part id: "%s"' % pycompat.bytestr(self.id))
+        indebug(self.ui, b'part id: "%s"' % pycompat.bytestr(self.id))
         # extract mandatory bit from type
         self.mandatory = self.type != self.type.lower()
         self.type = self.type.lower()
         ## reading parameters
         # param count
         mancount, advcount = self._unpackheader(_fpartparamcount)
-        indebug(self.ui, 'part parameters: %i' % (mancount + advcount))
+        indebug(self.ui, b'part parameters: %i' % (mancount + advcount))
         # param size
         fparamsizes = _makefpartparamsizes(mancount + advcount)
         paramsizes = self._unpackheader(fparamsizes)
@@ -1445,7 +1445,7 @@
         if size is None or len(data) < size:
             if not self.consumed and self._pos:
                 self.ui.debug(
-                    'bundle2-input-part: total payload size %i\n' % self._pos
+                    b'bundle2-input-part: total payload size %i\n' % self._pos
                 )
             self.consumed = True
         return data
@@ -1478,11 +1478,11 @@
     def _payloadchunks(self, chunknum=0):
         '''seek to specified chunk and start yielding data'''
         if len(self._chunkindex) == 0:
-            assert chunknum == 0, 'Must start with chunk 0'
+            assert chunknum == 0, b'Must start with chunk 0'
             self._chunkindex.append((0, self._tellfp()))
         else:
             assert chunknum < len(self._chunkindex), (
-                'Unknown chunk %d' % chunknum
+                b'Unknown chunk %d' % chunknum
             )
             self._seekfp(self._chunkindex[chunknum][1])
 
@@ -1503,7 +1503,7 @@
                 return chunk, 0
             elif ppos > pos:
                 return chunk - 1, pos - self._chunkindex[chunk - 1][0]
-        raise ValueError('Unknown chunk')
+        raise ValueError(b'Unknown chunk')
 
     def tell(self):
         return self._pos
@@ -1521,7 +1521,7 @@
                     chunk = self.read(32768)
             newpos = self._chunkindex[-1][0] - offset
         else:
-            raise ValueError('Unknown whence value: %r' % (whence,))
+            raise ValueError(b'Unknown whence value: %r' % (whence,))
 
         if newpos > self._chunkindex[-1][0] and not self.consumed:
             # Can't use self.consume() here because it advances self._pos.
@@ -1530,14 +1530,14 @@
                 chunk = self.read(32768)
 
         if not 0 <= newpos <= self._chunkindex[-1][0]:
-            raise ValueError('Offset out of range')
+            raise ValueError(b'Offset out of range')
 
         if self._pos != newpos:
             chunk, internaloffset = self._findchunk(newpos)
             self._payloadstream = util.chunkbuffer(self._payloadchunks(chunk))
             adjust = self.read(internaloffset)
             if len(adjust) != internaloffset:
-                raise error.Abort(_('Seek failed\n'))
+                raise error.Abort(_(b'Seek failed\n'))
             self._pos = newpos
 
     def _seekfp(self, offset, whence=0):
@@ -1551,7 +1551,7 @@
         if self._seekable:
             return self._fp.seek(offset, whence)
         else:
-            raise NotImplementedError(_('File pointer is not seekable'))
+            raise NotImplementedError(_(b'File pointer is not seekable'))
 
     def _tellfp(self):
         """return the file offset, or None if file is not seekable
@@ -1575,17 +1575,17 @@
 # These are only the static capabilities.
 # Check the 'getrepocaps' function for the rest.
 capabilities = {
-    'HG20': (),
-    'bookmarks': (),
-    'error': ('abort', 'unsupportedcontent', 'pushraced', 'pushkey'),
-    'listkeys': (),
-    'pushkey': (),
-    'digests': tuple(sorted(util.DIGESTS.keys())),
-    'remote-changegroup': ('http', 'https'),
-    'hgtagsfnodes': (),
-    'rev-branch-cache': (),
-    'phases': ('heads',),
-    'stream': ('v2',),
+    b'HG20': (),
+    b'bookmarks': (),
+    b'error': (b'abort', b'unsupportedcontent', b'pushraced', b'pushkey'),
+    b'listkeys': (),
+    b'pushkey': (),
+    b'digests': tuple(sorted(util.DIGESTS.keys())),
+    b'remote-changegroup': (b'http', b'https'),
+    b'hgtagsfnodes': (),
+    b'rev-branch-cache': (),
+    b'phases': (b'heads',),
+    b'stream': (b'v2',),
 }
 
 
@@ -1598,33 +1598,33 @@
     well as clients advertising their capabilities to servers as part of
     bundle2 requests. The ``role`` argument specifies which is which.
     """
-    if role not in ('client', 'server'):
-        raise error.ProgrammingError('role argument must be client or server')
+    if role not in (b'client', b'server'):
+        raise error.ProgrammingError(b'role argument must be client or server')
 
     caps = capabilities.copy()
-    caps['changegroup'] = tuple(
+    caps[b'changegroup'] = tuple(
         sorted(changegroup.supportedincomingversions(repo))
     )
     if obsolete.isenabled(repo, obsolete.exchangeopt):
-        supportedformat = tuple('V%i' % v for v in obsolete.formats)
-        caps['obsmarkers'] = supportedformat
+        supportedformat = tuple(b'V%i' % v for v in obsolete.formats)
+        caps[b'obsmarkers'] = supportedformat
     if allowpushback:
-        caps['pushback'] = ()
-    cpmode = repo.ui.config('server', 'concurrent-push-mode')
-    if cpmode == 'check-related':
-        caps['checkheads'] = ('related',)
-    if 'phases' in repo.ui.configlist('devel', 'legacy.exchange'):
-        caps.pop('phases')
+        caps[b'pushback'] = ()
+    cpmode = repo.ui.config(b'server', b'concurrent-push-mode')
+    if cpmode == b'check-related':
+        caps[b'checkheads'] = (b'related',)
+    if b'phases' in repo.ui.configlist(b'devel', b'legacy.exchange'):
+        caps.pop(b'phases')
 
     # Don't advertise stream clone support in server mode if not configured.
-    if role == 'server':
+    if role == b'server':
         streamsupported = repo.ui.configbool(
-            'server', 'uncompressed', untrusted=True
+            b'server', b'uncompressed', untrusted=True
         )
-        featuresupported = repo.ui.configbool('server', 'bundle2.stream')
+        featuresupported = repo.ui.configbool(b'server', b'bundle2.stream')
 
         if not streamsupported or not featuresupported:
-            caps.pop('stream')
+            caps.pop(b'stream')
     # Else always advertise support on client, because payload support
     # should always be advertised.
 
@@ -1633,18 +1633,18 @@
 
 def bundle2caps(remote):
     """return the bundle capabilities of a peer as dict"""
-    raw = remote.capable('bundle2')
-    if not raw and raw != '':
+    raw = remote.capable(b'bundle2')
+    if not raw and raw != b'':
         return {}
-    capsblob = urlreq.unquote(remote.capable('bundle2'))
+    capsblob = urlreq.unquote(remote.capable(b'bundle2'))
     return decodecaps(capsblob)
 
 
 def obsmarkersversion(caps):
     """extract the list of supported obsmarkers versions from a bundle2caps dict
     """
-    obscaps = caps.get('obsmarkers', ())
-    return [int(c[1:]) for c in obscaps if c.startswith('V')]
+    obscaps = caps.get(b'obsmarkers', ())
+    return [int(c[1:]) for c in obscaps if c.startswith(b'V')]
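
Client-side usage of these two helpers, as a sketch (`remote` is assumed to be
any peer object exposing capable(); the abort message is illustrative):

    caps = bundle2caps(remote)          # {} when the peer lacks bundle2
    versions = obsmarkersversion(caps)  # {b'obsmarkers': (b'V0', b'V1')} gives [0, 1]
    if 1 not in versions:
        raise error.Abort(b'peer cannot receive version 1 obsmarkers')
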
 
 
 def writenewbundle(
@@ -1659,8 +1659,8 @@
     compression=None,
     compopts=None,
 ):
-    if bundletype.startswith('HG10'):
-        cg = changegroup.makechangegroup(repo, outgoing, '01', source)
+    if bundletype.startswith(b'HG10'):
+        cg = changegroup.makechangegroup(repo, outgoing, b'01', source)
         return writebundle(
             ui,
             cg,
@@ -1670,12 +1670,12 @@
             compression=compression,
             compopts=compopts,
         )
-    elif not bundletype.startswith('HG20'):
-        raise error.ProgrammingError('unknown bundle type: %s' % bundletype)
+    elif not bundletype.startswith(b'HG20'):
+        raise error.ProgrammingError(b'unknown bundle type: %s' % bundletype)
 
     caps = {}
-    if 'obsolescence' in opts:
-        caps['obsmarkers'] = ('V1',)
+    if b'obsolescence' in opts:
+        caps[b'obsmarkers'] = (b'V1',)
     bundle = bundle20(ui, caps)
     bundle.setcompression(compression, compopts)
     _addpartsfromopts(ui, repo, bundle, source, outgoing, opts)
@@ -1694,39 +1694,41 @@
 
     # we might not always want a changegroup in such a bundle, for example in
     # stream bundles
-    if opts.get('changegroup', True):
-        cgversion = opts.get('cg.version')
+    if opts.get(b'changegroup', True):
+        cgversion = opts.get(b'cg.version')
         if cgversion is None:
             cgversion = changegroup.safeversion(repo)
         cg = changegroup.makechangegroup(repo, outgoing, cgversion, source)
-        part = bundler.newpart('changegroup', data=cg.getchunks())
-        part.addparam('version', cg.version)
-        if 'clcount' in cg.extras:
+        part = bundler.newpart(b'changegroup', data=cg.getchunks())
+        part.addparam(b'version', cg.version)
+        if b'clcount' in cg.extras:
             part.addparam(
-                'nbchanges', '%d' % cg.extras['clcount'], mandatory=False
+                b'nbchanges', b'%d' % cg.extras[b'clcount'], mandatory=False
             )
-        if opts.get('phases') and repo.revs(
-            '%ln and secret()', outgoing.missingheads
+        if opts.get(b'phases') and repo.revs(
+            b'%ln and secret()', outgoing.missingheads
         ):
-            part.addparam('targetphase', '%d' % phases.secret, mandatory=False)
-
-    if opts.get('streamv2', False):
+            part.addparam(
+                b'targetphase', b'%d' % phases.secret, mandatory=False
+            )
+
+    if opts.get(b'streamv2', False):
         addpartbundlestream2(bundler, repo, stream=True)
 
-    if opts.get('tagsfnodescache', True):
+    if opts.get(b'tagsfnodescache', True):
         addparttagsfnodescache(repo, bundler, outgoing)
 
-    if opts.get('revbranchcache', True):
+    if opts.get(b'revbranchcache', True):
         addpartrevbranchcache(repo, bundler, outgoing)
 
-    if opts.get('obsolescence', False):
+    if opts.get(b'obsolescence', False):
         obsmarkers = repo.obsstore.relevantmarkers(outgoing.missing)
         buildobsmarkerspart(bundler, obsmarkers)
 
-    if opts.get('phases', False):
+    if opts.get(b'phases', False):
         headsbyphase = phases.subsetphaseheads(repo, outgoing.missing)
         phasedata = phases.binaryencode(headsbyphase)
-        bundler.newpart('phase-heads', data=phasedata)
+        bundler.newpart(b'phase-heads', data=phasedata)
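
The `opts` mapping consulted throughout `_addpartsfromopts` uses plain bytes
keys; a sketch of a typical value, with keys taken from the `opts.get(...)`
calls above:

    opts = {
        b'changegroup': True,      # emit a changegroup part
        b'cg.version': b'02',      # or omit to let safeversion() pick
        b'tagsfnodescache': True,
        b'revbranchcache': True,
        b'obsolescence': False,
        b'phases': True,           # also emit a phase-heads part
    }
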
 
 
 def addparttagsfnodescache(repo, bundler, outgoing):
@@ -1751,7 +1753,7 @@
             chunks.extend([node, fnode])
 
     if chunks:
-        bundler.newpart('hgtagsfnodes', data=''.join(chunks))
+        bundler.newpart(b'hgtagsfnodes', data=b''.join(chunks))
 
 
 def addpartrevbranchcache(repo, bundler, outgoing):
@@ -1774,17 +1776,17 @@
             for n in sorted(closed):
                 yield n
 
-    bundler.newpart('cache:rev-branch-cache', data=generate(), mandatory=False)
+    bundler.newpart(b'cache:rev-branch-cache', data=generate(), mandatory=False)
 
 
 def _formatrequirementsspec(requirements):
-    requirements = [req for req in requirements if req != "shared"]
-    return urlreq.quote(','.join(sorted(requirements)))
+    requirements = [req for req in requirements if req != b"shared"]
+    return urlreq.quote(b','.join(sorted(requirements)))
 
 
 def _formatrequirementsparams(requirements):
     requirements = _formatrequirementsspec(requirements)
-    params = "%s%s" % (urlreq.quote("requirements="), requirements)
+    params = b"%s%s" % (urlreq.quote(b"requirements="), requirements)
     return params
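
A worked example of the two helpers above; the bytes results assume that
urlreq.quote round-trips bytes here, which the b"%s%s" interpolation already
requires:

    >>> _formatrequirementsspec([b'store', b'shared', b'revlogv1'])
    b'revlogv1%2Cstore'
    >>> _formatrequirementsparams([b'store', b'revlogv1'])
    b'requirements%3Drevlogv1%2Cstore'
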
 
 
@@ -1795,13 +1797,13 @@
     if not streamclone.allowservergeneration(repo):
         raise error.Abort(
             _(
-                'stream data requested but server does not allow '
-                'this feature'
+                b'stream data requested but server does not allow '
+                b'this feature'
             ),
             hint=_(
-                'well-behaved clients should not be '
-                'requesting stream data from servers not '
-                'advertising it; the client may be buggy'
+                b'well-behaved clients should not be '
+                b'requesting stream data from servers not '
+                b'advertising it; the client may be buggy'
             ),
         )
 
@@ -1815,11 +1817,11 @@
     excludepats = kwargs.get(r'excludepats')
 
     narrowstream = repo.ui.configbool(
-        'experimental', 'server.stream-narrow-clones'
+        b'experimental', b'server.stream-narrow-clones'
     )
 
     if (includepats or excludepats) and not narrowstream:
-        raise error.Abort(_('server does not support narrow stream clones'))
+        raise error.Abort(_(b'server does not support narrow stream clones'))
 
     includeobsmarkers = False
     if repo.obsstore:
@@ -1827,8 +1829,8 @@
         if not remoteversions:
             raise error.Abort(
                 _(
-                    'server has obsolescence markers, but client '
-                    'cannot receive them via stream clone'
+                    b'server has obsolescence markers, but client '
+                    b'cannot receive them via stream clone'
                 )
             )
         elif repo.obsstore._version in remoteversions:
@@ -1838,10 +1840,10 @@
         repo, includepats, excludepats, includeobsmarkers
     )
     requirements = _formatrequirementsspec(repo.requirements)
-    part = bundler.newpart('stream2', data=it)
-    part.addparam('bytecount', '%d' % bytecount, mandatory=True)
-    part.addparam('filecount', '%d' % filecount, mandatory=True)
-    part.addparam('requirements', requirements, mandatory=True)
+    part = bundler.newpart(b'stream2', data=it)
+    part.addparam(b'bytecount', b'%d' % bytecount, mandatory=True)
+    part.addparam(b'filecount', b'%d' % filecount, mandatory=True)
+    part.addparam(b'requirements', requirements, mandatory=True)
 
 
 def buildobsmarkerspart(bundler, markers):
@@ -1856,9 +1858,9 @@
     remoteversions = obsmarkersversion(bundler.capabilities)
     version = obsolete.commonversion(remoteversions)
     if version is None:
-        raise ValueError('bundler does not support common obsmarker format')
+        raise ValueError(b'bundler does not support common obsmarker format')
     stream = obsolete.encodemarkers(markers, True, version=version)
-    return bundler.newpart('obsmarkers', data=stream)
+    return bundler.newpart(b'obsmarkers', data=stream)
 
 
 def writebundle(
@@ -1872,26 +1874,26 @@
     The bundle file will be deleted in case of errors.
     """
 
-    if bundletype == "HG20":
+    if bundletype == b"HG20":
         bundle = bundle20(ui)
         bundle.setcompression(compression, compopts)
-        part = bundle.newpart('changegroup', data=cg.getchunks())
-        part.addparam('version', cg.version)
-        if 'clcount' in cg.extras:
+        part = bundle.newpart(b'changegroup', data=cg.getchunks())
+        part.addparam(b'version', cg.version)
+        if b'clcount' in cg.extras:
             part.addparam(
-                'nbchanges', '%d' % cg.extras['clcount'], mandatory=False
+                b'nbchanges', b'%d' % cg.extras[b'clcount'], mandatory=False
             )
         chunkiter = bundle.getchunks()
     else:
         # compression argument is only for the bundle2 case
         assert compression is None
-        if cg.version != '01':
+        if cg.version != b'01':
             raise error.Abort(
-                _('old bundle types only supports v1 ' 'changegroups')
+                _(b'old bundle types only support v1 ' b'changegroups')
             )
         header, comp = bundletypes[bundletype]
         if comp not in util.compengines.supportedbundletypes:
-            raise error.Abort(_('unknown stream compression type: %s') % comp)
+            raise error.Abort(_(b'unknown stream compression type: %s') % comp)
         compengine = util.compengines.forbundletype(comp)
 
         def chunkiter():
@@ -1908,7 +1910,7 @@
 
 def combinechangegroupresults(op):
     """logic to combine 0 or more addchangegroup results into one"""
-    results = [r.get('return', 0) for r in op.records['changegroup']]
+    results = [r.get(b'return', 0) for r in op.records[b'changegroup']]
     changedheads = 0
     result = 1
     for ret in results:
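
The `@parthandler` registrations that follow all share one pattern: the
decorator maps a part type to its handler and records which parameters the
handler understands. A sketch of a hypothetical third-party handler (the part
type, parameter, and record names are invented for illustration):

    @parthandler(b'myext:counter', (b'count',))
    def handlecounter(op, inpart):
        """record an advisory counter received from the peer"""
        count = int(inpart.params[b'count'])
        op.records.add(b'myext:counter', {b'count': count})
        inpart.read()  # a handler should consume its part payload
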
@@ -1928,7 +1930,7 @@
 
 
 @parthandler(
-    'changegroup', ('version', 'nbchanges', 'treemanifest', 'targetphase')
+    b'changegroup', (b'version', b'nbchanges', b'treemanifest', b'targetphase')
 )
 def handlechangegroup(op, inpart):
     """apply a changegroup part on the repo
@@ -1939,60 +1941,61 @@
     from . import localrepo
 
     tr = op.gettransaction()
-    unpackerversion = inpart.params.get('version', '01')
+    unpackerversion = inpart.params.get(b'version', b'01')
     # We should raise an appropriate exception here
     cg = changegroup.getunbundler(unpackerversion, inpart, None)
     # the source and url passed here are overwritten by the one contained in
     # the transaction.hookargs argument. So 'bundle2' is a placeholder
     nbchangesets = None
-    if 'nbchanges' in inpart.params:
-        nbchangesets = int(inpart.params.get('nbchanges'))
+    if b'nbchanges' in inpart.params:
+        nbchangesets = int(inpart.params.get(b'nbchanges'))
     if (
-        'treemanifest' in inpart.params
-        and 'treemanifest' not in op.repo.requirements
+        b'treemanifest' in inpart.params
+        and b'treemanifest' not in op.repo.requirements
     ):
         if len(op.repo.changelog) != 0:
             raise error.Abort(
                 _(
-                    "bundle contains tree manifests, but local repo is "
-                    "non-empty and does not use tree manifests"
+                    b"bundle contains tree manifests, but local repo is "
+                    b"non-empty and does not use tree manifests"
                 )
             )
-        op.repo.requirements.add('treemanifest')
+        op.repo.requirements.add(b'treemanifest')
         op.repo.svfs.options = localrepo.resolvestorevfsoptions(
             op.repo.ui, op.repo.requirements, op.repo.features
         )
         op.repo._writerequirements()
     extrakwargs = {}
-    targetphase = inpart.params.get('targetphase')
+    targetphase = inpart.params.get(b'targetphase')
     if targetphase is not None:
         extrakwargs[r'targetphase'] = int(targetphase)
     ret = _processchangegroup(
         op,
         cg,
         tr,
-        'bundle2',
-        'bundle2',
+        b'bundle2',
+        b'bundle2',
         expectedtotal=nbchangesets,
         **extrakwargs
     )
     if op.reply is not None:
         # This is definitely not the final form of this
         # return. But one needs to start somewhere.
-        part = op.reply.newpart('reply:changegroup', mandatory=False)
+        part = op.reply.newpart(b'reply:changegroup', mandatory=False)
         part.addparam(
-            'in-reply-to', pycompat.bytestr(inpart.id), mandatory=False
+            b'in-reply-to', pycompat.bytestr(inpart.id), mandatory=False
         )
-        part.addparam('return', '%i' % ret, mandatory=False)
+        part.addparam(b'return', b'%i' % ret, mandatory=False)
     assert not inpart.read()
 
 
 _remotechangegroupparams = tuple(
-    ['url', 'size', 'digests'] + ['digest:%s' % k for k in util.DIGESTS.keys()]
+    [b'url', b'size', b'digests']
+    + [b'digest:%s' % k for k in util.DIGESTS.keys()]
 )
 
 
-@parthandler('remote-changegroup', _remotechangegroupparams)
+@parthandler(b'remote-changegroup', _remotechangegroupparams)
 def handleremotechangegroup(op, inpart):
     """apply a bundle10 on the repo, given an url and validation information
 
@@ -2010,32 +2013,35 @@
     When multiple digest types are given, all of them are checked.
     """
     try:
-        raw_url = inpart.params['url']
+        raw_url = inpart.params[b'url']
     except KeyError:
-        raise error.Abort(_('remote-changegroup: missing "%s" param') % 'url')
+        raise error.Abort(_(b'remote-changegroup: missing "%s" param') % b'url')
     parsed_url = util.url(raw_url)
-    if parsed_url.scheme not in capabilities['remote-changegroup']:
+    if parsed_url.scheme not in capabilities[b'remote-changegroup']:
         raise error.Abort(
-            _('remote-changegroup does not support %s urls') % parsed_url.scheme
+            _(b'remote-changegroup does not support %s urls')
+            % parsed_url.scheme
         )
 
     try:
-        size = int(inpart.params['size'])
+        size = int(inpart.params[b'size'])
     except ValueError:
         raise error.Abort(
-            _('remote-changegroup: invalid value for param "%s"') % 'size'
+            _(b'remote-changegroup: invalid value for param "%s"') % b'size'
         )
     except KeyError:
-        raise error.Abort(_('remote-changegroup: missing "%s" param') % 'size')
+        raise error.Abort(
+            _(b'remote-changegroup: missing "%s" param') % b'size'
+        )
 
     digests = {}
-    for typ in inpart.params.get('digests', '').split():
-        param = 'digest:%s' % typ
+    for typ in inpart.params.get(b'digests', b'').split():
+        param = b'digest:%s' % typ
         try:
             value = inpart.params[param]
         except KeyError:
             raise error.Abort(
-                _('remote-changegroup: missing "%s" param') % param
+                _(b'remote-changegroup: missing "%s" param') % param
             )
         digests[typ] = value
 
@@ -2047,35 +2053,35 @@
     cg = exchange.readbundle(op.repo.ui, real_part, raw_url)
     if not isinstance(cg, changegroup.cg1unpacker):
         raise error.Abort(
-            _('%s: not a bundle version 1.0') % util.hidepassword(raw_url)
+            _(b'%s: not a bundle version 1.0') % util.hidepassword(raw_url)
         )
-    ret = _processchangegroup(op, cg, tr, 'bundle2', 'bundle2')
+    ret = _processchangegroup(op, cg, tr, b'bundle2', b'bundle2')
     if op.reply is not None:
         # This is definitely not the final form of this
         # return. But one needs to start somewhere.
-        part = op.reply.newpart('reply:changegroup')
+        part = op.reply.newpart(b'reply:changegroup')
         part.addparam(
-            'in-reply-to', pycompat.bytestr(inpart.id), mandatory=False
+            b'in-reply-to', pycompat.bytestr(inpart.id), mandatory=False
         )
-        part.addparam('return', '%i' % ret, mandatory=False)
+        part.addparam(b'return', b'%i' % ret, mandatory=False)
     try:
         real_part.validate()
     except error.Abort as e:
         raise error.Abort(
-            _('bundle at %s is corrupted:\n%s')
+            _(b'bundle at %s is corrupted:\n%s')
             % (util.hidepassword(raw_url), bytes(e))
         )
     assert not inpart.read()
 
 
-@parthandler('reply:changegroup', ('return', 'in-reply-to'))
+@parthandler(b'reply:changegroup', (b'return', b'in-reply-to'))
 def handlereplychangegroup(op, inpart):
-    ret = int(inpart.params['return'])
-    replyto = int(inpart.params['in-reply-to'])
-    op.records.add('changegroup', {'return': ret}, replyto)
-
-
-@parthandler('check:bookmarks')
+    ret = int(inpart.params[b'return'])
+    replyto = int(inpart.params[b'in-reply-to'])
+    op.records.add(b'changegroup', {b'return': ret}, replyto)
+
+
+@parthandler(b'check:bookmarks')
 def handlecheckbookmarks(op, inpart):
     """check location of bookmarks
 
@@ -2086,16 +2092,16 @@
     bookdata = bookmarks.binarydecode(inpart)
 
     msgstandard = (
-        'remote repository changed while pushing - please try again '
-        '(bookmark "%s" move from %s to %s)'
+        b'remote repository changed while pushing - please try again '
+        b'(bookmark "%s" move from %s to %s)'
     )
     msgmissing = (
-        'remote repository changed while pushing - please try again '
-        '(bookmark "%s" is missing, expected %s)'
+        b'remote repository changed while pushing - please try again '
+        b'(bookmark "%s" is missing, expected %s)'
     )
     msgexist = (
-        'remote repository changed while pushing - please try again '
-        '(bookmark "%s" set on %s, expected missing)'
+        b'remote repository changed while pushing - please try again '
+        b'(bookmark "%s" set on %s, expected missing)'
     )
     for book, node in bookdata:
         currentnode = op.repo._bookmarks.get(book)
@@ -2113,7 +2119,7 @@
             raise error.PushRaced(finalmsg)
 
 
-@parthandler('check:heads')
+@parthandler(b'check:heads')
 def handlecheckheads(op, inpart):
     """check that head of the repo did not change
 
@@ -2126,15 +2132,15 @@
         h = inpart.read(20)
     assert not h
     # Trigger a transaction so that we are guaranteed to have the lock now.
-    if op.ui.configbool('experimental', 'bundle2lazylocking'):
+    if op.ui.configbool(b'experimental', b'bundle2lazylocking'):
         op.gettransaction()
     if sorted(heads) != sorted(op.repo.heads()):
         raise error.PushRaced(
-            'remote repository changed while pushing - ' 'please try again'
+            b'remote repository changed while pushing - ' b'please try again'
         )
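
The matching client side simply concatenates the heads it saw into the part
payload, 20 bytes per node; a sketch (the real producer lives in exchange.py):

    part = bundler.newpart(b'check:heads')
    part.data = b''.join(sorted(repo.heads()))  # concatenated 20-byte nodes
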
 
 
-@parthandler('check:updated-heads')
+@parthandler(b'check:updated-heads')
 def handlecheckupdatedheads(op, inpart):
     """check for race on the heads touched by a push
 
@@ -2151,7 +2157,7 @@
         h = inpart.read(20)
     assert not h
     # trigger a transaction so that we are guaranteed to have the lock now.
-    if op.ui.configbool('experimental', 'bundle2lazylocking'):
+    if op.ui.configbool(b'experimental', b'bundle2lazylocking'):
         op.gettransaction()
 
     currentheads = set()
@@ -2161,11 +2167,12 @@
     for h in heads:
         if h not in currentheads:
             raise error.PushRaced(
-                'remote repository changed while pushing - ' 'please try again'
+                b'remote repository changed while pushing - '
+                b'please try again'
             )
 
 
-@parthandler('check:phases')
+@parthandler(b'check:phases')
 def handlecheckphases(op, inpart):
     """check that phase boundaries of the repository did not change
 
@@ -2176,8 +2183,8 @@
     cl = unfi.changelog
     phasecache = unfi._phasecache
     msg = (
-        'remote repository changed while pushing - please try again '
-        '(%s is %s expected %s)'
+        b'remote repository changed while pushing - please try again '
+        b'(%s is %s expected %s)'
     )
     for expectedphase, nodes in enumerate(phasetonodes):
         for n in nodes:
@@ -2191,14 +2198,14 @@
                 raise error.PushRaced(finalmsg)
 
 
-@parthandler('output')
+@parthandler(b'output')
 def handleoutput(op, inpart):
     """forward output captured on the server to the client"""
     for line in inpart.read().splitlines():
-        op.ui.status(_('remote: %s\n') % line)
-
-
-@parthandler('replycaps')
+        op.ui.status(_(b'remote: %s\n') % line)
+
+
+@parthandler(b'replycaps')
 def handlereplycaps(op, inpart):
     """Notify that a reply bundle should be created
 
@@ -2212,89 +2219,90 @@
     """Sub-class of Abort that denotes an error from a bundle2 part."""
 
 
-@parthandler('error:abort', ('message', 'hint'))
+@parthandler(b'error:abort', (b'message', b'hint'))
 def handleerrorabort(op, inpart):
     """Used to transmit abort error over the wire"""
     raise AbortFromPart(
-        inpart.params['message'], hint=inpart.params.get('hint')
+        inpart.params[b'message'], hint=inpart.params.get(b'hint')
     )
 
 
 @parthandler(
-    'error:pushkey', ('namespace', 'key', 'new', 'old', 'ret', 'in-reply-to')
+    b'error:pushkey',
+    (b'namespace', b'key', b'new', b'old', b'ret', b'in-reply-to'),
 )
 def handleerrorpushkey(op, inpart):
     """Used to transmit failure of a mandatory pushkey over the wire"""
     kwargs = {}
-    for name in ('namespace', 'key', 'new', 'old', 'ret'):
+    for name in (b'namespace', b'key', b'new', b'old', b'ret'):
         value = inpart.params.get(name)
         if value is not None:
             kwargs[name] = value
     raise error.PushkeyFailed(
-        inpart.params['in-reply-to'], **pycompat.strkwargs(kwargs)
+        inpart.params[b'in-reply-to'], **pycompat.strkwargs(kwargs)
     )
 
 
-@parthandler('error:unsupportedcontent', ('parttype', 'params'))
+@parthandler(b'error:unsupportedcontent', (b'parttype', b'params'))
 def handleerrorunsupportedcontent(op, inpart):
     """Used to transmit unknown content error over the wire"""
     kwargs = {}
-    parttype = inpart.params.get('parttype')
+    parttype = inpart.params.get(b'parttype')
     if parttype is not None:
-        kwargs['parttype'] = parttype
-    params = inpart.params.get('params')
+        kwargs[b'parttype'] = parttype
+    params = inpart.params.get(b'params')
     if params is not None:
-        kwargs['params'] = params.split('\0')
+        kwargs[b'params'] = params.split(b'\0')
 
     raise error.BundleUnknownFeatureError(**pycompat.strkwargs(kwargs))
 
 
-@parthandler('error:pushraced', ('message',))
+@parthandler(b'error:pushraced', (b'message',))
 def handleerrorpushraced(op, inpart):
     """Used to transmit push race error over the wire"""
-    raise error.ResponseError(_('push failed:'), inpart.params['message'])
-
-
-@parthandler('listkeys', ('namespace',))
+    raise error.ResponseError(_(b'push failed:'), inpart.params[b'message'])
+
+
+@parthandler(b'listkeys', (b'namespace',))
 def handlelistkeys(op, inpart):
     """retrieve pushkey namespace content stored in a bundle2"""
-    namespace = inpart.params['namespace']
+    namespace = inpart.params[b'namespace']
     r = pushkey.decodekeys(inpart.read())
-    op.records.add('listkeys', (namespace, r))
-
-
-@parthandler('pushkey', ('namespace', 'key', 'old', 'new'))
+    op.records.add(b'listkeys', (namespace, r))
+
+
+@parthandler(b'pushkey', (b'namespace', b'key', b'old', b'new'))
 def handlepushkey(op, inpart):
     """process a pushkey request"""
     dec = pushkey.decode
-    namespace = dec(inpart.params['namespace'])
-    key = dec(inpart.params['key'])
-    old = dec(inpart.params['old'])
-    new = dec(inpart.params['new'])
+    namespace = dec(inpart.params[b'namespace'])
+    key = dec(inpart.params[b'key'])
+    old = dec(inpart.params[b'old'])
+    new = dec(inpart.params[b'new'])
     # Grab the transaction to ensure that we have the lock before performing the
     # pushkey.
-    if op.ui.configbool('experimental', 'bundle2lazylocking'):
+    if op.ui.configbool(b'experimental', b'bundle2lazylocking'):
         op.gettransaction()
     ret = op.repo.pushkey(namespace, key, old, new)
-    record = {'namespace': namespace, 'key': key, 'old': old, 'new': new}
-    op.records.add('pushkey', record)
+    record = {b'namespace': namespace, b'key': key, b'old': old, b'new': new}
+    op.records.add(b'pushkey', record)
     if op.reply is not None:
-        rpart = op.reply.newpart('reply:pushkey')
+        rpart = op.reply.newpart(b'reply:pushkey')
         rpart.addparam(
-            'in-reply-to', pycompat.bytestr(inpart.id), mandatory=False
+            b'in-reply-to', pycompat.bytestr(inpart.id), mandatory=False
         )
-        rpart.addparam('return', '%i' % ret, mandatory=False)
+        rpart.addparam(b'return', b'%i' % ret, mandatory=False)
     if inpart.mandatory and not ret:
         kwargs = {}
-        for key in ('namespace', 'key', 'new', 'old', 'ret'):
+        for key in (b'namespace', b'key', b'new', b'old', b'ret'):
             if key in inpart.params:
                 kwargs[key] = inpart.params[key]
         raise error.PushkeyFailed(
-            partid='%d' % inpart.id, **pycompat.strkwargs(kwargs)
+            partid=b'%d' % inpart.id, **pycompat.strkwargs(kwargs)
         )
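
For reference, the client builds the matching part with pushkey-encoded
values; a sketch where `oldhexnode` and `newhexnode` are placeholder hex node
strings:

    enc = pushkey.encode
    part = bundler.newpart(b'pushkey')
    part.addparam(b'namespace', enc(b'bookmarks'))
    part.addparam(b'key', enc(b'@'))
    part.addparam(b'old', enc(oldhexnode))  # b'' when the key is being created
    part.addparam(b'new', enc(newhexnode))
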
 
 
-@parthandler('bookmarks')
+@parthandler(b'bookmarks')
 def handlebookmark(op, inpart):
     """transmit bookmark information
 
@@ -2313,26 +2321,30 @@
     """
     changes = bookmarks.binarydecode(inpart)
 
-    pushkeycompat = op.repo.ui.configbool('server', 'bookmarks-pushkey-compat')
-    bookmarksmode = op.modes.get('bookmarks', 'apply')
-
-    if bookmarksmode == 'apply':
+    pushkeycompat = op.repo.ui.configbool(
+        b'server', b'bookmarks-pushkey-compat'
+    )
+    bookmarksmode = op.modes.get(b'bookmarks', b'apply')
+
+    if bookmarksmode == b'apply':
         tr = op.gettransaction()
         bookstore = op.repo._bookmarks
         if pushkeycompat:
             allhooks = []
             for book, node in changes:
                 hookargs = tr.hookargs.copy()
-                hookargs['pushkeycompat'] = '1'
-                hookargs['namespace'] = 'bookmarks'
-                hookargs['key'] = book
-                hookargs['old'] = nodemod.hex(bookstore.get(book, ''))
-                hookargs['new'] = nodemod.hex(node if node is not None else '')
+                hookargs[b'pushkeycompat'] = b'1'
+                hookargs[b'namespace'] = b'bookmarks'
+                hookargs[b'key'] = book
+                hookargs[b'old'] = nodemod.hex(bookstore.get(book, b''))
+                hookargs[b'new'] = nodemod.hex(
+                    node if node is not None else b''
+                )
                 allhooks.append(hookargs)
 
             for hookargs in allhooks:
                 op.repo.hook(
-                    'prepushkey', throw=True, **pycompat.strkwargs(hookargs)
+                    b'prepushkey', throw=True, **pycompat.strkwargs(hookargs)
                 )
 
         bookstore.applychanges(op.repo, op.gettransaction(), changes)
@@ -2341,72 +2353,78 @@
 
             def runhook():
                 for hookargs in allhooks:
-                    op.repo.hook('pushkey', **pycompat.strkwargs(hookargs))
+                    op.repo.hook(b'pushkey', **pycompat.strkwargs(hookargs))
 
             op.repo._afterlock(runhook)
 
-    elif bookmarksmode == 'records':
+    elif bookmarksmode == b'records':
         for book, node in changes:
-            record = {'bookmark': book, 'node': node}
-            op.records.add('bookmarks', record)
+            record = {b'bookmark': book, b'node': node}
+            op.records.add(b'bookmarks', record)
     else:
-        raise error.ProgrammingError('unkown bookmark mode: %s' % bookmarksmode)
-
-
-@parthandler('phase-heads')
+        raise error.ProgrammingError(
+            b'unknown bookmark mode: %s' % bookmarksmode
+        )
+
+
+@parthandler(b'phase-heads')
 def handlephases(op, inpart):
     """apply phases from bundle part to repo"""
     headsbyphase = phases.binarydecode(inpart)
     phases.updatephases(op.repo.unfiltered(), op.gettransaction, headsbyphase)
 
 
-@parthandler('reply:pushkey', ('return', 'in-reply-to'))
+@parthandler(b'reply:pushkey', (b'return', b'in-reply-to'))
 def handlepushkeyreply(op, inpart):
     """retrieve the result of a pushkey request"""
-    ret = int(inpart.params['return'])
-    partid = int(inpart.params['in-reply-to'])
-    op.records.add('pushkey', {'return': ret}, partid)
-
-
-@parthandler('obsmarkers')
+    ret = int(inpart.params[b'return'])
+    partid = int(inpart.params[b'in-reply-to'])
+    op.records.add(b'pushkey', {b'return': ret}, partid)
+
+
+@parthandler(b'obsmarkers')
 def handleobsmarker(op, inpart):
     """add a stream of obsmarkers to the repo"""
     tr = op.gettransaction()
     markerdata = inpart.read()
-    if op.ui.config('experimental', 'obsmarkers-exchange-debug'):
-        op.ui.write('obsmarker-exchange: %i bytes received\n' % len(markerdata))
+    if op.ui.config(b'experimental', b'obsmarkers-exchange-debug'):
+        op.ui.write(
+            b'obsmarker-exchange: %i bytes received\n' % len(markerdata)
+        )
     # The mergemarkers call will crash if marker creation is not enabled.
     # we want to avoid this if the part is advisory.
     if not inpart.mandatory and op.repo.obsstore.readonly:
-        op.repo.ui.debug('ignoring obsolescence markers, feature not enabled\n')
+        op.repo.ui.debug(
+            b'ignoring obsolescence markers, feature not enabled\n'
+        )
         return
     new = op.repo.obsstore.mergemarkers(tr, markerdata)
     op.repo.invalidatevolatilesets()
-    op.records.add('obsmarkers', {'new': new})
+    op.records.add(b'obsmarkers', {b'new': new})
     if op.reply is not None:
-        rpart = op.reply.newpart('reply:obsmarkers')
+        rpart = op.reply.newpart(b'reply:obsmarkers')
         rpart.addparam(
-            'in-reply-to', pycompat.bytestr(inpart.id), mandatory=False
+            b'in-reply-to', pycompat.bytestr(inpart.id), mandatory=False
         )
-        rpart.addparam('new', '%i' % new, mandatory=False)
-
-
-@parthandler('reply:obsmarkers', ('new', 'in-reply-to'))
+        rpart.addparam(b'new', b'%i' % new, mandatory=False)
+
+
+@parthandler(b'reply:obsmarkers', (b'new', b'in-reply-to'))
 def handleobsmarkerreply(op, inpart):
     """retrieve the result of a pushkey request"""
-    ret = int(inpart.params['new'])
-    partid = int(inpart.params['in-reply-to'])
-    op.records.add('obsmarkers', {'new': ret}, partid)
-
-
-@parthandler('hgtagsfnodes')
+    ret = int(inpart.params[b'new'])
+    partid = int(inpart.params[b'in-reply-to'])
+    op.records.add(b'obsmarkers', {b'new': ret}, partid)
+
+
+@parthandler(b'hgtagsfnodes')
 def handlehgtagsfnodes(op, inpart):
     """Applies .hgtags fnodes cache entries to the local repo.
 
     Payload is pairs of 20 byte changeset nodes and filenodes.
     """
     # Grab the transaction so we ensure that we have the lock at this point.
-    if op.ui.configbool('experimental', 'bundle2lazylocking'):
+    if op.ui.configbool(b'experimental', b'bundle2lazylocking'):
         op.gettransaction()
     cache = tags.hgtagsfnodescache(op.repo.unfiltered())
 
@@ -2415,19 +2433,19 @@
         node = inpart.read(20)
         fnode = inpart.read(20)
         if len(node) < 20 or len(fnode) < 20:
-            op.ui.debug('ignoring incomplete received .hgtags fnodes data\n')
+            op.ui.debug(b'ignoring incomplete received .hgtags fnodes data\n')
             break
         cache.setfnode(node, fnode)
         count += 1
 
     cache.write()
-    op.ui.debug('applied %i hgtags fnodes cache entries\n' % count)
-
-
-rbcstruct = struct.Struct('>III')
-
-
-@parthandler('cache:rev-branch-cache')
+    op.ui.debug(b'applied %i hgtags fnodes cache entries\n' % count)
+
+
+rbcstruct = struct.Struct(b'>III')
+
+
+@parthandler(b'cache:rev-branch-cache')
 def handlerbc(op, inpart):
     """receive a rev-branch-cache payload and update the local cache
 
@@ -2460,34 +2478,34 @@
     cache.write()
 
 
-@parthandler('pushvars')
+@parthandler(b'pushvars')
 def bundle2getvars(op, part):
     '''unbundle a bundle2 containing shellvars on the server'''
     # An option to disable unbundling on server-side for security reasons
-    if op.ui.configbool('push', 'pushvars.server'):
+    if op.ui.configbool(b'push', b'pushvars.server'):
         hookargs = {}
         for key, value in part.advisoryparams:
             key = key.upper()
             # We want pushed variables to have USERVAR_ prepended so we know
             # they came from the --pushvar flag.
-            key = "USERVAR_" + key
+            key = b"USERVAR_" + key
             hookargs[key] = value
         op.addhookargs(hookargs)
 
 
-@parthandler('stream2', ('requirements', 'filecount', 'bytecount'))
+@parthandler(b'stream2', (b'requirements', b'filecount', b'bytecount'))
 def handlestreamv2bundle(op, part):
 
-    requirements = urlreq.unquote(part.params['requirements']).split(',')
-    filecount = int(part.params['filecount'])
-    bytecount = int(part.params['bytecount'])
+    requirements = urlreq.unquote(part.params[b'requirements']).split(b',')
+    filecount = int(part.params[b'filecount'])
+    bytecount = int(part.params[b'bytecount'])
 
     repo = op.repo
     if len(repo):
-        msg = _('cannot apply stream clone to non empty repository')
+        msg = _(b'cannot apply stream clone to non-empty repository')
         raise error.Abort(msg)
 
-    repo.ui.debug('applying stream bundle\n')
+    repo.ui.debug(b'applying stream bundle\n')
     streamclone.applybundlev2(repo, part, filecount, bytecount, requirements)
 
 
@@ -2509,7 +2527,7 @@
     """
     commonnodes = set()
     cl = repo.changelog
-    for r in repo.revs("::%ln", common):
+    for r in repo.revs(b"::%ln", common):
         commonnodes.add(cl.node(r))
     if commonnodes:
         # XXX: we should only send the filelogs (and treemanifest). user
@@ -2525,13 +2543,13 @@
             {nodemod.nullid},
             list(commonnodes),
             False,
-            'narrow_widen',
+            b'narrow_widen',
             changelog=False,
         )
 
-        part = bundler.newpart('changegroup', data=cgdata)
-        part.addparam('version', cgversion)
-        if 'treemanifest' in repo.requirements:
-            part.addparam('treemanifest', '1')
+        part = bundler.newpart(b'changegroup', data=cgdata)
+        part.addparam(b'version', cgversion)
+        if b'treemanifest' in repo.requirements:
+            part.addparam(b'treemanifest', b'1')
 
     return bundler
--- a/mercurial/bundlerepo.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/bundlerepo.py	Sun Oct 06 09:48:39 2019 -0400
@@ -72,12 +72,12 @@
             for p in (p1, p2):
                 if p not in self.nodemap:
                     raise error.LookupError(
-                        p, self.indexfile, _("unknown parent")
+                        p, self.indexfile, _(b"unknown parent")
                     )
 
             if deltabase not in self.nodemap:
                 raise LookupError(
-                    deltabase, self.indexfile, _('unknown delta base')
+                    deltabase, self.indexfile, _(b'unknown delta base')
                 )
 
             baserev = self.rev(deltabase)
@@ -133,7 +133,7 @@
             chain.append(iterrev)
             iterrev = self.index[iterrev][3]
         if iterrev == nullrev:
-            rawtext = ''
+            rawtext = b''
         elif rawtext is None:
             r = super(bundlerevlog, self)._rawtext(
                 self.node(iterrev), iterrev, _df=_df
@@ -170,7 +170,7 @@
 
 class bundlemanifest(bundlerevlog, manifest.manifestrevlog):
     def __init__(
-        self, opener, cgunpacker, linkmapper, dirlogstarts=None, dir=''
+        self, opener, cgunpacker, linkmapper, dirlogstarts=None, dir=b''
     ):
         manifest.manifestrevlog.__init__(self, opener, tree=dir)
         bundlerevlog.__init__(
@@ -178,7 +178,7 @@
         )
         if dirlogstarts is None:
             dirlogstarts = {}
-            if self.bundle.version == "03":
+            if self.bundle.version == b"03":
                 dirlogstarts = _getfilestarts(self.bundle)
         self._dirlogstarts = dirlogstarts
         self._linkmapper = linkmapper
@@ -212,7 +212,7 @@
 class bundlephasecache(phases.phasecache):
     def __init__(self, *args, **kwargs):
         super(bundlephasecache, self).__init__(*args, **kwargs)
-        if util.safehasattr(self, 'opener'):
+        if util.safehasattr(self, b'opener'):
             self.opener = vfsmod.readonlyvfs(self.opener)
 
     def write(self):
@@ -230,7 +230,7 @@
 def _getfilestarts(cgunpacker):
     filespos = {}
     for chunkdata in iter(cgunpacker.filelogheader, {}):
-        fname = chunkdata['filename']
+        fname = chunkdata[b'filename']
         filespos[fname] = cgunpacker.tell()
         for chunk in iter(lambda: cgunpacker.deltachunk(None), {}):
             pass
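
`_getfilestarts` leans twice on the two-argument `iter(callable, sentinel)`
idiom: the unpacker methods return `{}` once a section is exhausted, which
ends the loop. A self-contained illustration of the idiom:

    import functools
    import io

    fh = io.BytesIO(b'abcdefgh')
    # iter(callable, sentinel) keeps calling until the sentinel comes back
    for block in iter(functools.partial(fh.read, 3), b''):
        print(block)  # b'abc', then b'def', then b'gh'
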
@@ -254,10 +254,10 @@
         self._tempparent = tempparent
         self._url = url
 
-        self.ui.setconfig('phases', 'publish', False, 'bundlerepo')
+        self.ui.setconfig(b'phases', b'publish', False, b'bundlerepo')
 
         self.tempfile = None
-        f = util.posixfile(bundlepath, "rb")
+        f = util.posixfile(bundlepath, b"rb")
         bundle = exchange.readbundle(self.ui, f, bundlepath)
 
         if isinstance(bundle, bundle2.unbundle20):
@@ -266,17 +266,17 @@
 
             cgpart = None
             for part in bundle.iterparts(seekable=True):
-                if part.type == 'changegroup':
+                if part.type == b'changegroup':
                     if cgpart:
                         raise NotImplementedError(
-                            "can't process " "multiple changegroups"
+                            b"can't process " b"multiple changegroups"
                         )
                     cgpart = part
 
                 self._handlebundle2part(bundle, part)
 
             if not cgpart:
-                raise error.Abort(_("No changegroups found"))
+                raise error.Abort(_(b"No changegroups found"))
 
             # This is required to placate a later consumer, which expects
             # the payload offset to be at the beginning of the changegroup.
@@ -288,14 +288,16 @@
         elif isinstance(bundle, changegroup.cg1unpacker):
             if bundle.compressed():
                 f = self._writetempbundle(
-                    bundle.read, '.hg10un', header='HG10UN'
+                    bundle.read, b'.hg10un', header=b'HG10UN'
                 )
                 bundle = exchange.readbundle(self.ui, f, bundlepath, self.vfs)
 
             self._bundlefile = bundle
             self._cgunpacker = bundle
         else:
-            raise error.Abort(_('bundle type %s cannot be read') % type(bundle))
+            raise error.Abort(
+                _(b'bundle type %s cannot be read') % type(bundle)
+            )
 
         # dict with the mapping 'filename' -> position in the changegroup.
         self._cgfilespos = {}
@@ -309,24 +311,24 @@
         )
 
     def _handlebundle2part(self, bundle, part):
-        if part.type != 'changegroup':
+        if part.type != b'changegroup':
             return
 
         cgstream = part
-        version = part.params.get('version', '01')
+        version = part.params.get(b'version', b'01')
         legalcgvers = changegroup.supportedincomingversions(self)
         if version not in legalcgvers:
-            msg = _('Unsupported changegroup version: %s')
+            msg = _(b'Unsupported changegroup version: %s')
             raise error.Abort(msg % version)
         if bundle.compressed():
-            cgstream = self._writetempbundle(part.read, '.cg%sun' % version)
+            cgstream = self._writetempbundle(part.read, b'.cg%sun' % version)
 
-        self._cgunpacker = changegroup.getunbundler(version, cgstream, 'UN')
+        self._cgunpacker = changegroup.getunbundler(version, cgstream, b'UN')
 
-    def _writetempbundle(self, readfn, suffix, header=''):
+    def _writetempbundle(self, readfn, suffix, header=b''):
         """Write a temporary file to disk
         """
-        fdtemp, temp = self.vfs.mkstemp(prefix="hg-bundle-", suffix=suffix)
+        fdtemp, temp = self.vfs.mkstemp(prefix=b"hg-bundle-", suffix=suffix)
         self.tempfile = temp
 
         with os.fdopen(fdtemp, r'wb') as fptemp:
@@ -337,7 +339,7 @@
                     break
                 fptemp.write(chunk)
 
-        return self.vfs.open(self.tempfile, mode="rb")
+        return self.vfs.open(self.tempfile, mode=b"rb")
 
     @localrepo.unfilteredpropertycache
     def _phasecache(self):
@@ -432,7 +434,7 @@
     def setparents(self, p1, p2=nullid):
         p1rev = self.changelog.rev(p1)
         p2rev = self.changelog.rev(p2)
-        msg = _("setting parent to node %s that only exists in the bundle\n")
+        msg = _(b"setting parent to node %s that only exists in the bundle\n")
         if self.changelog.repotiprev < p1rev:
             self.ui.warn(msg % nodemod.hex(p1))
         if self.changelog.repotiprev < p2rev:
@@ -442,28 +444,28 @@
 
 def instance(ui, path, create, intents=None, createopts=None):
     if create:
-        raise error.Abort(_('cannot create new bundle repository'))
+        raise error.Abort(_(b'cannot create new bundle repository'))
     # internal config: bundle.mainreporoot
-    parentpath = ui.config("bundle", "mainreporoot")
+    parentpath = ui.config(b"bundle", b"mainreporoot")
     if not parentpath:
         # try to find the correct path to the working directory repo
         parentpath = cmdutil.findrepo(encoding.getcwd())
         if parentpath is None:
-            parentpath = ''
+            parentpath = b''
     if parentpath:
         # Try to make the full path relative so we get a nice, short URL.
         # In particular, we don't want temp dir names in test outputs.
         cwd = encoding.getcwd()
         if parentpath == cwd:
-            parentpath = ''
+            parentpath = b''
         else:
             cwd = pathutil.normasprefix(cwd)
             if parentpath.startswith(cwd):
                 parentpath = parentpath[len(cwd) :]
     u = util.url(path)
     path = u.localpath()
-    if u.scheme == 'bundle':
-        s = path.split("+", 1)
+    if u.scheme == b'bundle':
+        s = path.split(b"+", 1)
         if len(s) == 1:
             repopath, bundlename = parentpath, s[0]
         else:
@@ -477,9 +479,9 @@
 def makebundlerepository(ui, repopath, bundlepath):
     """Make a bundle repository object based on repo and bundle paths."""
     if repopath:
-        url = 'bundle:%s+%s' % (util.expandpath(repopath), bundlepath)
+        url = b'bundle:%s+%s' % (util.expandpath(repopath), bundlepath)
     else:
-        url = 'bundle:%s' % bundlepath
+        url = b'bundle:%s' % bundlepath
 
     # Because we can't make any guarantees about the type of the base
     # repository, we can't have a static class representing the bundle
@@ -565,23 +567,25 @@
         # create a bundle (uncompressed if peer repo is not local)
 
         # developer config: devel.legacy.exchange
-        legexc = ui.configlist('devel', 'legacy.exchange')
-        forcebundle1 = 'bundle2' not in legexc and 'bundle1' in legexc
+        legexc = ui.configlist(b'devel', b'legacy.exchange')
+        forcebundle1 = b'bundle2' not in legexc and b'bundle1' in legexc
         canbundle2 = (
             not forcebundle1
-            and peer.capable('getbundle')
-            and peer.capable('bundle2')
+            and peer.capable(b'getbundle')
+            and peer.capable(b'bundle2')
         )
         if canbundle2:
             with peer.commandexecutor() as e:
                 b2 = e.callcommand(
-                    'getbundle',
+                    b'getbundle',
                     {
-                        'source': 'incoming',
-                        'common': common,
-                        'heads': rheads,
-                        'bundlecaps': exchange.caps20to10(repo, role='client'),
-                        'cg': True,
+                        b'source': b'incoming',
+                        b'common': common,
+                        b'heads': rheads,
+                        b'bundlecaps': exchange.caps20to10(
+                            repo, role=b'client'
+                        ),
+                        b'cg': True,
                     },
                 ).result()
 
@@ -589,41 +593,41 @@
                     ui, b2._forwardchunks(), bundlename
                 )
         else:
-            if peer.capable('getbundle'):
+            if peer.capable(b'getbundle'):
                 with peer.commandexecutor() as e:
                     cg = e.callcommand(
-                        'getbundle',
+                        b'getbundle',
                         {
-                            'source': 'incoming',
-                            'common': common,
-                            'heads': rheads,
+                            b'source': b'incoming',
+                            b'common': common,
+                            b'heads': rheads,
                         },
                     ).result()
-            elif onlyheads is None and not peer.capable('changegroupsubset'):
+            elif onlyheads is None and not peer.capable(b'changegroupsubset'):
                 # compat with older servers when pulling all remote heads
 
                 with peer.commandexecutor() as e:
                     cg = e.callcommand(
-                        'changegroup',
-                        {'nodes': incoming, 'source': 'incoming',},
+                        b'changegroup',
+                        {b'nodes': incoming, b'source': b'incoming',},
                     ).result()
 
                 rheads = None
             else:
                 with peer.commandexecutor() as e:
                     cg = e.callcommand(
-                        'changegroupsubset',
+                        b'changegroupsubset',
                         {
-                            'bases': incoming,
-                            'heads': rheads,
-                            'source': 'incoming',
+                            b'bases': incoming,
+                            b'heads': rheads,
+                            b'source': b'incoming',
                         },
                     ).result()
 
             if localrepo:
-                bundletype = "HG10BZ"
+                bundletype = b"HG10BZ"
             else:
-                bundletype = "HG10UN"
+                bundletype = b"HG10UN"
             fname = bundle = bundle2.writebundle(ui, cg, bundlename, bundletype)
         # keep written bundle?
         if bundlename:
@@ -649,7 +653,7 @@
 
         with peer.commandexecutor() as e:
             remotephases = e.callcommand(
-                'listkeys', {'namespace': 'phases',}
+                b'listkeys', {b'namespace': b'phases',}
             ).result()
 
         pullop = exchange.pulloperation(bundlerepo, peer, heads=reponodes)
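
Note: the capability checks byteified above degrade gracefully for old
servers. A reduced sketch of the legacy (non-bundle2) fallback order, using
hypothetical pickcommand/fakepeer stand-ins rather than the real
peer.commandexecutor() protocol:

    def pickcommand(peer, onlyheads):
        # mirrors the fallback above: prefer getbundle, and fall back to
        # the whole-repo 'changegroup' command for servers that predate
        # changegroupsubset
        if peer.capable(b'getbundle'):
            return b'getbundle'
        if onlyheads is None and not peer.capable(b'changegroupsubset'):
            return b'changegroup'
        return b'changegroupsubset'

    class fakepeer(object):  # stand-in, not a real Mercurial peer class
        def __init__(self, caps):
            self._caps = set(caps)

        def capable(self, name):
            return name in self._caps

    assert pickcommand(fakepeer([b'getbundle']), None) == b'getbundle'
    assert pickcommand(fakepeer([]), None) == b'changegroup'
    assert pickcommand(fakepeer([]), [b'head']) == b'changegroupsubset'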
--- a/mercurial/cacheutil.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/cacheutil.py	Sun Oct 06 09:48:39 2019 -0400
@@ -13,10 +13,10 @@
     """return the list of cache file valuable to copy during a clone"""
     # In local clones we're copying all nodes, not just served
     # ones. Therefore copy all branch caches over.
-    cachefiles = ['branch2']
-    cachefiles += ['branch2-%s' % f for f in repoview.filtertable]
-    cachefiles += ['rbc-names-v1', 'rbc-revs-v1']
-    cachefiles += ['tags2']
-    cachefiles += ['tags2-%s' % f for f in repoview.filtertable]
-    cachefiles += ['hgtagsfnodes1']
+    cachefiles = [b'branch2']
+    cachefiles += [b'branch2-%s' % f for f in repoview.filtertable]
+    cachefiles += [b'rbc-names-v1', b'rbc-revs-v1']
+    cachefiles += [b'tags2']
+    cachefiles += [b'tags2-%s' % f for f in repoview.filtertable]
+    cachefiles += [b'hgtagsfnodes1']
     return cachefiles
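
For illustration, the comprehensions above expand to one branch2/tags2 cache
file per repoview filter; a sketch with a stand-in filter table (the real
keys come from mercurial.repoview.filtertable):

    filtertable = (b'visible', b'served', b'immutable', b'base')  # stand-in
    cachefiles = [b'branch2'] + [b'branch2-%s' % f for f in filtertable]
    assert cachefiles[:3] == [b'branch2', b'branch2-visible', b'branch2-served']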
--- a/mercurial/cffi/bdiff.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/cffi/bdiff.py	Sun Oct 06 09:48:39 2019 -0400
@@ -17,11 +17,11 @@
 
 
 def blocks(sa, sb):
-    a = ffi.new("struct bdiff_line**")
-    b = ffi.new("struct bdiff_line**")
-    ac = ffi.new("char[]", str(sa))
-    bc = ffi.new("char[]", str(sb))
-    l = ffi.new("struct bdiff_hunk*")
+    a = ffi.new(b"struct bdiff_line**")
+    b = ffi.new(b"struct bdiff_line**")
+    ac = ffi.new(b"char[]", str(sa))
+    bc = ffi.new(b"char[]", str(sb))
+    l = ffi.new(b"struct bdiff_hunk*")
     try:
         an = lib.bdiff_splitlines(ac, len(sa), a)
         bn = lib.bdiff_splitlines(bc, len(sb), b)
@@ -45,11 +45,11 @@
 
 
 def bdiff(sa, sb):
-    a = ffi.new("struct bdiff_line**")
-    b = ffi.new("struct bdiff_line**")
-    ac = ffi.new("char[]", str(sa))
-    bc = ffi.new("char[]", str(sb))
-    l = ffi.new("struct bdiff_hunk*")
+    a = ffi.new(b"struct bdiff_line**")
+    b = ffi.new(b"struct bdiff_line**")
+    ac = ffi.new(b"char[]", str(sa))
+    bc = ffi.new(b"char[]", str(sb))
+    l = ffi.new(b"struct bdiff_hunk*")
     try:
         an = lib.bdiff_splitlines(ac, len(sa), a)
         bn = lib.bdiff_splitlines(bc, len(sb), b)
@@ -66,7 +66,7 @@
                 lgt = (b[0] + h.b1).l - (b[0] + lb).l
                 rl.append(
                     struct.pack(
-                        ">lll",
+                        b">lll",
                         (a[0] + la).l - a[0].l,
                         (a[0] + h.a1).l - a[0].l,
                         lgt,
@@ -81,4 +81,4 @@
         lib.free(a[0])
         lib.free(b[0])
         lib.bdiff_freehunks(l.next)
-    return "".join(rl)
+    return b"".join(rl)
--- a/mercurial/cffi/bdiffbuild.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/cffi/bdiffbuild.py	Sun Oct 06 09:48:39 2019 -0400
@@ -5,10 +5,10 @@
 
 ffi = cffi.FFI()
 with open(
-    os.path.join(os.path.join(os.path.dirname(__file__), '..'), 'bdiff.c')
+    os.path.join(os.path.join(os.path.dirname(__file__), r'..'), r'bdiff.c')
 ) as f:
     ffi.set_source(
-        "mercurial.cffi._bdiff", f.read(), include_dirs=['mercurial']
+        b"mercurial.cffi._bdiff", f.read(), include_dirs=[b'mercurial']
     )
 ffi.cdef(
     """
--- a/mercurial/cffi/mpatch.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/cffi/mpatch.py	Sun Oct 06 09:48:39 2019 -0400
@@ -18,8 +18,8 @@
 @ffi.def_extern()
 def cffi_get_next_item(arg, pos):
     all, bins = ffi.from_handle(arg)
-    container = ffi.new("struct mpatch_flist*[1]")
-    to_pass = ffi.new("char[]", str(bins[pos]))
+    container = ffi.new(b"struct mpatch_flist*[1]")
+    to_pass = ffi.new(b"char[]", str(bins[pos]))
     all.append(to_pass)
     r = lib.mpatch_decode(to_pass, len(to_pass) - 1, container)
     if r < 0:
@@ -35,15 +35,15 @@
     arg = (all, bins)
     patch = lib.mpatch_fold(ffi.new_handle(arg), lib.cffi_get_next_item, 0, lgt)
     if not patch:
-        raise mpatchError("cannot decode chunk")
+        raise mpatchError(b"cannot decode chunk")
     outlen = lib.mpatch_calcsize(len(text), patch)
     if outlen < 0:
         lib.mpatch_lfree(patch)
-        raise mpatchError("inconsistency detected")
-    buf = ffi.new("char[]", outlen)
+        raise mpatchError(b"inconsistency detected")
+    buf = ffi.new(b"char[]", outlen)
     if lib.mpatch_apply(buf, text, len(text), patch) < 0:
         lib.mpatch_lfree(patch)
-        raise mpatchError("error applying patches")
+        raise mpatchError(b"error applying patches")
     res = ffi.buffer(buf, outlen)[:]
     lib.mpatch_lfree(patch)
     return res
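
The fragments decoded above use bdiff's wire format: a '>lll' header
(start, end, length of replacement data) followed by the data itself. A
self-contained sketch of applying one such fragment, with a hypothetical
applyone helper standing in for the C fold/apply machinery:

    import struct

    def applyone(text, delta):
        # one bdiff/mpatch fragment: replace text[start:end] with `data`
        start, end, lgt = struct.unpack(b">lll", delta[:12])
        data = delta[12 : 12 + lgt]
        return text[:start] + data + text[end:]

    base = b"line1\nline2\n"
    delta = struct.pack(b">lll", 6, 12, 6) + b"LINE2\n"
    assert applyone(base, delta) == b"line1\nLINE2\n"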
--- a/mercurial/cffi/mpatchbuild.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/cffi/mpatchbuild.py	Sun Oct 06 09:48:39 2019 -0400
@@ -5,11 +5,11 @@
 
 ffi = cffi.FFI()
 mpatch_c = os.path.join(
-    os.path.join(os.path.dirname(__file__), '..', 'mpatch.c')
+    os.path.join(os.path.dirname(__file__), r'..', r'mpatch.c')
 )
 with open(mpatch_c) as f:
     ffi.set_source(
-        "mercurial.cffi._mpatch", f.read(), include_dirs=["mercurial"]
+        b"mercurial.cffi._mpatch", f.read(), include_dirs=[b"mercurial"]
     )
 ffi.cdef(
     """
--- a/mercurial/cffi/osutil.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/cffi/osutil.py	Sun Oct 06 09:48:39 2019 -0400
@@ -40,8 +40,8 @@
             self.st_mtime = st_mtime
             self.st_size = st_size
 
-    tv_sec_ofs = ffi.offsetof("struct timespec", "tv_sec")
-    buf = ffi.new("char[]", listdir_batch_size)
+    tv_sec_ofs = ffi.offsetof(b"struct timespec", b"tv_sec")
+    buf = ffi.new(b"char[]", listdir_batch_size)
 
     def listdirinternal(dfd, req, stat, skip):
         ret = []
@@ -51,20 +51,20 @@
                 break
             if r == -1:
                 raise OSError(ffi.errno, os.strerror(ffi.errno))
-            cur = ffi.cast("val_attrs_t*", buf)
+            cur = ffi.cast(b"val_attrs_t*", buf)
             for i in range(r):
                 lgt = cur.length
-                assert lgt == ffi.cast('uint32_t*', cur)[0]
+                assert lgt == ffi.cast(r'uint32_t*', cur)[0]
                 ofs = cur.name_info.attr_dataoffset
                 str_lgt = cur.name_info.attr_length
-                base_ofs = ffi.offsetof('val_attrs_t', 'name_info')
+                base_ofs = ffi.offsetof(r'val_attrs_t', r'name_info')
                 name = str(
                     ffi.buffer(
-                        ffi.cast("char*", cur) + base_ofs + ofs, str_lgt - 1
+                        ffi.cast(b"char*", cur) + base_ofs + ofs, str_lgt - 1
                     )
                 )
                 tp = attrkinds[cur.obj_type]
-                if name == "." or name == "..":
+                if name == b"." or name == b"..":
                     continue
                 if skip == name and tp == statmod.S_ISDIR:
                     return []
@@ -85,12 +85,12 @@
                 else:
                     ret.append((name, tp))
                 cur = ffi.cast(
-                    "val_attrs_t*", int(ffi.cast("intptr_t", cur)) + lgt
+                    b"val_attrs_t*", int(ffi.cast(b"intptr_t", cur)) + lgt
                 )
         return ret
 
     def listdir(path, stat=False, skip=None):
-        req = ffi.new("struct attrlist*")
+        req = ffi.new(b"struct attrlist*")
         req.bitmapcount = lib.ATTR_BIT_MAP_COUNT
         req.commonattr = (
             lib.ATTR_CMN_RETURNED_ATTRS
--- a/mercurial/cffi/osutilbuild.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/cffi/osutilbuild.py	Sun Oct 06 09:48:39 2019 -0400
@@ -4,7 +4,7 @@
 
 ffi = cffi.FFI()
 ffi.set_source(
-    "mercurial.cffi._osutil",
+    b"mercurial.cffi._osutil",
     """
 #include <sys/attr.h>
 #include <sys/vnode.h>
@@ -22,7 +22,7 @@
     off_t             datalength;
 } __attribute__((aligned(4), packed)) val_attrs_t;
 """,
-    include_dirs=['mercurial'],
+    include_dirs=[r'mercurial'],
 )
 ffi.cdef(
     '''
--- a/mercurial/changegroup.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/changegroup.py	Sun Oct 06 09:48:39 2019 -0400
@@ -30,11 +30,11 @@
 
 from .interfaces import repository
 
-_CHANGEGROUPV1_DELTA_HEADER = struct.Struct("20s20s20s20s")
-_CHANGEGROUPV2_DELTA_HEADER = struct.Struct("20s20s20s20s20s")
-_CHANGEGROUPV3_DELTA_HEADER = struct.Struct(">20s20s20s20s20sH")
+_CHANGEGROUPV1_DELTA_HEADER = struct.Struct(b"20s20s20s20s")
+_CHANGEGROUPV2_DELTA_HEADER = struct.Struct(b"20s20s20s20s20s")
+_CHANGEGROUPV3_DELTA_HEADER = struct.Struct(b">20s20s20s20s20sH")
 
-LFS_REQUIREMENT = 'lfs'
+LFS_REQUIREMENT = b'lfs'
 
 readexactly = util.readexactly
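
For reference, the three delta header layouts above grow monotonically: cg2
adds an explicit delta base to cg1's (node, p1, p2, linknode), and cg3
appends a big-endian 16-bit flags field; a sketch of the resulting sizes:

    import struct

    cg1 = struct.Struct(b"20s20s20s20s")       # node, p1, p2, linknode
    cg2 = struct.Struct(b"20s20s20s20s20s")    # ... + deltabase
    cg3 = struct.Struct(b">20s20s20s20s20sH")  # ... + flags (big-endian)
    assert (cg1.size, cg2.size, cg3.size) == (80, 100, 102)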
 
@@ -42,22 +42,22 @@
 def getchunk(stream):
     """return the next chunk from stream as a string"""
     d = readexactly(stream, 4)
-    l = struct.unpack(">l", d)[0]
+    l = struct.unpack(b">l", d)[0]
     if l <= 4:
         if l:
-            raise error.Abort(_("invalid chunk length %d") % l)
-        return ""
+            raise error.Abort(_(b"invalid chunk length %d") % l)
+        return b""
     return readexactly(stream, l - 4)
 
 
 def chunkheader(length):
     """return a changegroup chunk header (string)"""
-    return struct.pack(">l", length + 4)
+    return struct.pack(b">l", length + 4)
 
 
 def closechunk():
     """return a changegroup chunk header (string) for a zero-length chunk"""
-    return struct.pack(">l", 0)
+    return struct.pack(b">l", 0)
 
 
 def _fileheader(path):
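
The chunk framing handled by getchunk()/chunkheader()/closechunk() above is
a 4-byte big-endian length that counts its own four bytes, with a length of
0 acting as the end-of-stream marker. A minimal round-trip sketch (the
writechunk helper is hypothetical):

    import io
    import struct

    def writechunk(fh, data):
        # the length prefix includes its own 4 bytes, as in chunkheader()
        fh.write(struct.pack(b">l", len(data) + 4))
        fh.write(data)

    fh = io.BytesIO()
    writechunk(fh, b'payload')
    fh.write(struct.pack(b">l", 0))  # closechunk(): zero-length terminator
    fh.seek(0)
    l = struct.unpack(b">l", fh.read(4))[0]
    assert fh.read(l - 4) == b'payload'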
@@ -77,13 +77,13 @@
     try:
         if filename:
             if vfs:
-                fh = vfs.open(filename, "wb")
+                fh = vfs.open(filename, b"wb")
             else:
                 # Increase default buffer size because default is usually
                 # small (4k is common on Linux).
-                fh = open(filename, "wb", 131072)
+                fh = open(filename, b"wb", 131072)
         else:
-            fd, filename = pycompat.mkstemp(prefix="hg-bundle-", suffix=".hg")
+            fd, filename = pycompat.mkstemp(prefix=b"hg-bundle-", suffix=b".hg")
             fh = os.fdopen(fd, r"wb")
         cleanup = filename
         for c in chunks:
@@ -121,16 +121,16 @@
 
     deltaheader = _CHANGEGROUPV1_DELTA_HEADER
     deltaheadersize = deltaheader.size
-    version = '01'
+    version = b'01'
     _grouplistcount = 1  # One list of files after the manifests
 
     def __init__(self, fh, alg, extras=None):
         if alg is None:
-            alg = 'UN'
+            alg = b'UN'
         if alg not in util.compengines.supportedbundletypes:
-            raise error.Abort(_('unknown stream compression type: %s') % alg)
-        if alg == 'BZ':
-            alg = '_truncatedBZ'
+            raise error.Abort(_(b'unknown stream compression type: %s') % alg)
+        if alg == b'BZ':
+            alg = b'_truncatedBZ'
 
         compengine = util.compengines.forbundletype(alg)
         self._stream = compengine.decompressorreader(fh)
@@ -141,7 +141,7 @@
     # These methods (compressed, read, seek, tell) all appear to only
     # be used by bundlerepo, but it's a little hard to tell.
     def compressed(self):
-        return self._type is not None and self._type != 'UN'
+        return self._type is not None and self._type != b'UN'
 
     def read(self, l):
         return self._stream.read(l)
@@ -157,10 +157,10 @@
 
     def _chunklength(self):
         d = readexactly(self._stream, 4)
-        l = struct.unpack(">l", d)[0]
+        l = struct.unpack(b">l", d)[0]
         if l <= 4:
             if l:
-                raise error.Abort(_("invalid chunk length %d") % l)
+                raise error.Abort(_(b"invalid chunk length %d") % l)
             return 0
         if self.callback:
             self.callback()
@@ -180,7 +180,7 @@
         if not l:
             return {}
         fname = readexactly(self._stream, l)
-        return {'filename': fname}
+        return {b'filename': fname}
 
     def _deltaheader(self, headertuple, prevnode):
         node, p1, p2, cs = headertuple
@@ -280,7 +280,7 @@
         repo = repo.unfiltered()
 
         def csmap(x):
-            repo.ui.debug("add changeset %s\n" % short(x))
+            repo.ui.debug(b"add changeset %s\n" % short(x))
             return len(cl)
 
         def revmap(x):
@@ -293,10 +293,10 @@
             # case we use the top level data. We overwrite the argument
             # because we need to use the top level value (if they exist)
             # in this function.
-            srctype = tr.hookargs.setdefault('source', srctype)
-            tr.hookargs.setdefault('url', url)
+            srctype = tr.hookargs.setdefault(b'source', srctype)
+            tr.hookargs.setdefault(b'url', url)
             repo.hook(
-                'prechangegroup', throw=True, **pycompat.strkwargs(tr.hookargs)
+                b'prechangegroup', throw=True, **pycompat.strkwargs(tr.hookargs)
             )
 
             # write changelog data to temp files so concurrent readers
@@ -307,10 +307,10 @@
 
             trp = weakref.proxy(tr)
             # pull off the changeset group
-            repo.ui.status(_("adding changesets\n"))
+            repo.ui.status(_(b"adding changesets\n"))
             clstart = len(cl)
             progress = repo.ui.makeprogress(
-                _('changesets'), unit=_('chunks'), total=expectedtotal
+                _(b'changesets'), unit=_(b'chunks'), total=expectedtotal
             )
             self.callback = progress.increment
 
@@ -326,8 +326,8 @@
 
             if not cgnodes:
                 repo.ui.develwarn(
-                    'applied empty changelog from changegroup',
-                    config='warn-empty-changegroup',
+                    b'applied empty changelog from changegroup',
+                    config=b'warn-empty-changegroup',
                 )
             clend = len(cl)
             changesets = clend - clstart
@@ -335,16 +335,16 @@
             self.callback = None
 
             # pull off the manifest group
-            repo.ui.status(_("adding manifests\n"))
+            repo.ui.status(_(b"adding manifests\n"))
             # We know that we'll never have more manifests than we had
             # changesets.
             progress = repo.ui.makeprogress(
-                _('manifests'), unit=_('chunks'), total=changesets
+                _(b'manifests'), unit=_(b'chunks'), total=changesets
             )
             self._unpackmanifests(repo, revmap, trp, progress)
 
             needfiles = {}
-            if repo.ui.configbool('server', 'validate'):
+            if repo.ui.configbool(b'server', b'validate'):
                 cl = repo.changelog
                 ml = repo.manifestlog
                 # validate incoming csets have their manifests
@@ -356,16 +356,16 @@
                         needfiles.setdefault(f, set()).add(n)
 
             # process the files
-            repo.ui.status(_("adding file changes\n"))
+            repo.ui.status(_(b"adding file changes\n"))
             newrevs, newfiles = _addchangegroupfiles(
                 repo, self, revmap, trp, efiles, needfiles
             )
 
             # making sure the value exists
-            tr.changes.setdefault('changegroup-count-changesets', 0)
-            tr.changes.setdefault('changegroup-count-revisions', 0)
-            tr.changes.setdefault('changegroup-count-files', 0)
-            tr.changes.setdefault('changegroup-count-heads', 0)
+            tr.changes.setdefault(b'changegroup-count-changesets', 0)
+            tr.changes.setdefault(b'changegroup-count-revisions', 0)
+            tr.changes.setdefault(b'changegroup-count-files', 0)
+            tr.changes.setdefault(b'changegroup-count-heads', 0)
 
             # some code use bundle operation for internal purpose. They usually
             # set `ui.quiet` to do this outside of user sight. Size the report
@@ -377,9 +377,9 @@
             # something better, but this is a good first step to allow the "end
             # of transaction report" to pass tests.
             if not repo.ui.quiet:
-                tr.changes['changegroup-count-changesets'] += changesets
-                tr.changes['changegroup-count-revisions'] += newrevs
-                tr.changes['changegroup-count-files'] += newfiles
+                tr.changes[b'changegroup-count-changesets'] += changesets
+                tr.changes[b'changegroup-count-revisions'] += newrevs
+                tr.changes[b'changegroup-count-files'] += newfiles
 
             deltaheads = 0
             if oldheads:
@@ -391,27 +391,27 @@
 
             # see previous comment about checking ui.quiet
             if not repo.ui.quiet:
-                tr.changes['changegroup-count-heads'] += deltaheads
+                tr.changes[b'changegroup-count-heads'] += deltaheads
             repo.invalidatevolatilesets()
 
             if changesets > 0:
-                if 'node' not in tr.hookargs:
-                    tr.hookargs['node'] = hex(cl.node(clstart))
-                    tr.hookargs['node_last'] = hex(cl.node(clend - 1))
+                if b'node' not in tr.hookargs:
+                    tr.hookargs[b'node'] = hex(cl.node(clstart))
+                    tr.hookargs[b'node_last'] = hex(cl.node(clend - 1))
                     hookargs = dict(tr.hookargs)
                 else:
                     hookargs = dict(tr.hookargs)
-                    hookargs['node'] = hex(cl.node(clstart))
-                    hookargs['node_last'] = hex(cl.node(clend - 1))
+                    hookargs[b'node'] = hex(cl.node(clstart))
+                    hookargs[b'node_last'] = hex(cl.node(clend - 1))
                 repo.hook(
-                    'pretxnchangegroup',
+                    b'pretxnchangegroup',
                     throw=True,
                     **pycompat.strkwargs(hookargs)
                 )
 
             added = [cl.node(r) for r in pycompat.xrange(clstart, clend)]
             phaseall = None
-            if srctype in ('push', 'serve'):
+            if srctype in (b'push', b'serve'):
                 # Old servers can not push the boundary themselves.
                 # New servers won't push the boundary if changeset already
                 # exists locally as secret
@@ -442,24 +442,24 @@
                     if clstart >= len(repo):
                         return
 
-                    repo.hook("changegroup", **pycompat.strkwargs(hookargs))
+                    repo.hook(b"changegroup", **pycompat.strkwargs(hookargs))
 
                     for n in added:
                         args = hookargs.copy()
-                        args['node'] = hex(n)
-                        del args['node_last']
-                        repo.hook("incoming", **pycompat.strkwargs(args))
+                        args[b'node'] = hex(n)
+                        del args[b'node_last']
+                        repo.hook(b"incoming", **pycompat.strkwargs(args))
 
                     newheads = [h for h in repo.heads() if h not in oldheads]
                     repo.ui.log(
-                        "incoming",
-                        "%d incoming changes - new heads: %s\n",
+                        b"incoming",
+                        b"%d incoming changes - new heads: %s\n",
                         len(added),
-                        ', '.join([hex(c[:6]) for c in newheads]),
+                        b', '.join([hex(c[:6]) for c in newheads]),
                     )
 
                 tr.addpostclose(
-                    'changegroup-runhooks-%020i' % clstart,
+                    b'changegroup-runhooks-%020i' % clstart,
                     lambda tr: repo._afterlock(runhooks),
                 )
         finally:
@@ -494,7 +494,7 @@
 
     deltaheader = _CHANGEGROUPV2_DELTA_HEADER
     deltaheadersize = deltaheader.size
-    version = '02'
+    version = b'02'
 
     def _deltaheader(self, headertuple, prevnode):
         node, p1, p2, deltabase, cs = headertuple
@@ -512,7 +512,7 @@
 
     deltaheader = _CHANGEGROUPV3_DELTA_HEADER
     deltaheadersize = deltaheader.size
-    version = '03'
+    version = b'03'
     _grouplistcount = 2  # One list of manifests and one list of files
 
     def _deltaheader(self, headertuple, prevnode):
@@ -523,11 +523,11 @@
         super(cg3unpacker, self)._unpackmanifests(repo, revmap, trp, prog)
         for chunkdata in iter(self.filelogheader, {}):
             # If we get here, there are directory manifests in the changegroup
-            d = chunkdata["filename"]
-            repo.ui.debug("adding %s revisions\n" % d)
+            d = chunkdata[b"filename"]
+            repo.ui.debug(b"adding %s revisions\n" % d)
             deltas = self.deltaiter()
             if not repo.manifestlog.getstorage(d).addgroup(deltas, revmap, trp):
-                raise error.Abort(_("received dir revlog group is empty"))
+                raise error.Abort(_(b"received dir revlog group is empty"))
 
 
 class headerlessfixup(object):
@@ -663,8 +663,8 @@
                 # We failed to resolve a parent for this node, so
                 # we crash the changegroup construction.
                 raise error.Abort(
-                    'unable to resolve parent while packing %r %r'
-                    ' for changeset %r' % (store.indexfile, rev, clrev)
+                    b'unable to resolve parent while packing %r %r'
+                    b' for changeset %r' % (store.indexfile, rev, clrev)
                 )
 
         return nullrev
@@ -710,10 +710,10 @@
     if ischangelog:
         # `hg log` shows changesets in storage order. To preserve order
         # across clones, send out changesets in storage order.
-        nodesorder = 'storage'
+        nodesorder = b'storage'
     elif ellipses:
         nodes = _sortnodesellipsis(store, nodes, cl, lookup)
-        nodesorder = 'nodes'
+        nodesorder = b'nodes'
     else:
         nodesorder = None
 
@@ -777,20 +777,20 @@
     progress = None
     if topic is not None:
         progress = repo.ui.makeprogress(
-            topic, unit=_('chunks'), total=len(nodes)
+            topic, unit=_(b'chunks'), total=len(nodes)
         )
 
-    configtarget = repo.ui.config('devel', 'bundle.delta')
-    if configtarget not in ('', 'p1', 'full'):
+    configtarget = repo.ui.config(b'devel', b'bundle.delta')
+    if configtarget not in (b'', b'p1', b'full'):
         msg = _("""config "devel.bundle.delta" as unknown value: %s""")
         repo.ui.warn(msg % configtarget)
 
     deltamode = repository.CG_DELTAMODE_STD
     if forcedeltaparentprev:
         deltamode = repository.CG_DELTAMODE_PREV
-    elif configtarget == 'p1':
+    elif configtarget == b'p1':
         deltamode = repository.CG_DELTAMODE_P1
-    elif configtarget == 'full':
+    elif configtarget == b'full':
         deltamode = repository.CG_DELTAMODE_FULL
 
     revisions = store.emitrevisions(
@@ -910,7 +910,7 @@
         repo = self._repo
         cl = repo.changelog
 
-        self._verbosenote(_('uncompressed size of bundle content:\n'))
+        self._verbosenote(_(b'uncompressed size of bundle content:\n'))
         size = 0
 
         clstate, deltas = self._generatechangelog(
@@ -925,11 +925,11 @@
         size += len(close)
         yield closechunk()
 
-        self._verbosenote(_('%8.i (changelog)\n') % size)
+        self._verbosenote(_(b'%8.i (changelog)\n') % size)
 
-        clrevorder = clstate['clrevorder']
-        manifests = clstate['manifests']
-        changedfiles = clstate['changedfiles']
+        clrevorder = clstate[b'clrevorder']
+        manifests = clstate[b'manifests']
+        changedfiles = clstate[b'changedfiles']
 
         # We need to make sure that the linkrev in the changegroup refers to
         # the first changeset that introduced the manifest or file revision.
@@ -950,7 +950,7 @@
         # either, because we don't discover which directory nodes to
         # send along with files. This could probably be fixed.
         fastpathlinkrev = fastpathlinkrev and (
-            'treemanifest' not in repo.requirements
+            b'treemanifest' not in repo.requirements
         )
 
         fnodes = {}  # needed file nodes
@@ -963,7 +963,7 @@
             manifests,
             fnodes,
             source,
-            clstate['clrevtomanifestrev'],
+            clstate[b'clrevtomanifestrev'],
         )
 
         for tree, deltas in it:
@@ -983,7 +983,7 @@
             size += len(close)
             yield close
 
-        self._verbosenote(_('%8.i (manifests)\n') % size)
+        self._verbosenote(_(b'%8.i (manifests)\n') % size)
         yield self._manifestsend
 
         mfdicts = None
@@ -1021,12 +1021,12 @@
             size += len(close)
             yield close
 
-            self._verbosenote(_('%8.i  %s\n') % (size, path))
+            self._verbosenote(_(b'%8.i  %s\n') % (size, path))
 
         yield closechunk()
 
         if clnodes:
-            repo.hook('outgoing', node=hex(clnodes[0]), source=source)
+            repo.hook(b'outgoing', node=hex(clnodes[0]), source=source)
 
     def _generatechangelog(self, cl, nodes, generate=True):
         """Generate data for changelog chunks.
@@ -1045,10 +1045,10 @@
         clrevtomanifestrev = {}
 
         state = {
-            'clrevorder': clrevorder,
-            'manifests': manifests,
-            'changedfiles': changedfiles,
-            'clrevtomanifestrev': clrevtomanifestrev,
+            b'clrevorder': clrevorder,
+            b'manifests': manifests,
+            b'changedfiles': changedfiles,
+            b'clrevtomanifestrev': clrevtomanifestrev,
         }
 
         if not (generate or self._ellipses):
@@ -1116,7 +1116,7 @@
             lookupcl,
             self._forcedeltaparentprev,
             ellipses=self._ellipses,
-            topic=_('changesets'),
+            topic=_(b'changesets'),
             clrevtolocalrev={},
             fullclnodes=self._fullclnodes,
             precomputedellipsis=self._precomputedellipsis,
@@ -1141,7 +1141,7 @@
         """
         repo = self._repo
         mfl = repo.manifestlog
-        tmfnodes = {'': manifests}
+        tmfnodes = {b'': manifests}
 
         # Callback for the manifest, used to collect linkrevs for filelog
         # revisions.
@@ -1170,8 +1170,8 @@
                 clnode = nodes[x]
                 mdata = mfl.get(tree, x).readfast(shallow=True)
                 for p, n, fl in mdata.iterentries():
-                    if fl == 't':  # subdirectory manifest
-                        subtree = tree + p + '/'
+                    if fl == b't':  # subdirectory manifest
+                        subtree = tree + p + b'/'
                         tmfclnodes = tmfnodes.setdefault(subtree, {})
                         tmfclnode = tmfclnodes.setdefault(n, clnode)
                         if clrevorder[clnode] < clrevorder[tmfclnode]:
@@ -1220,7 +1220,7 @@
                 lookupfn,
                 self._forcedeltaparentprev,
                 ellipses=self._ellipses,
-                topic=_('manifests'),
+                topic=_(b'manifests'),
                 clrevtolocalrev=clrevtolocalrev,
                 fullclnodes=self._fullclnodes,
                 precomputedellipsis=self._precomputedellipsis,
@@ -1316,13 +1316,13 @@
 
         repo = self._repo
         progress = repo.ui.makeprogress(
-            _('files'), unit=_('files'), total=len(changedfiles)
+            _(b'files'), unit=_(b'files'), total=len(changedfiles)
         )
         for i, fname in enumerate(sorted(changedfiles)):
             filerevlog = repo.file(fname)
             if not filerevlog:
                 raise error.Abort(
-                    _("empty or missing file data for %s") % fname
+                    _(b"empty or missing file data for %s") % fname
                 )
 
             clrevtolocalrev.clear()
@@ -1454,11 +1454,11 @@
 
 
 _packermap = {
-    '01': (_makecg1packer, cg1unpacker),
+    b'01': (_makecg1packer, cg1unpacker),
     # cg2 adds support for exchanging generaldelta
-    '02': (_makecg2packer, cg2unpacker),
+    b'02': (_makecg2packer, cg2unpacker),
     # cg3 adds support for exchanging revlog flags and treemanifests
-    '03': (_makecg3packer, cg3unpacker),
+    b'03': (_makecg3packer, cg3unpacker),
 }
 
 
@@ -1466,9 +1466,9 @@
     versions = set(_packermap.keys())
     needv03 = False
     if (
-        repo.ui.configbool('experimental', 'changegroup3')
-        or repo.ui.configbool('experimental', 'treemanifest')
-        or 'treemanifest' in repo.requirements
+        repo.ui.configbool(b'experimental', b'changegroup3')
+        or repo.ui.configbool(b'experimental', b'treemanifest')
+        or b'treemanifest' in repo.requirements
     ):
         # we keep version 03 because we need to exchange treemanifest data
         #
@@ -1479,7 +1479,7 @@
         # (or even to push subset of history)
         needv03 = True
     if not needv03:
-        versions.discard('03')
+        versions.discard(b'03')
     return versions
 
 
@@ -1491,24 +1491,24 @@
 # Changegroup versions that can be created from the repo
 def supportedoutgoingversions(repo):
     versions = allsupportedversions(repo)
-    if 'treemanifest' in repo.requirements:
+    if b'treemanifest' in repo.requirements:
         # Versions 01 and 02 support only flat manifests and it's just too
         # expensive to convert between the flat manifest and tree manifest on
         # the fly. Since tree manifests are hashed differently, all of history
         # would have to be converted. Instead, we simply don't even pretend to
         # support versions 01 and 02.
-        versions.discard('01')
-        versions.discard('02')
+        versions.discard(b'01')
+        versions.discard(b'02')
     if repository.NARROW_REQUIREMENT in repo.requirements:
         # Versions 01 and 02 don't support revlog flags, and we need to
         # support that for stripping and unbundling to work.
-        versions.discard('01')
-        versions.discard('02')
+        versions.discard(b'01')
+        versions.discard(b'02')
     if LFS_REQUIREMENT in repo.requirements:
         # Versions 01 and 02 don't support revlog flags, and we need to
         # mark LFS entries with REVIDX_EXTSTORED.
-        versions.discard('01')
-        versions.discard('02')
+        versions.discard(b'01')
+        versions.discard(b'02')
 
     return versions
 
@@ -1524,8 +1524,8 @@
     # will support. For example, all hg versions that support generaldelta also
     # support changegroup 02.
     versions = supportedoutgoingversions(repo)
-    if 'generaldelta' in repo.requirements:
-        versions.discard('01')
+    if b'generaldelta' in repo.requirements:
+        versions.discard(b'01')
     assert versions
     return min(versions)
 
@@ -1548,16 +1548,16 @@
     if oldmatcher is None:
         oldmatcher = matchmod.never()
 
-    if version == '01' and not matcher.always():
+    if version == b'01' and not matcher.always():
         raise error.ProgrammingError(
-            'version 01 changegroups do not support ' 'sparse file matchers'
+            b'version 01 changegroups do not support sparse file matchers'
         )
 
     if ellipses and version in (b'01', b'02'):
         raise error.Abort(
             _(
-                'ellipsis nodes require at least cg3 on client and server, '
-                'but negotiated version %s'
+                b'ellipsis nodes require at least cg3 on client and server, '
+                b'but negotiated version %s'
             )
             % version
         )
@@ -1584,12 +1584,12 @@
 
 
 def _changegroupinfo(repo, nodes, source):
-    if repo.ui.verbose or source == 'bundle':
-        repo.ui.status(_("%d changesets found\n") % len(nodes))
+    if repo.ui.verbose or source == b'bundle':
+        repo.ui.status(_(b"%d changesets found\n") % len(nodes))
     if repo.ui.debugflag:
-        repo.ui.debug("list of changesets:\n")
+        repo.ui.debug(b"list of changesets:\n")
         for node in nodes:
-            repo.ui.debug("%s\n" % hex(node))
+            repo.ui.debug(b"%s\n" % hex(node))
 
 
 def makechangegroup(
@@ -1607,7 +1607,7 @@
         version,
         util.chunkbuffer(cgstream),
         None,
-        {'clcount': len(outgoing.missing)},
+        {b'clcount': len(outgoing.missing)},
     )
 
 
@@ -1634,7 +1634,7 @@
         repo.filtername is None and heads == sorted(repo.heads())
     )
 
-    repo.hook('preoutgoing', throw=True, source=source)
+    repo.hook(b'preoutgoing', throw=True, source=source)
     _changegroupinfo(repo, csets, source)
     return bundler.generate(commonrevs, csets, fastpathlinkrev, source)
 
@@ -1643,21 +1643,21 @@
     revisions = 0
     files = 0
     progress = repo.ui.makeprogress(
-        _('files'), unit=_('files'), total=expectedfiles
+        _(b'files'), unit=_(b'files'), total=expectedfiles
     )
     for chunkdata in iter(source.filelogheader, {}):
         files += 1
-        f = chunkdata["filename"]
-        repo.ui.debug("adding %s revisions\n" % f)
+        f = chunkdata[b"filename"]
+        repo.ui.debug(b"adding %s revisions\n" % f)
         progress.increment()
         fl = repo.file(f)
         o = len(fl)
         try:
             deltas = source.deltaiter()
             if not fl.addgroup(deltas, revmap, trp):
-                raise error.Abort(_("received file revlog group is empty"))
+                raise error.Abort(_(b"received file revlog group is empty"))
         except error.CensoredBaseError as e:
-            raise error.Abort(_("received delta base is censored: %s") % e)
+            raise error.Abort(_(b"received delta base is censored: %s") % e)
         revisions += len(fl) - o
         if f in needfiles:
             needs = needfiles[f]
@@ -1666,7 +1666,7 @@
                 if n in needs:
                     needs.remove(n)
                 else:
-                    raise error.Abort(_("received spurious file revlog entry"))
+                    raise error.Abort(_(b"received spurious file revlog entry"))
             if not needs:
                 del needfiles[f]
     progress.complete()
@@ -1678,7 +1678,7 @@
                 fl.rev(n)
             except error.LookupError:
                 raise error.Abort(
-                    _('missing file data for %s:%s - run hg verify')
+                    _(b'missing file data for %s:%s - run hg verify')
                     % (f, hex(n))
                 )
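
Note on the validation flow that ends here: with server.validate set,
needfiles maps each filename to the nodes its manifests announced; every
node delivered by the file stream is crossed off, an unexpected node aborts
as spurious, and anything left over is reported as missing. A reduced
sketch of that bookkeeping, with stand-in data instead of real revlogs:

    needfiles = {b'a.txt': {b'n1', b'n2'}}  # announced by the manifests
    received = {b'a.txt': [b'n1', b'n2']}   # delivered by the changegroup

    for f, nodes in received.items():
        needs = needfiles.get(f, set())
        for n in nodes:
            if n in needs:
                needs.remove(n)
            else:
                raise ValueError('spurious file revlog entry')
        if not needs:
            del needfiles[f]

    assert not needfiles  # leftovers would mean missing file data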
 
--- a/mercurial/changelog.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/changelog.py	Sun Oct 06 09:48:39 2019 -0400
@@ -27,7 +27,7 @@
     stringutil,
 )
 
-_defaultextra = {'branch': 'default'}
+_defaultextra = {b'branch': b'default'}
 
 
 def _string_escape(text):
@@ -42,16 +42,20 @@
     True
     """
     # subset of the string_escape codec
-    text = text.replace('\\', '\\\\').replace('\n', '\\n').replace('\r', '\\r')
-    return text.replace('\0', '\\0')
+    text = (
+        text.replace(b'\\', b'\\\\')
+        .replace(b'\n', b'\\n')
+        .replace(b'\r', b'\\r')
+    )
+    return text.replace(b'\0', b'\\0')
 
 
 def _string_unescape(text):
-    if '\\0' in text:
+    if b'\\0' in text:
         # fix up \0 without getting into trouble with \\0
-        text = text.replace('\\\\', '\\\\\n')
-        text = text.replace('\\0', '\0')
-        text = text.replace('\n', '')
+        text = text.replace(b'\\\\', b'\\\\\n')
+        text = text.replace(b'\\0', b'\0')
+        text = text.replace(b'\n', b'')
     return stringutil.unescapestr(text)
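
The escaping above keeps extra values containing newlines or NULs from
corrupting the line- and NUL-delimited changelog entry; the unescape side
temporarily doubles '\\\\' so a literal backslash followed by '0' is not
mistaken for an escaped NUL. A sketch of the escape half (the hypothetical
escape helper mirrors _string_escape; decoding additionally needs
stringutil.unescapestr):

    def escape(text):
        text = (
            text.replace(b'\\', b'\\\\')
            .replace(b'\n', b'\\n')
            .replace(b'\r', b'\\r')
        )
        return text.replace(b'\0', b'\\0')

    sample = b'branch\nwith\0odd\\bytes'
    escaped = escape(sample)
    # single-line and NUL-free, so it can be embedded in the extra block
    assert b'\n' not in escaped and b'\0' not in escaped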
 
 
@@ -67,9 +71,9 @@
     [('baz', '\\\\\\x002'), ('branch', 'default'), ('foo', 'bar')]
     """
     extra = _defaultextra.copy()
-    for l in text.split('\0'):
+    for l in text.split(b'\0'):
         if l:
-            k, v = _string_unescape(l).split(':', 1)
+            k, v = _string_unescape(l).split(b':', 1)
             extra[k] = v
     return extra
 
@@ -77,19 +81,22 @@
 def encodeextra(d):
     # keys must be sorted to produce a deterministic changelog entry
     items = [
-        _string_escape('%s:%s' % (k, pycompat.bytestr(d[k]))) for k in sorted(d)
+        _string_escape(b'%s:%s' % (k, pycompat.bytestr(d[k])))
+        for k in sorted(d)
     ]
-    return "\0".join(items)
+    return b"\0".join(items)
 
 
 def encodecopies(files, copies):
     items = []
     for i, dst in enumerate(files):
         if dst in copies:
-            items.append('%d\0%s' % (i, copies[dst]))
+            items.append(b'%d\0%s' % (i, copies[dst]))
     if len(items) != len(copies):
-        raise error.ProgrammingError('some copy targets missing from file list')
-    return "\n".join(items)
+        raise error.ProgrammingError(
+            b'some copy targets missing from file list'
+        )
+    return b"\n".join(items)
 
 
 def decodecopies(files, data):
@@ -97,8 +104,8 @@
         copies = {}
         if not data:
             return copies
-        for l in data.split('\n'):
-            strindex, src = l.split('\0')
+        for l in data.split(b'\n'):
+            strindex, src = l.split(b'\0')
             i = int(strindex)
             dst = files[i]
             copies[dst] = src
@@ -114,8 +121,8 @@
     indices = []
     for i, f in enumerate(files):
         if f in subset:
-            indices.append('%d' % i)
-    return '\n'.join(indices)
+            indices.append(b'%d' % i)
+    return b'\n'.join(indices)
 
 
 def decodefileindices(files, data):
@@ -123,7 +130,7 @@
         subset = []
         if not data:
             return subset
-        for strindex in data.split('\n'):
+        for strindex in data.split(b'\n'):
             i = int(strindex)
             if i < 0 or i >= len(files):
                 return None
@@ -137,7 +144,7 @@
 
 def stripdesc(desc):
     """strip trailing whitespace and leading and trailing empty lines"""
-    return '\n'.join([l.rstrip() for l in desc.splitlines()]).strip('\n')
+    return b'\n'.join([l.rstrip() for l in desc.splitlines()]).strip(b'\n')
 
 
 class appender(object):
@@ -181,7 +188,7 @@
 
     def read(self, count=-1):
         '''only trick here is reads that span real file and data'''
-        ret = ""
+        ret = b""
         if self.offset < self.size:
             s = self.fp.read(count)
             ret = s
@@ -190,7 +197,7 @@
                 count -= len(s)
         if count != 0:
             doff = self.offset - self.size
-            self.data.insert(0, "".join(self.data))
+            self.data.insert(0, b"".join(self.data))
             del self.data[1:]
             s = self.data[0][doff : doff + count]
             self.offset += len(s)
@@ -213,10 +220,10 @@
 def _divertopener(opener, target):
     """build an opener that writes in 'target.a' instead of 'target'"""
 
-    def _divert(name, mode='r', checkambig=False):
+    def _divert(name, mode=b'r', checkambig=False):
         if name != target:
             return opener(name, mode)
-        return opener(name + ".a", mode)
+        return opener(name + b".a", mode)
 
     return _divert
 
@@ -224,7 +231,7 @@
 def _delayopener(opener, target, buf):
     """build an opener that stores chunks in 'buf' instead of 'target'"""
 
-    def _delay(name, mode='r', checkambig=False):
+    def _delay(name, mode=b'r', checkambig=False):
         if name != target:
             return opener(name, mode)
         return appender(opener, name, mode, buf)
@@ -238,14 +245,14 @@
     # it in
     extra = attr.ib()
     manifest = attr.ib(default=nullid)
-    user = attr.ib(default='')
+    user = attr.ib(default=b'')
     date = attr.ib(default=(0, 0))
     files = attr.ib(default=attr.Factory(list))
     filesadded = attr.ib(default=None)
     filesremoved = attr.ib(default=None)
     p1copies = attr.ib(default=None)
     p2copies = attr.ib(default=None)
-    description = attr.ib(default='')
+    description = attr.ib(default=b'')
 
 
 class changelogrevision(object):
@@ -280,16 +287,16 @@
         #
         # changelog v0 doesn't use extra
 
-        nl1 = text.index('\n')
-        nl2 = text.index('\n', nl1 + 1)
-        nl3 = text.index('\n', nl2 + 1)
+        nl1 = text.index(b'\n')
+        nl2 = text.index(b'\n', nl1 + 1)
+        nl3 = text.index(b'\n', nl2 + 1)
 
         # The list of files may be empty. Which means nl3 is the first of the
         # double newline that precedes the description.
-        if text[nl3 + 1 : nl3 + 2] == '\n':
+        if text[nl3 + 1 : nl3 + 2] == b'\n':
             doublenl = nl3
         else:
-            doublenl = text.index('\n\n', nl3 + 1)
+            doublenl = text.index(b'\n\n', nl3 + 1)
 
         self._offsets = (nl1, nl2, nl3, doublenl)
         self._text = text
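
For orientation, the offsets computed above carve up a v0 changelog entry
of the form 'manifest\nuser\ndate tz [extra]\nfiles...\n\ndescription'; a
sketch with a hand-built entry (all values hypothetical):

    text = (
        b'0123456789abcdef0123456789abcdef01234567\n'  # manifest node (hex)
        b'Jane Doe <jane@example.invalid>\n'           # user
        b'1570000000 14400\n'                          # 'date tz [extra]'
        b'file1\nfile2\n'                              # changed files
        b'\n'
        b'commit description'
    )
    nl1 = text.index(b'\n')
    nl2 = text.index(b'\n', nl1 + 1)
    nl3 = text.index(b'\n', nl2 + 1)
    doublenl = text.index(b'\n\n', nl3 + 1)
    assert text[nl1 + 1 : nl2] == b'Jane Doe <jane@example.invalid>'
    assert text[nl3 + 1 : doublenl].split(b'\n') == [b'file1', b'file2']
    assert text[doublenl + 2 :] == b'commit description'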
@@ -309,13 +316,13 @@
     def _rawdate(self):
         off = self._offsets
         dateextra = self._text[off[1] + 1 : off[2]]
-        return dateextra.split(' ', 2)[0:2]
+        return dateextra.split(b' ', 2)[0:2]
 
     @property
     def _rawextra(self):
         off = self._offsets
         dateextra = self._text[off[1] + 1 : off[2]]
-        fields = dateextra.split(' ', 2)
+        fields = dateextra.split(b' ', 2)
         if len(fields) != 3:
             return None
 
@@ -347,26 +354,26 @@
         if off[2] == off[3]:
             return []
 
-        return self._text[off[2] + 1 : off[3]].split('\n')
+        return self._text[off[2] + 1 : off[3]].split(b'\n')
 
     @property
     def filesadded(self):
-        rawindices = self.extra.get('filesadded')
+        rawindices = self.extra.get(b'filesadded')
         return rawindices and decodefileindices(self.files, rawindices)
 
     @property
     def filesremoved(self):
-        rawindices = self.extra.get('filesremoved')
+        rawindices = self.extra.get(b'filesremoved')
         return rawindices and decodefileindices(self.files, rawindices)
 
     @property
     def p1copies(self):
-        rawcopies = self.extra.get('p1copies')
+        rawcopies = self.extra.get(b'p1copies')
         return rawcopies and decodecopies(self.files, rawcopies)
 
     @property
     def p2copies(self):
-        rawcopies = self.extra.get('p2copies')
+        rawcopies = self.extra.get(b'p2copies')
         return rawcopies and decodecopies(self.files, rawcopies)
 
     @property
@@ -385,12 +392,12 @@
         It exists in a separate file to facilitate readers (such as
         hooks processes) accessing data before a transaction is finalized.
         """
-        if trypending and opener.exists('00changelog.i.a'):
-            indexfile = '00changelog.i.a'
+        if trypending and opener.exists(b'00changelog.i.a'):
+            indexfile = b'00changelog.i.a'
         else:
-            indexfile = '00changelog.i'
+            indexfile = b'00changelog.i'
 
-        datafile = '00changelog.d'
+        datafile = b'00changelog.d'
         revlog.revlog.__init__(
             self,
             opener,
@@ -416,7 +423,7 @@
         self._delaybuf = None
         self._divert = False
         self.filteredrevs = frozenset()
-        self._copiesstorage = opener.options.get('copies-storage')
+        self._copiesstorage = opener.options.get(b'copies-storage')
 
     def tiprev(self):
         for i in pycompat.xrange(len(self) - 1, -2, -1):
@@ -494,7 +501,7 @@
         r = super(changelog, self).rev(node)
         if r in self.filteredrevs:
             raise error.FilteredLookupError(
-                hex(node), self.indexfile, _('filtered node')
+                hex(node), self.indexfile, _(b'filtered node')
             )
         return r
 
@@ -523,13 +530,13 @@
         return super(changelog, self).flags(rev)
 
     def delayupdate(self, tr):
-        "delay visibility of index updates to other readers"
+        b"delay visibility of index updates to other readers"
 
         if not self._delayed:
             if len(self) == 0:
                 self._divert = True
-                if self._realopener.exists(self.indexfile + '.a'):
-                    self._realopener.unlink(self.indexfile + '.a')
+                if self._realopener.exists(self.indexfile + b'.a'):
+                    self._realopener.unlink(self.indexfile + b'.a')
                 self.opener = _divertopener(self._realopener, self.indexfile)
             else:
                 self._delaybuf = []
@@ -537,23 +544,23 @@
                     self._realopener, self.indexfile, self._delaybuf
                 )
         self._delayed = True
-        tr.addpending('cl-%i' % id(self), self._writepending)
-        tr.addfinalize('cl-%i' % id(self), self._finalize)
+        tr.addpending(b'cl-%i' % id(self), self._writepending)
+        tr.addfinalize(b'cl-%i' % id(self), self._finalize)
 
     def _finalize(self, tr):
-        "finalize index updates"
+        b"finalize index updates"
         self._delayed = False
         self.opener = self._realopener
         # move redirected index data back into place
         if self._divert:
             assert not self._delaybuf
-            tmpname = self.indexfile + ".a"
+            tmpname = self.indexfile + b".a"
             nfile = self.opener.open(tmpname)
             nfile.close()
             self.opener.rename(tmpname, self.indexfile, checkambig=True)
         elif self._delaybuf:
-            fp = self.opener(self.indexfile, 'a', checkambig=True)
-            fp.write("".join(self._delaybuf))
+            fp = self.opener(self.indexfile, b'a', checkambig=True)
+            fp.write(b"".join(self._delaybuf))
             fp.close()
             self._delaybuf = None
         self._divert = False
@@ -561,18 +568,18 @@
         self._enforceinlinesize(tr)
 
     def _writepending(self, tr):
-        "create a file containing the unfinalized state for pretxnchangegroup"
+        b"create a file containing the unfinalized state for pretxnchangegroup"
         if self._delaybuf:
             # make a temporary copy of the index
             fp1 = self._realopener(self.indexfile)
-            pendingfilename = self.indexfile + ".a"
+            pendingfilename = self.indexfile + b".a"
             # register as a temp file to ensure cleanup on failure
             tr.registertmp(pendingfilename)
             # write existing data
-            fp2 = self._realopener(pendingfilename, "w")
+            fp2 = self._realopener(pendingfilename, b"w")
             fp2.write(fp1.read())
             # add pending data
-            fp2.write("".join(self._delaybuf))
+            fp2.write(b"".join(self._delaybuf))
             fp2.close()
             # switch modes so finalize can simply rename
             self._delaybuf = None
@@ -618,8 +625,8 @@
         text = self.revision(node)
         if not text:
             return []
-        last = text.index("\n\n")
-        l = text[:last].split('\n')
+        last = text.index(b"\n\n")
+        l = text[:last].split(b'\n')
         return l[3:]
 
     def add(
@@ -648,29 +655,34 @@
         # revision text contain two "\n\n" sequences -> corrupt
         # repository since read cannot unpack the revision.
         if not user:
-            raise error.StorageError(_("empty username"))
-        if "\n" in user:
+            raise error.StorageError(_(b"empty username"))
+        if b"\n" in user:
             raise error.StorageError(
-                _("username %r contains a newline") % pycompat.bytestr(user)
+                _(b"username %r contains a newline") % pycompat.bytestr(user)
             )
 
         desc = stripdesc(desc)
 
         if date:
-            parseddate = "%d %d" % dateutil.parsedate(date)
+            parseddate = b"%d %d" % dateutil.parsedate(date)
         else:
-            parseddate = "%d %d" % dateutil.makedate()
+            parseddate = b"%d %d" % dateutil.makedate()
         if extra:
-            branch = extra.get("branch")
-            if branch in ("default", ""):
-                del extra["branch"]
-            elif branch in (".", "null", "tip"):
+            branch = extra.get(b"branch")
+            if branch in (b"default", b""):
+                del extra[b"branch"]
+            elif branch in (b".", b"null", b"tip"):
                 raise error.StorageError(
-                    _('the name \'%s\' is reserved') % branch
+                    _(b'the name \'%s\' is reserved') % branch
                 )
         sortedfiles = sorted(files)
         if extra is not None:
-            for name in ('p1copies', 'p2copies', 'filesadded', 'filesremoved'):
+            for name in (
+                b'p1copies',
+                b'p2copies',
+                b'filesadded',
+                b'filesremoved',
+            ):
                 extra.pop(name, None)
         if p1copies is not None:
             p1copies = encodecopies(sortedfiles, p1copies)
@@ -680,24 +692,24 @@
             filesadded = encodefileindices(sortedfiles, filesadded)
         if filesremoved is not None:
             filesremoved = encodefileindices(sortedfiles, filesremoved)
-        if self._copiesstorage == 'extra':
+        if self._copiesstorage == b'extra':
             extrasentries = p1copies, p2copies, filesadded, filesremoved
             if extra is None and any(x is not None for x in extrasentries):
                 extra = {}
             if p1copies is not None:
-                extra['p1copies'] = p1copies
+                extra[b'p1copies'] = p1copies
             if p2copies is not None:
-                extra['p2copies'] = p2copies
+                extra[b'p2copies'] = p2copies
             if filesadded is not None:
-                extra['filesadded'] = filesadded
+                extra[b'filesadded'] = filesadded
             if filesremoved is not None:
-                extra['filesremoved'] = filesremoved
+                extra[b'filesremoved'] = filesremoved
 
         if extra:
             extra = encodeextra(extra)
-            parseddate = "%s %s" % (parseddate, extra)
-        l = [hex(manifest), user, parseddate] + sortedfiles + ["", desc]
-        text = "\n".join(l)
+            parseddate = b"%s %s" % (parseddate, extra)
+        l = [hex(manifest), user, parseddate] + sortedfiles + [b"", desc]
+        text = b"\n".join(l)
         return self.addrevision(text, transaction, len(self), p1, p2)
 
     def branchinfo(self, rev):
@@ -706,11 +718,11 @@
         This function exists because creating a changectx object
         just to access this is costly."""
         extra = self.read(rev)[5]
-        return encoding.tolocal(extra.get("branch")), 'close' in extra
+        return encoding.tolocal(extra.get(b"branch")), b'close' in extra
 
     def _nodeduplicatecallback(self, transaction, node):
         # keep track of revisions that got "re-added", e.g.: unbundle of a known rev.
         #
         # We track them in a list to preserve their order from the source bundle
-        duplicates = transaction.changes.setdefault('revduplicates', [])
+        duplicates = transaction.changes.setdefault(b'revduplicates', [])
         duplicates.append(self.rev(node))
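
The changelog.py hunks above only re-spell an existing text layout in bytes:
add() joins three header lines (manifest hash, user, date plus optional extra),
the sorted file list, a blank line, and the description, while files() recovers
the file list by cutting at the first b"\n\n" and dropping the three header
lines. A minimal round-trip sketch of that layout (the helper names here are
illustrative only, not Mercurial APIs):

    def buildentry(manifesthex, user, parseddate, files, desc):
        # same shape as changelog.add(): header lines, sorted file list,
        # blank separator line, then the description
        return b"\n".join(
            [manifesthex, user, parseddate] + sorted(files) + [b"", desc]
        )

    def readfiles(text):
        # same shape as changelog.files(): everything after the first three
        # lines and before the first blank line is the touched-file list
        last = text.index(b"\n\n")
        return text[:last].split(b"\n")[3:]

    entry = buildentry(b"a" * 40, b"alice", b"0 0", [b"b.txt", b"a.txt"], b"msg")
    assert readfiles(entry) == [b"a.txt", b"b.txt"]
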
--- a/mercurial/chgserver.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/chgserver.py	Sun Oct 06 09:48:39 2019 -0400
@@ -75,14 +75,14 @@
 
 # sensitive config sections affecting confighash
 _configsections = [
-    'alias',  # affects global state commands.table
-    'eol',  # uses setconfig('eol', ...)
-    'extdiff',  # uisetup will register new commands
-    'extensions',
+    b'alias',  # affects global state commands.table
+    b'eol',  # uses setconfig('eol', ...)
+    b'extdiff',  # uisetup will register new commands
+    b'extensions',
 ]
 
 _configsectionitems = [
-    ('commands', 'show.aliasprefix'),  # show.py reads it in extsetup
+    (b'commands', b'show.aliasprefix'),  # show.py reads it in extsetup
 ]
 
 # sensitive environment variables affecting confighash
@@ -121,8 +121,8 @@
         sectionitems.append(ui.config(section, item))
     sectionhash = _hashlist(sectionitems)
     # If $CHGHG is set, the change to $HG should not trigger a new chg server
-    if 'CHGHG' in encoding.environ:
-        ignored = {'HG'}
+    if b'CHGHG' in encoding.environ:
+        ignored = {b'HG'}
     else:
         ignored = set()
     envitems = [
@@ -204,8 +204,8 @@
         confighash = _confighash(ui)
         mtimehash = _mtimehash(mtimepaths)
         ui.log(
-            'cmdserver',
-            'confighash = %s mtimehash = %s\n',
+            b'cmdserver',
+            b'confighash = %s mtimehash = %s\n',
             confighash,
             mtimehash,
         )
@@ -229,7 +229,7 @@
             # will behave differently (i.e. write to stdout).
             if (
                 out is not self.fout
-                or not util.safehasattr(self.fout, 'fileno')
+                or not util.safehasattr(self.fout, b'fileno')
                 or self.fout.fileno() != procutil.stdout.fileno()
                 or self._finoutredirected
             ):
@@ -241,8 +241,8 @@
             self._csystem(
                 cmd,
                 procutil.shellenviron(env),
-                type='pager',
-                cmdtable={'attachio': attachio},
+                type=b'pager',
+                cmdtable={b'attachio': attachio},
             )
             return True
 
@@ -253,27 +253,27 @@
     from . import dispatch  # avoid cycle
 
     newui = srcui.__class__.load()
-    for a in ['fin', 'fout', 'ferr', 'environ']:
+    for a in [b'fin', b'fout', b'ferr', b'environ']:
         setattr(newui, a, getattr(srcui, a))
-    if util.safehasattr(srcui, '_csystem'):
+    if util.safehasattr(srcui, b'_csystem'):
         newui._csystem = srcui._csystem
 
     # command line args
     options = dispatch._earlyparseopts(newui, args)
-    dispatch._parseconfig(newui, options['config'])
+    dispatch._parseconfig(newui, options[b'config'])
 
     # stolen from tortoisehg.util.copydynamicconfig()
     for section, name, value in srcui.walkconfig():
         source = srcui.configsource(section, name)
-        if ':' in source or source == '--config' or source.startswith('$'):
+        if b':' in source or source == b'--config' or source.startswith(b'$'):
             # path:line or command line, or environ
             continue
         newui.setconfig(section, name, value, source)
 
     # load wd and repo config, copied from dispatch.py
-    cwd = options['cwd']
+    cwd = options[b'cwd']
     cwd = cwd and os.path.realpath(cwd) or None
-    rpath = options['repository']
+    rpath = options[b'repository']
     path, newlui = dispatch._getlocal(newui, rpath, wd=cwd)
 
     extensions.populateui(newui)
@@ -311,22 +311,22 @@
         self.out = out
         self.channel = channel
 
-    def __call__(self, cmd, environ, cwd=None, type='system', cmdtable=None):
-        args = [type, procutil.quotecommand(cmd), os.path.abspath(cwd or '.')]
-        args.extend('%s=%s' % (k, v) for k, v in environ.iteritems())
-        data = '\0'.join(args)
-        self.out.write(struct.pack('>cI', self.channel, len(data)))
+    def __call__(self, cmd, environ, cwd=None, type=b'system', cmdtable=None):
+        args = [type, procutil.quotecommand(cmd), os.path.abspath(cwd or b'.')]
+        args.extend(b'%s=%s' % (k, v) for k, v in environ.iteritems())
+        data = b'\0'.join(args)
+        self.out.write(struct.pack(b'>cI', self.channel, len(data)))
         self.out.write(data)
         self.out.flush()
 
-        if type == 'system':
+        if type == b'system':
             length = self.in_.read(4)
-            (length,) = struct.unpack('>I', length)
+            (length,) = struct.unpack(b'>I', length)
             if length != 4:
-                raise error.Abort(_('invalid response'))
-            (rc,) = struct.unpack('>i', self.in_.read(4))
+                raise error.Abort(_(b'invalid response'))
+            (rc,) = struct.unpack(b'>i', self.in_.read(4))
             return rc
-        elif type == 'pager':
+        elif type == b'pager':
             while True:
                 cmd = self.in_.readline()[:-1]
                 if not cmd:
@@ -334,16 +334,16 @@
                 if cmdtable and cmd in cmdtable:
                     cmdtable[cmd]()
                 else:
-                    raise error.Abort(_('unexpected command: %s') % cmd)
+                    raise error.Abort(_(b'unexpected command: %s') % cmd)
         else:
-            raise error.ProgrammingError('invalid S channel type: %s' % type)
+            raise error.ProgrammingError(b'invalid S channel type: %s' % type)
 
 
 _iochannels = [
     # server.ch, ui.fp, mode
-    ('cin', 'fin', r'rb'),
-    ('cout', 'fout', r'wb'),
-    ('cerr', 'ferr', r'wb'),
+    (b'cin', b'fin', r'rb'),
+    (b'cout', b'fout', r'wb'),
+    (b'cerr', b'ferr', r'wb'),
 ]
 
 
@@ -352,7 +352,7 @@
         self, ui, repo, fin, fout, sock, prereposetups, hashstate, baseaddress
     ):
         super(chgcmdserver, self).__init__(
-            _newchgui(ui, channeledsystem(fin, fout, 'S'), self.attachio),
+            _newchgui(ui, channeledsystem(fin, fout, b'S'), self.attachio),
             repo,
             fin,
             fout,
@@ -365,7 +365,7 @@
         self.baseaddress = baseaddress
         if hashstate is not None:
             self.capabilities = self.capabilities.copy()
-            self.capabilities['validate'] = chgcmdserver.validate
+            self.capabilities[b'validate'] = chgcmdserver.validate
 
     def cleanup(self):
         super(chgcmdserver, self).cleanup()
@@ -381,9 +381,9 @@
         """
         # tell client to sendmsg() with 1-byte payload, which makes it
         # distinct from the "attachio\n" command consumed by client.read()
-        self.clientsock.sendall(struct.pack('>cI', 'I', 1))
+        self.clientsock.sendall(struct.pack(b'>cI', b'I', 1))
         clientfds = util.recvfds(self.clientsock.fileno())
-        self.ui.log('chgserver', 'received fds: %r\n', clientfds)
+        self.ui.log(b'chgserver', b'received fds: %r\n', clientfds)
 
         ui = self.ui
         ui.flush()
@@ -399,7 +399,7 @@
             # to see output immediately on pager, the mode stays unchanged
             # when the client is re-attached. ferr is unchanged because it should
             # be unbuffered no matter if it is a tty or not.
-            if fn == 'ferr':
+            if fn == b'ferr':
                 newfp = fp
             else:
                 # make it line buffered explicitly because the default is
@@ -413,7 +413,7 @@
             setattr(self, cn, newfp)
 
         self._ioattached = True
-        self.cresult.write(struct.pack('>i', len(clientfds)))
+        self.cresult.write(struct.pack(b'>i', len(clientfds)))
 
     def _saveio(self):
         if self._oldios:
@@ -468,29 +468,29 @@
         except error.ParseError as inst:
             dispatch._formatparse(self.ui.warn, inst)
             self.ui.flush()
-            self.cresult.write('exit 255')
+            self.cresult.write(b'exit 255')
             return
         except error.Abort as inst:
-            self.ui.error(_("abort: %s\n") % inst)
+            self.ui.error(_(b"abort: %s\n") % inst)
             if inst.hint:
-                self.ui.error(_("(%s)\n") % inst.hint)
+                self.ui.error(_(b"(%s)\n") % inst.hint)
             self.ui.flush()
-            self.cresult.write('exit 255')
+            self.cresult.write(b'exit 255')
             return
         newhash = hashstate.fromui(lui, self.hashstate.mtimepaths)
         insts = []
         if newhash.mtimehash != self.hashstate.mtimehash:
             addr = _hashaddress(self.baseaddress, self.hashstate.confighash)
-            insts.append('unlink %s' % addr)
+            insts.append(b'unlink %s' % addr)
             # mtimehash is empty if one or more extensions fail to load.
             # to be compatible with hg, still serve the client this time.
             if self.hashstate.mtimehash:
-                insts.append('reconnect')
+                insts.append(b'reconnect')
         if newhash.confighash != self.hashstate.confighash:
             addr = _hashaddress(self.baseaddress, newhash.confighash)
-            insts.append('redirect %s' % addr)
-        self.ui.log('chgserver', 'validate: %s\n', stringutil.pprint(insts))
-        self.cresult.write('\0'.join(insts) or '\0')
+            insts.append(b'redirect %s' % addr)
+        self.ui.log(b'chgserver', b'validate: %s\n', stringutil.pprint(insts))
+        self.cresult.write(b'\0'.join(insts) or b'\0')
 
     def chdir(self):
         """Change current directory
@@ -501,7 +501,7 @@
         path = self._readstr()
         if not path:
             return
-        self.ui.log('chgserver', 'chdir to %r\n', path)
+        self.ui.log(b'chgserver', b'chdir to %r\n', path)
         os.chdir(path)
 
     def setumask(self):
@@ -514,12 +514,12 @@
         """Change umask"""
         data = self._readstr()
         if len(data) != 4:
-            raise ValueError('invalid mask length in setumask2 request')
+            raise ValueError(b'invalid mask length in setumask2 request')
         self._setumask(data)
 
     def _setumask(self, data):
-        mask = struct.unpack('>I', data)[0]
-        self.ui.log('chgserver', 'setumask %r\n', mask)
+        mask = struct.unpack(b'>I', data)[0]
+        self.ui.log(b'chgserver', b'setumask %r\n', mask)
         os.umask(mask)
 
     def runcommand(self):
@@ -541,38 +541,38 @@
         """
         l = self._readlist()
         try:
-            newenv = dict(s.split('=', 1) for s in l)
+            newenv = dict(s.split(b'=', 1) for s in l)
         except ValueError:
-            raise ValueError('unexpected value in setenv request')
-        self.ui.log('chgserver', 'setenv: %r\n', sorted(newenv.keys()))
+            raise ValueError(b'unexpected value in setenv request')
+        self.ui.log(b'chgserver', b'setenv: %r\n', sorted(newenv.keys()))
         encoding.environ.clear()
         encoding.environ.update(newenv)
 
     capabilities = commandserver.server.capabilities.copy()
     capabilities.update(
         {
-            'attachio': attachio,
-            'chdir': chdir,
-            'runcommand': runcommand,
-            'setenv': setenv,
-            'setumask': setumask,
-            'setumask2': setumask2,
+            b'attachio': attachio,
+            b'chdir': chdir,
+            b'runcommand': runcommand,
+            b'setenv': setenv,
+            b'setumask': setumask,
+            b'setumask2': setumask2,
         }
     )
 
-    if util.safehasattr(procutil, 'setprocname'):
+    if util.safehasattr(procutil, b'setprocname'):
 
         def setprocname(self):
             """Change process title"""
             name = self._readstr()
-            self.ui.log('chgserver', 'setprocname: %r\n', name)
+            self.ui.log(b'chgserver', b'setprocname: %r\n', name)
             procutil.setprocname(name)
 
-        capabilities['setprocname'] = setprocname
+        capabilities[b'setprocname'] = setprocname
 
 
 def _tempaddress(address):
-    return '%s.%d.tmp' % (address, os.getpid())
+    return b'%s.%d.tmp' % (address, os.getpid())
 
 
 def _hashaddress(address, hashstr):
@@ -580,8 +580,8 @@
     # makes it possible for the client to pass 'server.tmp$PID', followed by
     # an atomic rename to avoid locking when spawning new servers.
     dirname, basename = os.path.split(address)
-    basename = basename.split('.', 1)[0]
-    return '%s-%s' % (os.path.join(dirname, basename), hashstr)
+    basename = basename.split(b'.', 1)[0]
+    return b'%s-%s' % (os.path.join(dirname, basename), hashstr)
 
 
 class chgunixservicehandler(object):
@@ -591,7 +591,7 @@
 
     def __init__(self, ui):
         self.ui = ui
-        self._idletimeout = ui.configint('chgserver', 'idletimeout')
+        self._idletimeout = ui.configint(b'chgserver', b'idletimeout')
         self._lastactive = time.time()
 
     def bindsocket(self, sock, address):
@@ -603,7 +603,7 @@
 
     def _inithashstate(self, address):
         self._baseaddress = address
-        if self.ui.configbool('chgserver', 'skiphash'):
+        if self.ui.configbool(b'chgserver', b'skiphash'):
             self._hashstate = None
             self._realaddress = address
             return
@@ -617,7 +617,7 @@
             # one or more extensions failed to load. mtimehash becomes
             # meaningless because we do not know the paths of those extensions.
             # set mtimehash to an illegal hash value to invalidate the server.
-            self._hashstate.mtimehash = ''
+            self._hashstate.mtimehash = b''
 
     def _bind(self, sock):
         # use a unique temp address so we can stat the file and do ownership
@@ -689,11 +689,11 @@
     # demandimport or detecting chg client started by chg client. When executed
     # here, CHGINTERNALMARK is no longer useful and hence dropped to make
     # environ cleaner.
-    if 'CHGINTERNALMARK' in encoding.environ:
-        del encoding.environ['CHGINTERNALMARK']
+    if b'CHGINTERNALMARK' in encoding.environ:
+        del encoding.environ[b'CHGINTERNALMARK']
 
     if repo:
         # one chgserver can serve multiple repos. drop repo information
-        ui.setconfig('bundle', 'mainreporoot', '', 'repo')
+        ui.setconfig(b'bundle', b'mainreporoot', b'', b'repo')
     h = chgunixservicehandler(ui)
     return commandserver.unixforkingservice(ui, repo=None, opts=opts, handler=h)
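
Likewise, the chgserver.py hunks do not change channeledsystem's wire format,
only its spelling: each request is a one-byte channel id, a big-endian 32-bit
payload length, and a NUL-joined field list (request type, command, cwd, then
"key=value" environment pairs). A rough sketch of that framing under those
assumptions (pack_request/unpack_request are illustrative names, not part of
Mercurial):

    import struct

    def pack_request(channel, kind, cmd, cwd, environ):
        # header: 1-byte channel id + big-endian uint32 payload length,
        # followed by the NUL-joined fields
        args = [kind, cmd, cwd]
        args.extend(b'%s=%s' % (k, v) for k, v in sorted(environ.items()))
        data = b'\0'.join(args)
        return struct.pack(b'>cI', channel, len(data)) + data

    def unpack_request(frame):
        # split the 5-byte header off, then recover the field list
        channel, length = struct.unpack(b'>cI', frame[:5])
        return channel, frame[5:5 + length].split(b'\0')

    frame = pack_request(b'S', b'system', b'ls -l', b'/tmp', {b'LANG': b'C'})
    assert unpack_request(frame) == (b'S', [b'system', b'ls -l', b'/tmp', b'LANG=C'])
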
--- a/mercurial/cmdutil.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/cmdutil.py	Sun Oct 06 09:48:39 2019 -0400
@@ -61,55 +61,67 @@
 # templates of common command options
 
 dryrunopts = [
-    ('n', 'dry-run', None, _('do not perform actions, just print output')),
+    (b'n', b'dry-run', None, _(b'do not perform actions, just print output')),
 ]
 
 confirmopts = [
-    ('', 'confirm', None, _('ask before applying actions')),
+    (b'', b'confirm', None, _(b'ask before applying actions')),
 ]
 
 remoteopts = [
-    ('e', 'ssh', '', _('specify ssh command to use'), _('CMD')),
+    (b'e', b'ssh', b'', _(b'specify ssh command to use'), _(b'CMD')),
     (
-        '',
-        'remotecmd',
-        '',
-        _('specify hg command to run on the remote side'),
-        _('CMD'),
+        b'',
+        b'remotecmd',
+        b'',
+        _(b'specify hg command to run on the remote side'),
+        _(b'CMD'),
     ),
     (
-        '',
-        'insecure',
+        b'',
+        b'insecure',
         None,
-        _('do not verify server certificate (ignoring web.cacerts config)'),
+        _(b'do not verify server certificate (ignoring web.cacerts config)'),
     ),
 ]
 
 walkopts = [
     (
-        'I',
-        'include',
+        b'I',
+        b'include',
         [],
-        _('include names matching the given patterns'),
-        _('PATTERN'),
+        _(b'include names matching the given patterns'),
+        _(b'PATTERN'),
     ),
     (
-        'X',
-        'exclude',
+        b'X',
+        b'exclude',
         [],
-        _('exclude names matching the given patterns'),
-        _('PATTERN'),
+        _(b'exclude names matching the given patterns'),
+        _(b'PATTERN'),
     ),
 ]
 
 commitopts = [
-    ('m', 'message', '', _('use text as commit message'), _('TEXT')),
-    ('l', 'logfile', '', _('read commit message from file'), _('FILE')),
+    (b'm', b'message', b'', _(b'use text as commit message'), _(b'TEXT')),
+    (b'l', b'logfile', b'', _(b'read commit message from file'), _(b'FILE')),
 ]
 
 commitopts2 = [
-    ('d', 'date', '', _('record the specified date as commit date'), _('DATE')),
-    ('u', 'user', '', _('record the specified user as committer'), _('USER')),
+    (
+        b'd',
+        b'date',
+        b'',
+        _(b'record the specified date as commit date'),
+        _(b'DATE'),
+    ),
+    (
+        b'u',
+        b'user',
+        b'',
+        _(b'record the specified user as committer'),
+        _(b'USER'),
+    ),
 ]
 
 commitopts3 = [
@@ -118,107 +130,119 @@
 ]
 
 formatteropts = [
-    ('T', 'template', '', _('display with template'), _('TEMPLATE')),
+    (b'T', b'template', b'', _(b'display with template'), _(b'TEMPLATE')),
 ]
 
 templateopts = [
     (
-        '',
-        'style',
-        '',
-        _('display using template map file (DEPRECATED)'),
-        _('STYLE'),
+        b'',
+        b'style',
+        b'',
+        _(b'display using template map file (DEPRECATED)'),
+        _(b'STYLE'),
     ),
-    ('T', 'template', '', _('display with template'), _('TEMPLATE')),
+    (b'T', b'template', b'', _(b'display with template'), _(b'TEMPLATE')),
 ]
 
 logopts = [
-    ('p', 'patch', None, _('show patch')),
-    ('g', 'git', None, _('use git extended diff format')),
-    ('l', 'limit', '', _('limit number of changes displayed'), _('NUM')),
-    ('M', 'no-merges', None, _('do not show merges')),
-    ('', 'stat', None, _('output diffstat-style summary of changes')),
-    ('G', 'graph', None, _("show the revision DAG")),
+    (b'p', b'patch', None, _(b'show patch')),
+    (b'g', b'git', None, _(b'use git extended diff format')),
+    (b'l', b'limit', b'', _(b'limit number of changes displayed'), _(b'NUM')),
+    (b'M', b'no-merges', None, _(b'do not show merges')),
+    (b'', b'stat', None, _(b'output diffstat-style summary of changes')),
+    (b'G', b'graph', None, _(b"show the revision DAG")),
 ] + templateopts
 
 diffopts = [
-    ('a', 'text', None, _('treat all files as text')),
-    ('g', 'git', None, _('use git extended diff format')),
-    ('', 'binary', None, _('generate binary diffs in git mode (default)')),
-    ('', 'nodates', None, _('omit dates from diff headers')),
+    (b'a', b'text', None, _(b'treat all files as text')),
+    (b'g', b'git', None, _(b'use git extended diff format')),
+    (b'', b'binary', None, _(b'generate binary diffs in git mode (default)')),
+    (b'', b'nodates', None, _(b'omit dates from diff headers')),
 ]
 
 diffwsopts = [
     (
-        'w',
-        'ignore-all-space',
+        b'w',
+        b'ignore-all-space',
         None,
-        _('ignore white space when comparing lines'),
+        _(b'ignore white space when comparing lines'),
     ),
     (
-        'b',
-        'ignore-space-change',
+        b'b',
+        b'ignore-space-change',
         None,
-        _('ignore changes in the amount of white space'),
+        _(b'ignore changes in the amount of white space'),
     ),
     (
-        'B',
-        'ignore-blank-lines',
+        b'B',
+        b'ignore-blank-lines',
         None,
-        _('ignore changes whose lines are all blank'),
+        _(b'ignore changes whose lines are all blank'),
     ),
     (
-        'Z',
-        'ignore-space-at-eol',
+        b'Z',
+        b'ignore-space-at-eol',
         None,
-        _('ignore changes in whitespace at EOL'),
+        _(b'ignore changes in whitespace at EOL'),
     ),
 ]
 
 diffopts2 = (
     [
-        ('', 'noprefix', None, _('omit a/ and b/ prefixes from filenames')),
+        (b'', b'noprefix', None, _(b'omit a/ and b/ prefixes from filenames')),
         (
-            'p',
-            'show-function',
+            b'p',
+            b'show-function',
             None,
-            _('show which function each change is in'),
+            _(b'show which function each change is in'),
         ),
-        ('', 'reverse', None, _('produce a diff that undoes the changes')),
+        (b'', b'reverse', None, _(b'produce a diff that undoes the changes')),
     ]
     + diffwsopts
     + [
-        ('U', 'unified', '', _('number of lines of context to show'), _('NUM')),
-        ('', 'stat', None, _('output diffstat-style summary of changes')),
-        ('', 'root', '', _('produce diffs relative to subdirectory'), _('DIR')),
+        (
+            b'U',
+            b'unified',
+            b'',
+            _(b'number of lines of context to show'),
+            _(b'NUM'),
+        ),
+        (b'', b'stat', None, _(b'output diffstat-style summary of changes')),
+        (
+            b'',
+            b'root',
+            b'',
+            _(b'produce diffs relative to subdirectory'),
+            _(b'DIR'),
+        ),
     ]
 )
 
 mergetoolopts = [
-    ('t', 'tool', '', _('specify merge tool'), _('TOOL')),
+    (b't', b'tool', b'', _(b'specify merge tool'), _(b'TOOL')),
 ]
 
 similarityopts = [
     (
-        's',
-        'similarity',
-        '',
-        _('guess renamed files by similarity (0<=s<=100)'),
-        _('SIMILARITY'),
+        b's',
+        b'similarity',
+        b'',
+        _(b'guess renamed files by similarity (0<=s<=100)'),
+        _(b'SIMILARITY'),
     )
 ]
 
-subrepoopts = [('S', 'subrepos', None, _('recurse into subrepositories'))]
+subrepoopts = [(b'S', b'subrepos', None, _(b'recurse into subrepositories'))]
 
 debugrevlogopts = [
-    ('c', 'changelog', False, _('open changelog')),
-    ('m', 'manifest', False, _('open manifest')),
-    ('', 'dir', '', _('open directory manifest')),
+    (b'c', b'changelog', False, _(b'open changelog')),
+    (b'm', b'manifest', False, _(b'open manifest')),
+    (b'', b'dir', b'', _(b'open directory manifest')),
 ]
 
 # special string such that everything below this line will be ignored in the
 # editor text
-_linebelow = "^HG: ------------------------ >8 ------------------------$"
+_linebelow = b"^HG: ------------------------ >8 ------------------------$"
 
 
 def resolvecommitoptions(ui, opts):
@@ -227,13 +251,13 @@
     The return value indicates that ``rewrite.update-timestamp`` is the reason
     the ``date`` option is set.
     """
-    if opts.get('date') and opts.get('currentdate'):
+    if opts.get(b'date') and opts.get(b'currentdate'):
         raise error.Abort(
-            _('--date and --currentdate are mutually ' 'exclusive')
+            _(b'--date and --currentdate are mutually ' b'exclusive')
         )
     if opts.get(b'user') and opts.get(b'currentuser'):
         raise error.Abort(
-            _('--user and --currentuser are mutually ' 'exclusive')
+            _(b'--user and --currentuser are mutually ' b'exclusive')
         )
 
     datemaydiffer = False  # date-only change should be ignored?
@@ -241,9 +265,9 @@
     if opts.get(b'currentdate'):
         opts[b'date'] = b'%d %d' % dateutil.makedate()
     elif (
-        not opts.get('date')
-        and ui.configbool('rewrite', 'update-timestamp')
-        and opts.get('currentdate') is None
+        not opts.get(b'date')
+        and ui.configbool(b'rewrite', b'update-timestamp')
+        and opts.get(b'currentdate') is None
     ):
         opts[b'date'] = b'%d %d' % dateutil.makedate()
         datemaydiffer = True
@@ -257,7 +281,7 @@
 def checknotesize(ui, opts):
     """ make sure note is of valid format """
 
-    note = opts.get('note')
+    note = opts.get(b'note')
     if not note:
         return
 
@@ -289,13 +313,13 @@
 
 
 def parsealiases(cmd):
-    return cmd.split("|")
+    return cmd.split(b"|")
 
 
 def setupwrapcolorwrite(ui):
     # wrap ui.write so diff output can be labeled/colorized
     def wrapwrite(orig, *args, **kw):
-        label = kw.pop(r'label', '')
+        label = kw.pop(r'label', b'')
         for chunk, l in patch.difflabel(lambda: args):
             orig(chunk, label=label + l)
 
@@ -322,8 +346,8 @@
                 ui, originalhunks, recordfn, operation
             )
     except crecordmod.fallbackerror as e:
-        ui.warn('%s\n' % e.message)
-        ui.warn(_('falling back to text mode\n'))
+        ui.warn(b'%s\n' % e.message)
+        ui.warn(_(b'falling back to text mode\n'))
 
     return patch.filterpatch(ui, originalhunks, match, operation)
 
@@ -336,7 +360,7 @@
     (see patch.filterpatch).
     """
     usecurses = crecordmod.checkcurses(ui)
-    testfile = ui.config('experimental', 'crecordtest')
+    testfile = ui.config(b'experimental', b'crecordtest')
     oldwrite = setupwrapcolorwrite(ui)
     try:
         newchunks, newopts = filterchunks(
@@ -353,13 +377,13 @@
     opts = pycompat.byteskwargs(opts)
     if not ui.interactive():
         if cmdsuggest:
-            msg = _('running non-interactively, use %s instead') % cmdsuggest
+            msg = _(b'running non-interactively, use %s instead') % cmdsuggest
         else:
-            msg = _('running non-interactively')
+            msg = _(b'running non-interactively')
         raise error.Abort(msg)
 
     # make sure username is set before going interactive
-    if not opts.get('user'):
+    if not opts.get(b'user'):
         ui.username()  # raise exception, username not provided
 
     def recordfunc(ui, repo, message, match, opts):
@@ -376,22 +400,22 @@
         In the end we'll record interesting changes, and everything else
         will be left in place, so the user can continue working.
         """
-        if not opts.get('interactive-unshelve'):
+        if not opts.get(b'interactive-unshelve'):
             checkunfinished(repo, commit=True)
         wctx = repo[None]
         merge = len(wctx.parents()) > 1
         if merge:
             raise error.Abort(
                 _(
-                    'cannot partially commit a merge '
-                    '(use "hg commit" instead)'
+                    b'cannot partially commit a merge '
+                    b'(use "hg commit" instead)'
                 )
             )
 
         def fail(f, msg):
-            raise error.Abort('%s: %s' % (f, msg))
-
-        force = opts.get('force')
+            raise error.Abort(b'%s: %s' % (f, msg))
+
+        force = opts.get(b'force')
         if not force:
             vdirs = []
             match = matchmod.badmatch(match, fail)
@@ -428,8 +452,8 @@
             ui,
             opts=opts,
             whitespace=True,
-            section='commands',
-            configprefix='commit.interactive.',
+            section=b'commands',
+            configprefix=b'commit.interactive.',
         )
         diffopts.nodates = True
         diffopts.git = True
@@ -442,7 +466,7 @@
         try:
             chunks, newopts = filterfn(ui, originalchunks, match)
         except error.PatchError as err:
-            raise error.Abort(_('error parsing patch: %s') % err)
+            raise error.Abort(_(b'error parsing patch: %s') % err)
         opts.update(newopts)
 
         # We need to keep a backup of files that have been newly added and
@@ -463,7 +487,7 @@
         changed = status.modified + status.added + status.removed
         newfiles = [f for f in changed if f in contenders]
         if not newfiles:
-            ui.status(_('no changes to record\n'))
+            ui.status(_(b'no changes to record\n'))
             return 0
 
         modified = set(status.modified)
@@ -480,7 +504,7 @@
             ]
         backups = {}
         if tobackup:
-            backupdir = repo.vfs.join('record-backups')
+            backupdir = repo.vfs.join(b'record-backups')
             try:
                 os.mkdir(backupdir)
             except OSError as err:
@@ -490,10 +514,10 @@
             # backup continues
             for f in tobackup:
                 fd, tmpname = pycompat.mkstemp(
-                    prefix=f.replace('/', '_') + '.', dir=backupdir
+                    prefix=f.replace(b'/', b'_') + b'.', dir=backupdir
                 )
                 os.close(fd)
-                ui.debug('backup %r as %r\n' % (f, tmpname))
+                ui.debug(b'backup %r as %r\n' % (f, tmpname))
                 util.copyfile(repo.wjoin(f), tmpname, copystat=True)
                 backups[f] = tmpname
 
@@ -506,14 +530,14 @@
             fp.seek(0)
 
             # 2.5 optionally review / modify patch in text editor
-            if opts.get('review', False):
+            if opts.get(b'review', False):
                 patchtext = (
                     crecordmod.diffhelptext
                     + crecordmod.patchhelptext
                     + fp.read()
                 )
                 reviewedpatch = ui.edit(
-                    patchtext, "", action="diff", repopath=repo.path
+                    patchtext, b"", action=b"diff", repopath=repo.path
                 )
                 fp.truncate(0)
                 fp.write(reviewedpatch)
@@ -535,7 +559,7 @@
             # 3b. (apply)
             if dopatch:
                 try:
-                    ui.debug('applying patch\n')
+                    ui.debug(b'applying patch\n')
                     ui.debug(fp.getvalue())
                     patch.internalpatch(ui, repo, fp, 1, eolmode=None)
                 except error.PatchError as err:
@@ -554,9 +578,9 @@
             try:
                 dirstate = repo.dirstate
                 for realname, tmpname in backups.iteritems():
-                    ui.debug('restoring %r to %r\n' % (tmpname, realname))
-
-                    if dirstate[realname] == 'n':
+                    ui.debug(b'restoring %r to %r\n' % (tmpname, realname))
+
+                    if dirstate[realname] == b'n':
                         # without normallookup, restoring timestamp
                         # may cause partially committed files
                         # to be treated as unmodified
@@ -618,8 +642,8 @@
 
         # if the filename contains a path separator, it means it's not the direct
         # child of this directory
-        if '/' in filename:
-            subdir, filep = filename.split('/', 1)
+        if b'/' in filename:
+            subdir, filep = filename.split(b'/', 1)
 
             # does the dirnode object for subdir exist
             if subdir not in self.subdirs:
@@ -669,7 +693,7 @@
             # Making sure we terse only when the status abbreviation is
             # passed as terse argument
             if onlyst in terseargs:
-                yield onlyst, self.path + '/'
+                yield onlyst, self.path + b'/'
                 return
 
         # add the files to status list
@@ -696,23 +720,23 @@
     directory or not.
     """
     # the order matters here as that is used to produce the final list
-    allst = ('m', 'a', 'r', 'd', 'u', 'i', 'c')
+    allst = (b'm', b'a', b'r', b'd', b'u', b'i', b'c')
 
     # checking the argument validity
     for s in pycompat.bytestr(terseargs):
         if s not in allst:
-            raise error.Abort(_("'%s' not recognized") % s)
+            raise error.Abort(_(b"'%s' not recognized") % s)
 
     # creating a dirnode object for the root of the repo
-    rootobj = dirnode('')
+    rootobj = dirnode(b'')
     pstatus = (
-        'modified',
-        'added',
-        'deleted',
-        'clean',
-        'unknown',
-        'ignored',
-        'removed',
+        b'modified',
+        b'added',
+        b'deleted',
+        b'clean',
+        b'unknown',
+        b'ignored',
+        b'removed',
     )
 
     tersedict = {}
@@ -742,8 +766,8 @@
 def _commentlines(raw):
     '''Surround lines with a comment char and a new line'''
     lines = raw.splitlines()
-    commentedlines = ['# %s' % line for line in lines]
-    return '\n'.join(commentedlines) + '\n'
+    commentedlines = [b'# %s' % line for line in lines]
+    return b'\n'.join(commentedlines) + b'\n'
 
 
 def _conflictsmsg(repo):
@@ -754,9 +778,9 @@
     m = scmutil.match(repo[None])
     unresolvedlist = [f for f in mergestate.unresolved() if m(f)]
     if unresolvedlist:
-        mergeliststr = '\n'.join(
+        mergeliststr = b'\n'.join(
             [
-                '    %s' % util.pathto(repo.root, encoding.getcwd(), path)
+                b'    %s' % util.pathto(repo.root, encoding.getcwd(), path)
                 for path in sorted(unresolvedlist)
             ]
         )
@@ -771,23 +795,23 @@
             % mergeliststr
         )
     else:
-        msg = _('No unresolved merge conflicts.')
+        msg = _(b'No unresolved merge conflicts.')
 
     return _commentlines(msg)
 
 
 def morestatus(repo, fm):
     statetuple = statemod.getrepostate(repo)
-    label = 'status.morestatus'
+    label = b'status.morestatus'
     if statetuple:
         state, helpfulmsg = statetuple
-        statemsg = _('The repository is in an unfinished *%s* state.') % state
-        fm.plain('%s\n' % _commentlines(statemsg), label=label)
+        statemsg = _(b'The repository is in an unfinished *%s* state.') % state
+        fm.plain(b'%s\n' % _commentlines(statemsg), label=label)
         conmsg = _conflictsmsg(repo)
         if conmsg:
-            fm.plain('%s\n' % conmsg, label=label)
+            fm.plain(b'%s\n' % conmsg, label=label)
         if helpfulmsg:
-            fm.plain('%s\n' % _commentlines(helpfulmsg), label=label)
+            fm.plain(b'%s\n' % _commentlines(helpfulmsg), label=label)
 
 
 def findpossible(cmd, table, strict=False):
@@ -818,7 +842,7 @@
                     found = a
                     break
         if found is not None:
-            if aliases[0].startswith("debug") or found.startswith("debug"):
+            if aliases[0].startswith(b"debug") or found.startswith(b"debug"):
                 debugchoice[found] = (aliases, table[e])
             else:
                 choice[found] = (aliases, table[e])
@@ -849,28 +873,32 @@
 def changebranch(ui, repo, revs, label):
     """ Change the branch name of given revs to label """
 
-    with repo.wlock(), repo.lock(), repo.transaction('branches'):
+    with repo.wlock(), repo.lock(), repo.transaction(b'branches'):
         # abort in case of uncommitted merge or dirty wdir
         bailifchanged(repo)
         revs = scmutil.revrange(repo, revs)
         if not revs:
-            raise error.Abort("empty revision set")
-        roots = repo.revs('roots(%ld)', revs)
+            raise error.Abort(b"empty revision set")
+        roots = repo.revs(b'roots(%ld)', revs)
         if len(roots) > 1:
-            raise error.Abort(_("cannot change branch of non-linear revisions"))
-        rewriteutil.precheck(repo, revs, 'change branch of')
+            raise error.Abort(
+                _(b"cannot change branch of non-linear revisions")
+            )
+        rewriteutil.precheck(repo, revs, b'change branch of')
 
         root = repo[roots.first()]
         rpb = {parent.branch() for parent in root.parents()}
         if label not in rpb and label in repo.branchmap():
-            raise error.Abort(_("a branch of the same name already exists"))
-
-        if repo.revs('obsolete() and %ld', revs):
-            raise error.Abort(_("cannot change branch of a obsolete changeset"))
+            raise error.Abort(_(b"a branch of the same name already exists"))
+
+        if repo.revs(b'obsolete() and %ld', revs):
+            raise error.Abort(
+                _(b"cannot change branch of a obsolete changeset")
+            )
 
         # make sure only topological heads
-        if repo.revs('heads(%ld) - head()', revs):
-            raise error.Abort(_("cannot change branch in middle of a stack"))
+        if repo.revs(b'heads(%ld) - head()', revs):
+            raise error.Abort(_(b"cannot change branch in middle of a stack"))
 
         replacements = {}
         # avoid import cycle mercurial.cmdutil -> mercurial.context ->
@@ -891,11 +919,11 @@
                     return None
 
             ui.debug(
-                "changing branch of '%s' from '%s' to '%s'\n"
+                b"changing branch of '%s' from '%s' to '%s'\n"
                 % (hex(ctx.node()), oldbranch, label)
             )
             extra = ctx.extra()
-            extra['branch_change'] = hex(ctx.node())
+            extra[b'branch_change'] = hex(ctx.node())
             # While changing branch of set of linear commits, make sure that
             # we base our commits on new parent rather than old parent which
             # was obsoleted while changing the branch
@@ -920,10 +948,12 @@
 
             newnode = repo.commitctx(mc)
             replacements[ctx.node()] = (newnode,)
-            ui.debug('new node id is %s\n' % hex(newnode))
+            ui.debug(b'new node id is %s\n' % hex(newnode))
 
         # create obsmarkers and move bookmarks
-        scmutil.cleanupnodes(repo, replacements, 'branch-change', fixphase=True)
+        scmutil.cleanupnodes(
+            repo, replacements, b'branch-change', fixphase=True
+        )
 
         # move the working copy too
         wctx = repo[None]
@@ -937,11 +967,11 @@
 
                 hg.update(repo, newid[0], quietempty=True)
 
-        ui.status(_("changed branch on %d changesets\n") % len(replacements))
+        ui.status(_(b"changed branch on %d changesets\n") % len(replacements))
 
 
 def findrepo(p):
-    while not os.path.isdir(os.path.join(p, ".hg")):
+    while not os.path.isdir(os.path.join(p, b".hg")):
         oldp, p = p, os.path.dirname(p)
         if p == oldp:
             return None
@@ -959,10 +989,10 @@
     """
 
     if merge and repo.dirstate.p2() != nullid:
-        raise error.Abort(_('outstanding uncommitted merge'), hint=hint)
+        raise error.Abort(_(b'outstanding uncommitted merge'), hint=hint)
     modified, added, removed, deleted = repo.status()[:4]
     if modified or added or removed or deleted:
-        raise error.Abort(_('uncommitted changes'), hint=hint)
+        raise error.Abort(_(b'uncommitted changes'), hint=hint)
     ctx = repo[None]
     for s in sorted(ctx.substate):
         ctx.sub(s).bailifchanged(hint=hint)
@@ -970,22 +1000,22 @@
 
 def logmessage(ui, opts):
     """ get the log message according to -m and -l option """
-    message = opts.get('message')
-    logfile = opts.get('logfile')
+    message = opts.get(b'message')
+    logfile = opts.get(b'logfile')
 
     if message and logfile:
         raise error.Abort(
-            _('options --message and --logfile are mutually ' 'exclusive')
+            _(b'options --message and --logfile are mutually ' b'exclusive')
         )
     if not message and logfile:
         try:
             if isstdiofilename(logfile):
                 message = ui.fin.read()
             else:
-                message = '\n'.join(util.readfile(logfile).splitlines())
+                message = b'\n'.join(util.readfile(logfile).splitlines())
         except IOError as inst:
             raise error.Abort(
-                _("can't read commit message '%s': %s")
+                _(b"can't read commit message '%s': %s")
                 % (logfile, encoding.strtolocal(inst.strerror))
             )
     return message
@@ -1002,15 +1032,15 @@
     """
     if isinstance(ctxorbool, bool):
         if ctxorbool:
-            return baseformname + ".merge"
+            return baseformname + b".merge"
     elif len(ctxorbool.parents()) > 1:
-        return baseformname + ".merge"
-
-    return baseformname + ".normal"
+        return baseformname + b".merge"
+
+    return baseformname + b".normal"
 
 
 def getcommiteditor(
-    edit=False, finishdesc=None, extramsg=None, editform='', **opts
+    edit=False, finishdesc=None, extramsg=None, editform=b'', **opts
 ):
     """get appropriate commit message editor according to '--edit' option
 
@@ -1081,7 +1111,7 @@
     t = formatter.maketemplater(
         repo.ui, tmpl, defaults=templatekw.keywords, resources=tres
     )
-    mapping = {'ctx': ctx}
+    mapping = {b'ctx': ctx}
     if props:
         mapping.update(props)
     return t.renderdefault(mapping)
@@ -1145,7 +1175,7 @@
             newname.append(stringutil.escapestr(pat[i:n]))
             if n + 2 > end:
                 raise error.Abort(
-                    _("incomplete format spec in output " "filename")
+                    _(b"incomplete format spec in output " b"filename")
                 )
             c = pat[n + 1 : n + 2]
             i = n + 2
@@ -1153,9 +1183,9 @@
                 newname.append(expander[c])
             except KeyError:
                 raise error.Abort(
-                    _("invalid format spec '%%%s' in output " "filename") % c
+                    _(b"invalid format spec '%%%s' in output " b"filename") % c
                 )
-    return ''.join(newname)
+    return b''.join(newname)
 
 
 def makefilename(ctx, pat, **props):
@@ -1170,7 +1200,7 @@
 
 def isstdiofilename(pat):
     """True if the given pat looks like a filename denoting stdin/stdout"""
-    return not pat or pat == '-'
+    return not pat or pat == b'-'
 
 
 class _unclosablefile(object):
@@ -1193,8 +1223,8 @@
         pass
 
 
-def makefileobj(ctx, pat, mode='wb', **props):
-    writable = mode not in ('r', 'rb')
+def makefileobj(ctx, pat, mode=b'wb', **props):
+    writable = mode not in (b'r', b'rb')
 
     if isstdiofilename(pat):
         repo = ctx.repo()
@@ -1209,21 +1239,21 @@
 
 def openstorage(repo, cmd, file_, opts, returnrevlog=False):
     """opens the changelog, manifest, a filelog or a given revlog"""
-    cl = opts['changelog']
-    mf = opts['manifest']
-    dir = opts['dir']
+    cl = opts[b'changelog']
+    mf = opts[b'manifest']
+    dir = opts[b'dir']
     msg = None
     if cl and mf:
-        msg = _('cannot specify --changelog and --manifest at the same time')
+        msg = _(b'cannot specify --changelog and --manifest at the same time')
     elif cl and dir:
-        msg = _('cannot specify --changelog and --dir at the same time')
+        msg = _(b'cannot specify --changelog and --dir at the same time')
     elif cl or mf or dir:
         if file_:
-            msg = _('cannot specify filename with --changelog or --manifest')
+            msg = _(b'cannot specify filename with --changelog or --manifest')
         elif not repo:
             msg = _(
-                'cannot specify --changelog or --manifest or --dir '
-                'without a repository'
+                b'cannot specify --changelog or --manifest or --dir '
+                b'without a repository'
             )
     if msg:
         raise error.Abort(msg)
@@ -1233,15 +1263,15 @@
         if cl:
             r = repo.unfiltered().changelog
         elif dir:
-            if 'treemanifest' not in repo.requirements:
+            if b'treemanifest' not in repo.requirements:
                 raise error.Abort(
                     _(
-                        "--dir can only be used on repos with "
-                        "treemanifest enabled"
+                        b"--dir can only be used on repos with "
+                        b"treemanifest enabled"
                     )
                 )
-            if not dir.endswith('/'):
-                dir = dir + '/'
+            if not dir.endswith(b'/'):
+                dir = dir + b'/'
             dirlog = repo.manifestlog.getstorage(dir)
             if len(dirlog):
                 r = dirlog
@@ -1257,21 +1287,21 @@
         if returnrevlog:
             if isinstance(r, revlog.revlog):
                 pass
-            elif util.safehasattr(r, '_revlog'):
+            elif util.safehasattr(r, b'_revlog'):
                 r = r._revlog
             elif r is not None:
-                raise error.Abort(_('%r does not appear to be a revlog') % r)
+                raise error.Abort(_(b'%r does not appear to be a revlog') % r)
 
     if not r:
         if not returnrevlog:
-            raise error.Abort(_('cannot give path to non-revlog'))
+            raise error.Abort(_(b'cannot give path to non-revlog'))
 
         if not file_:
-            raise error.CommandError(cmd, _('invalid arguments'))
+            raise error.CommandError(cmd, _(b'invalid arguments'))
         if not os.path.isfile(file_):
-            raise error.Abort(_("revlog '%s' not found") % file_)
+            raise error.Abort(_(b"revlog '%s' not found") % file_)
         r = revlog.revlog(
-            vfsmod.vfs(encoding.getcwd(), audit=False), file_[:-2] + ".i"
+            vfsmod.vfs(encoding.getcwd(), audit=False), file_[:-2] + b".i"
         )
     return r
 
@@ -1296,8 +1326,8 @@
     # ossep => pathname that uses os.sep to separate directories
     cwd = repo.getcwd()
     targets = {}
-    after = opts.get("after")
-    dryrun = opts.get("dry_run")
+    after = opts.get(b"after")
+    dryrun = opts.get(b"dry_run")
     wctx = repo[None]
 
     uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
@@ -1305,22 +1335,22 @@
     def walkpat(pat):
         srcs = []
         if after:
-            badstates = '?'
+            badstates = b'?'
         else:
-            badstates = '?r'
+            badstates = b'?r'
         m = scmutil.match(wctx, [pat], opts, globbed=True)
         for abs in wctx.walk(m):
             state = repo.dirstate[abs]
             rel = uipathfn(abs)
             exact = m.exact(abs)
             if state in badstates:
-                if exact and state == '?':
-                    ui.warn(_('%s: not copying - file is not managed\n') % rel)
-                if exact and state == 'r':
+                if exact and state == b'?':
+                    ui.warn(_(b'%s: not copying - file is not managed\n') % rel)
+                if exact and state == b'r':
                     ui.warn(
                         _(
-                            '%s: not copying - file has been marked for'
-                            ' remove\n'
+                            b'%s: not copying - file has been marked for'
+                            b' remove\n'
                         )
                         % rel
                     )
@@ -1335,11 +1365,11 @@
     # otarget: ossep
     def copyfile(abssrc, relsrc, otarget, exact):
         abstarget = pathutil.canonpath(repo.root, cwd, otarget)
-        if '/' in abstarget:
+        if b'/' in abstarget:
             # We cannot normalize abstarget itself; this would prevent
             # case-only renames, like a => A.
-            abspath, absname = abstarget.rsplit('/', 1)
-            abstarget = repo.dirstate.normalize(abspath) + '/' + absname
+            abspath, absname = abstarget.rsplit(b'/', 1)
+            abstarget = repo.dirstate.normalize(abspath) + b'/' + absname
         reltarget = repo.pathto(abstarget, cwd)
         target = repo.wjoin(abstarget)
         src = repo.wjoin(abssrc)
@@ -1351,7 +1381,7 @@
         prevsrc = targets.get(abstarget)
         if prevsrc is not None:
             ui.warn(
-                _('%s: not overwriting - %s collides with %s\n')
+                _(b'%s: not overwriting - %s collides with %s\n')
                 % (
                     reltarget,
                     repo.pathto(abssrc, cwd),
@@ -1368,41 +1398,43 @@
                 abstarget
             ):
                 if not rename:
-                    ui.warn(_("%s: can't copy - same file\n") % reltarget)
+                    ui.warn(_(b"%s: can't copy - same file\n") % reltarget)
                     return True  # report a failure
                 exists = False
                 samefile = True
 
-        if not after and exists or after and state in 'mn':
-            if not opts['force']:
-                if state in 'mn':
-                    msg = _('%s: not overwriting - file already committed\n')
+        if not after and exists or after and state in b'mn':
+            if not opts[b'force']:
+                if state in b'mn':
+                    msg = _(b'%s: not overwriting - file already committed\n')
                     if after:
-                        flags = '--after --force'
+                        flags = b'--after --force'
                     else:
-                        flags = '--force'
+                        flags = b'--force'
                     if rename:
                         hint = (
                             _(
-                                "('hg rename %s' to replace the file by "
-                                'recording a rename)\n'
+                                b"('hg rename %s' to replace the file by "
+                                b'recording a rename)\n'
                             )
                             % flags
                         )
                     else:
                         hint = (
                             _(
-                                "('hg copy %s' to replace the file by "
-                                'recording a copy)\n'
+                                b"('hg copy %s' to replace the file by "
+                                b'recording a copy)\n'
                             )
                             % flags
                         )
                 else:
-                    msg = _('%s: not overwriting - file exists\n')
+                    msg = _(b'%s: not overwriting - file exists\n')
                     if rename:
-                        hint = _("('hg rename --after' to record the rename)\n")
+                        hint = _(
+                            b"('hg rename --after' to record the rename)\n"
+                        )
                     else:
-                        hint = _("('hg copy --after' to record the copy)\n")
+                        hint = _(b"('hg copy --after' to record the copy)\n")
                 ui.warn(msg % reltarget)
                 ui.warn(hint)
                 return True  # report a failure
@@ -1411,12 +1443,12 @@
             if not exists:
                 if rename:
                     ui.warn(
-                        _('%s: not recording move - %s does not exist\n')
+                        _(b'%s: not recording move - %s does not exist\n')
                         % (relsrc, reltarget)
                     )
                 else:
                     ui.warn(
-                        _('%s: not recording copy - %s does not exist\n')
+                        _(b'%s: not recording copy - %s does not exist\n')
                         % (relsrc, reltarget)
                     )
                 return True  # report a failure
@@ -1424,11 +1456,11 @@
             try:
                 if exists:
                     os.unlink(target)
-                targetdir = os.path.dirname(target) or '.'
+                targetdir = os.path.dirname(target) or b'.'
                 if not os.path.isdir(targetdir):
                     os.makedirs(targetdir)
                 if samefile:
-                    tmp = target + "~hgrename"
+                    tmp = target + b"~hgrename"
                     os.rename(src, tmp)
                     os.rename(tmp, target)
                 else:
@@ -1438,20 +1470,20 @@
                 srcexists = True
             except IOError as inst:
                 if inst.errno == errno.ENOENT:
-                    ui.warn(_('%s: deleted in working directory\n') % relsrc)
+                    ui.warn(_(b'%s: deleted in working directory\n') % relsrc)
                     srcexists = False
                 else:
                     ui.warn(
-                        _('%s: cannot copy - %s\n')
+                        _(b'%s: cannot copy - %s\n')
                         % (relsrc, encoding.strtolocal(inst.strerror))
                     )
                     return True  # report a failure
 
         if ui.verbose or not exact:
             if rename:
-                ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
+                ui.status(_(b'moving %s to %s\n') % (relsrc, reltarget))
             else:
-                ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
+                ui.status(_(b'copying %s to %s\n') % (relsrc, reltarget))
 
         targets[abstarget] = abssrc
 
@@ -1461,7 +1493,7 @@
         )
         if rename and not dryrun:
             if not after and srcexists and not samefile:
-                rmdir = repo.ui.configbool('experimental', 'removeemptydirs')
+                rmdir = repo.ui.configbool(b'experimental', b'removeemptydirs')
                 repo.wvfs.unlinkpath(abssrc, rmdir=rmdir)
             wctx.forget([abssrc])
 
@@ -1535,21 +1567,21 @@
 
     pats = scmutil.expandpats(pats)
     if not pats:
-        raise error.Abort(_('no source or destination specified'))
+        raise error.Abort(_(b'no source or destination specified'))
     if len(pats) == 1:
-        raise error.Abort(_('no destination specified'))
+        raise error.Abort(_(b'no destination specified'))
     dest = pats.pop()
     destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
     if not destdirexists:
         if len(pats) > 1 or matchmod.patkind(pats[0]):
             raise error.Abort(
                 _(
-                    'with multiple sources, destination must be an '
-                    'existing directory'
+                    b'with multiple sources, destination must be an '
+                    b'existing directory'
                 )
             )
         if util.endswithsep(dest):
-            raise error.Abort(_('destination %s is not a directory') % dest)
+            raise error.Abort(_(b'destination %s is not a directory') % dest)
 
     tfn = targetpathfn
     if after:
@@ -1561,7 +1593,7 @@
             continue
         copylist.append((tfn(pat, dest, srcs), srcs))
     if not copylist:
-        raise error.Abort(_('no files to copy'))
+        raise error.Abort(_(b'no files to copy'))
 
     errors = 0
     for targetpath, srcs in copylist:
@@ -1615,21 +1647,21 @@
     # avoid cycle context -> subrepo -> cmdutil
     from . import context
 
-    tmpname = patchdata.get('filename')
-    message = patchdata.get('message')
-    user = opts.get('user') or patchdata.get('user')
-    date = opts.get('date') or patchdata.get('date')
-    branch = patchdata.get('branch')
-    nodeid = patchdata.get('nodeid')
-    p1 = patchdata.get('p1')
-    p2 = patchdata.get('p2')
-
-    nocommit = opts.get('no_commit')
-    importbranch = opts.get('import_branch')
-    update = not opts.get('bypass')
-    strip = opts["strip"]
-    prefix = opts["prefix"]
-    sim = float(opts.get('similarity') or 0)
+    tmpname = patchdata.get(b'filename')
+    message = patchdata.get(b'message')
+    user = opts.get(b'user') or patchdata.get(b'user')
+    date = opts.get(b'date') or patchdata.get(b'date')
+    branch = patchdata.get(b'branch')
+    nodeid = patchdata.get(b'nodeid')
+    p1 = patchdata.get(b'p1')
+    p2 = patchdata.get(b'p2')
+
+    nocommit = opts.get(b'no_commit')
+    importbranch = opts.get(b'import_branch')
+    update = not opts.get(b'bypass')
+    strip = opts[b"strip"]
+    prefix = opts[b"prefix"]
+    sim = float(opts.get(b'similarity') or 0)
 
     if not tmpname:
         return None, None, False
@@ -1646,13 +1678,13 @@
     else:
         # launch the editor
         message = None
-    ui.debug('message:\n%s\n' % (message or ''))
+    ui.debug(b'message:\n%s\n' % (message or b''))
 
     if len(parents) == 1:
         parents.append(repo[nullid])
-    if opts.get('exact'):
+    if opts.get(b'exact'):
         if not nodeid or not p1:
-            raise error.Abort(_('not a Mercurial patch'))
+            raise error.Abort(_(b'not a Mercurial patch'))
         p1 = repo[p1]
         p2 = repo[p2 or nullid]
     elif p2:
@@ -1670,8 +1702,8 @@
         if p2.node() == nullid:
             ui.warn(
                 _(
-                    "warning: import the patch as a normal revision\n"
-                    "(use --exact to import the patch as a merge)\n"
+                    b"warning: import the patch as a normal revision\n"
+                    b"(use --exact to import the patch as a merge)\n"
                 )
             )
     else:
@@ -1684,10 +1716,10 @@
         if p2 != parents[1]:
             repo.setparents(p1.node(), p2.node())
 
-        if opts.get('exact') or importbranch:
-            repo.dirstate.setbranch(branch or 'default')
-
-        partial = opts.get('partial', False)
+        if opts.get(b'exact') or importbranch:
+            repo.dirstate.setbranch(branch or b'default')
+
+        partial = opts.get(b'partial', False)
         files = set()
         try:
             patch.patch(
@@ -1711,15 +1743,15 @@
             if message:
                 msgs.append(message)
         else:
-            if opts.get('exact') or p2:
+            if opts.get(b'exact') or p2:
                 # If you got here, you either used --force and know what
                 # you are doing, or used --exact, or applied a merge patch
                 # while being updated to its first parent.
                 m = None
             else:
                 m = scmutil.matchfiles(repo, files or [])
-            editform = mergeeditform(repo[None], 'import.normal')
-            if opts.get('exact'):
+            editform = mergeeditform(repo[None], b'import.normal')
+            if opts.get(b'exact'):
                 editor = None
             else:
                 editor = getcommiteditor(
@@ -1730,16 +1762,16 @@
                 extrapreimportmap[idfunc](repo, patchdata, extra, opts)
             overrides = {}
             if partial:
-                overrides[('ui', 'allowemptycommit')] = True
-            with repo.ui.configoverride(overrides, 'import'):
+                overrides[(b'ui', b'allowemptycommit')] = True
+            with repo.ui.configoverride(overrides, b'import'):
                 n = repo.commit(
                     message, user, date, match=m, editor=editor, extra=extra
                 )
                 for idfunc in extrapostimport:
                     extrapostimportmap[idfunc](repo[n])
     else:
-        if opts.get('exact') or importbranch:
-            branch = branch or 'default'
+        if opts.get(b'exact') or importbranch:
+            branch = branch or b'default'
         else:
             branch = p1.branch()
         store = patch.filestore()
@@ -1759,10 +1791,10 @@
                 )
             except error.PatchError as e:
                 raise error.Abort(stringutil.forcebytestr(e))
-            if opts.get('exact'):
+            if opts.get(b'exact'):
                 editor = None
             else:
-                editor = getcommiteditor(editform='import.bypass')
+                editor = getcommiteditor(editform=b'import.bypass')
             memctx = context.memctx(
                 repo,
                 (p1.node(), p2.node()),
@@ -1777,16 +1809,16 @@
             n = memctx.commit()
         finally:
             store.close()
-    if opts.get('exact') and nocommit:
+    if opts.get(b'exact') and nocommit:
         # --exact with --no-commit is still useful in that it applies the
         # merge and branch bits
-        ui.warn(_("warning: can't check exact import with --no-commit\n"))
-    elif opts.get('exact') and (not n or hex(n) != nodeid):
-        raise error.Abort(_('patch is damaged or loses information'))
-    msg = _('applied to working directory')
+        ui.warn(_(b"warning: can't check exact import with --no-commit\n"))
+    elif opts.get(b'exact') and (not n or hex(n) != nodeid):
+        raise error.Abort(_(b'patch is damaged or loses information'))
+    msg = _(b'applied to working directory')
     if n:
         # i18n: refers to a short changeset id
-        msg = _('created %s') % short(n)
+        msg = _(b'created %s') % short(n)
     return msg, n, rejects
 
 
@@ -1812,27 +1844,27 @@
         prev = nullid
 
     fm.context(ctx=ctx)
-    fm.plain('# HG changeset patch\n')
-    fm.write('user', '# User %s\n', ctx.user())
-    fm.plain('# Date %d %d\n' % ctx.date())
-    fm.write('date', '#      %s\n', fm.formatdate(ctx.date()))
+    fm.plain(b'# HG changeset patch\n')
+    fm.write(b'user', b'# User %s\n', ctx.user())
+    fm.plain(b'# Date %d %d\n' % ctx.date())
+    fm.write(b'date', b'#      %s\n', fm.formatdate(ctx.date()))
     fm.condwrite(
-        branch and branch != 'default', 'branch', '# Branch %s\n', branch
+        branch and branch != b'default', b'branch', b'# Branch %s\n', branch
     )
-    fm.write('node', '# Node ID %s\n', hex(node))
-    fm.plain('# Parent  %s\n' % hex(prev))
+    fm.write(b'node', b'# Node ID %s\n', hex(node))
+    fm.plain(b'# Parent  %s\n' % hex(prev))
     if len(parents) > 1:
-        fm.plain('# Parent  %s\n' % hex(parents[1]))
-    fm.data(parents=fm.formatlist(pycompat.maplist(hex, parents), name='node'))
+        fm.plain(b'# Parent  %s\n' % hex(parents[1]))
+    fm.data(parents=fm.formatlist(pycompat.maplist(hex, parents), name=b'node'))
 
     # TODO: redesign extraexportmap function to support formatter
     for headerid in extraexport:
         header = extraexportmap[headerid](seqno, ctx)
         if header is not None:
-            fm.plain('# %s\n' % header)
-
-    fm.write('desc', '%s\n', ctx.description().rstrip())
-    fm.plain('\n')
+            fm.plain(b'# %s\n' % header)
+
+    fm.write(b'desc', b'%s\n', ctx.description().rstrip())
+    fm.plain(b'\n')
 
     if fm.isplain():
         chunkiter = patch.diffui(repo, prev, node, match, opts=diffopts)
@@ -1848,8 +1880,8 @@
     """Export changesets to stdout or a single file"""
     for seqno, rev in enumerate(revs, 1):
         ctx = repo[rev]
-        if not dest.startswith('<'):
-            repo.ui.note("%s\n" % dest)
+        if not dest.startswith(b'<'):
+            repo.ui.note(b"%s\n" % dest)
         fm.startitem()
         _exportsingle(repo, ctx, fm, match, switch_parent, seqno, diffopts)
 
@@ -1871,7 +1903,7 @@
 
     for dest in filemap:
         with formatter.maybereopen(basefm, dest) as fm:
-            repo.ui.note("%s\n" % dest)
+            repo.ui.note(b"%s\n" % dest)
             for seqno, rev in filemap[dest]:
                 fm.startitem()
                 ctx = repo[rev]
@@ -1893,7 +1925,7 @@
     repo,
     revs,
     basefm,
-    fntemplate='hg-%h.patch',
+    fntemplate=b'hg-%h.patch',
     switch_parent=False,
     opts=None,
     match=None,
@@ -1923,7 +1955,9 @@
     _prefetchchangedfiles(repo, revs, match)
 
     if not fntemplate:
-        _exportfile(repo, revs, basefm, '<unnamed>', switch_parent, opts, match)
+        _exportfile(
+            repo, revs, basefm, b'<unnamed>', switch_parent, opts, match
+        )
     else:
         _exportfntemplate(
             repo, revs, basefm, fntemplate, switch_parent, opts, match
@@ -1934,8 +1968,8 @@
     """Export changesets to the given file stream"""
     _prefetchchangedfiles(repo, revs, match)
 
-    dest = getattr(fp, 'name', '<unnamed>')
-    with formatter.formatter(repo.ui, fp, 'export', {}) as fm:
+    dest = getattr(fp, 'name', b'<unnamed>')
+    with formatter.formatter(repo.ui, fp, b'export', {}) as fm:
         _exportfile(repo, revs, fm, dest, switch_parent, opts, match)
 
 
@@ -1944,26 +1978,31 @@
 
     To be used by debug functions."""
     if index is not None:
-        fm.write('index', '%i ', index)
-    fm.write('prednode', '%s ', hex(marker.prednode()))
+        fm.write(b'index', b'%i ', index)
+    fm.write(b'prednode', b'%s ', hex(marker.prednode()))
     succs = marker.succnodes()
     fm.condwrite(
-        succs, 'succnodes', '%s ', fm.formatlist(map(hex, succs), name='node')
+        succs,
+        b'succnodes',
+        b'%s ',
+        fm.formatlist(map(hex, succs), name=b'node'),
     )
-    fm.write('flag', '%X ', marker.flags())
+    fm.write(b'flag', b'%X ', marker.flags())
     parents = marker.parentnodes()
     if parents is not None:
         fm.write(
-            'parentnodes',
-            '{%s} ',
-            fm.formatlist(map(hex, parents), name='node', sep=', '),
+            b'parentnodes',
+            b'{%s} ',
+            fm.formatlist(map(hex, parents), name=b'node', sep=b', '),
         )
-    fm.write('date', '(%s) ', fm.formatdate(marker.date()))
+    fm.write(b'date', b'(%s) ', fm.formatdate(marker.date()))
     meta = marker.metadata().copy()
-    meta.pop('date', None)
+    meta.pop(b'date', None)
     smeta = pycompat.rapply(pycompat.maybebytestr, meta)
-    fm.write('metadata', '{%s}', fm.formatdict(smeta, fmt='%r: %r', sep=', '))
-    fm.plain('\n')
+    fm.write(
+        b'metadata', b'{%s}', fm.formatdict(smeta, fmt=b'%r: %r', sep=b', ')
+    )
+    fm.plain(b'\n')
 
 
 def finddate(ui, repo, date):
@@ -1978,16 +2017,16 @@
         if df(d[0]):
             results[ctx.rev()] = d
 
-    for ctx in walkchangerevs(repo, m, {'rev': None}, prep):
+    for ctx in walkchangerevs(repo, m, {b'rev': None}, prep):
         rev = ctx.rev()
         if rev in results:
             ui.status(
-                _("found revision %s from %s\n")
+                _(b"found revision %s from %s\n")
                 % (rev, dateutil.datestr(results[rev]))
             )
-            return '%d' % rev
-
-    raise error.Abort(_("revision matching date not found"))
+            return b'%d' % rev
+
+    raise error.Abort(_(b"revision matching date not found"))
 
 
 def increasingwindows(windowsize=8, sizelimit=512):
@@ -2000,13 +2039,13 @@
 def _walkrevs(repo, opts):
     # Default --rev value depends on --follow but --follow behavior
     # depends on revisions resolved from --rev...
-    follow = opts.get('follow') or opts.get('follow_first')
-    if opts.get('rev'):
-        revs = scmutil.revrange(repo, opts['rev'])
+    follow = opts.get(b'follow') or opts.get(b'follow_first')
+    if opts.get(b'rev'):
+        revs = scmutil.revrange(repo, opts[b'rev'])
     elif follow and repo.dirstate.p1() == nullid:
         revs = smartset.baseset()
     elif follow:
-        revs = repo.revs('reverse(:.)')
+        revs = repo.revs(b'reverse(:.)')
     else:
         revs = smartset.spanset(repo)
         revs.reverse()
@@ -2060,12 +2099,15 @@
         return reversed(revs)
 
     def iterfiles():
-        pctx = repo['.']
+        pctx = repo[b'.']
         for filename in match.files():
             if follow:
                 if filename not in pctx:
                     raise error.Abort(
-                        _('cannot follow file not in parent ' 'revision: "%s"')
+                        _(
+                            b'cannot follow file not in parent '
+                            b'revision: "%s"'
+                        )
                         % filename
                     )
                 yield filename, pctx[filename].filenode()
@@ -2082,9 +2124,9 @@
                 # try to find matching entries on the slow path.
                 if follow:
                     raise error.Abort(
-                        _('cannot follow nonexistent file: "%s"') % file_
+                        _(b'cannot follow nonexistent file: "%s"') % file_
                     )
-                raise FileWalkError("Cannot walk via filelog")
+                raise FileWalkError(b"Cannot walk via filelog")
             else:
                 continue
 
@@ -2176,13 +2218,13 @@
     yielding each context, the iterator will first call the prepare
     function on each context in the window in forward order.'''
 
-    allfiles = opts.get('all_files')
-    follow = opts.get('follow') or opts.get('follow_first')
+    allfiles = opts.get(b'all_files')
+    follow = opts.get(b'follow') or opts.get(b'follow_first')
     revs = _walkrevs(repo, opts)
     if not revs:
         return []
     wanted = set()
-    slowpath = match.anypats() or (not match.always() and opts.get('removed'))
+    slowpath = match.anypats() or (not match.always() and opts.get(b'removed'))
     fncache = {}
     change = repo.__getitem__
 
@@ -2206,7 +2248,7 @@
             # of the paths was not a file. Check to see if at least one of them
             # existed in history, otherwise simply return
             for path in match.files():
-                if path == '.' or path in repo.store:
+                if path == b'.' or path in repo.store:
                     break
             else:
                 return []
@@ -2217,7 +2259,7 @@
 
         if follow:
             raise error.Abort(
-                _('can only follow copies/renames for explicit ' 'filenames')
+                _(b'can only follow copies/renames for explicit ' b'filenames')
             )
 
         # The slow path checks files modified in every changeset.
@@ -2255,7 +2297,7 @@
 
     # it might be worthwhile to do this in the iterator if the rev range
     # is descending and the prune args are all within that range
-    for rev in opts.get('prune', ()):
+    for rev in opts.get(b'prune', ()):
         rev = repo[rev].rev()
         ff = _followfilter(repo)
         stop = min(revs[0], revs[-1])
@@ -2267,7 +2309,7 @@
     # revision range, yielding only revisions in wanted.
     def iterate():
         if follow and match.always():
-            ff = _followfilter(repo, onlyfirst=opts.get('follow_first'))
+            ff = _followfilter(repo, onlyfirst=opts.get(b'follow_first'))
 
             def want(rev):
                 return ff.match(rev) and rev in wanted
@@ -2345,7 +2387,7 @@
             names.append(f)
             if ui.verbose or not exact:
                 ui.status(
-                    _('adding %s\n') % uipathfn(f), label='ui.addremove.added'
+                    _(b'adding %s\n') % uipathfn(f), label=b'ui.addremove.added'
                 )
 
     for subpath in sorted(wctx.substate):
@@ -2364,7 +2406,7 @@
                 )
         except error.LookupError:
             ui.status(
-                _("skipping missing subrepository: %s\n") % uipathfn(subpath)
+                _(b"skipping missing subrepository: %s\n") % uipathfn(subpath)
             )
 
     if not opts.get(r'dry_run'):
@@ -2375,9 +2417,9 @@
 
 def addwebdirpath(repo, serverpath, webconf):
     webconf[serverpath] = repo.root
-    repo.ui.debug('adding %s = %s\n' % (serverpath, repo.root))
-
-    for r in repo.revs('filelog("path:.hgsub")'):
+    repo.ui.debug(b'adding %s = %s\n' % (serverpath, repo.root))
+
+    for r in repo.revs(b'filelog("path:.hgsub")'):
         ctx = repo[r]
         for subpath in ctx.substate:
             ctx.sub(subpath).addwebdirpath(serverpath, webconf)
@@ -2387,7 +2429,7 @@
     ui, repo, match, prefix, uipathfn, explicitonly, dryrun, interactive
 ):
     if dryrun and interactive:
-        raise error.Abort(_("cannot specify both --dry-run and --interactive"))
+        raise error.Abort(_(b"cannot specify both --dry-run and --interactive"))
     bad = []
     badfn = lambda x, y: bad.append(x) or match.bad(x, y)
     wctx = repo[None]
@@ -2411,11 +2453,11 @@
                 dryrun=dryrun,
                 interactive=interactive,
             )
-            bad.extend([subpath + '/' + f for f in subbad])
-            forgot.extend([subpath + '/' + f for f in subforgot])
+            bad.extend([subpath + b'/' + f for f in subbad])
+            forgot.extend([subpath + b'/' + f for f in subforgot])
         except error.LookupError:
             ui.status(
-                _("skipping missing subrepository: %s\n") % uipathfn(subpath)
+                _(b"skipping missing subrepository: %s\n") % uipathfn(subpath)
             )
 
     if not explicitonly:
@@ -2430,30 +2472,33 @@
                         if repo.dirstate.normalize(f) in repo.dirstate:
                             continue
                         ui.warn(
-                            _('not removing %s: ' 'file is already untracked\n')
+                            _(
+                                b'not removing %s: '
+                                b'file is already untracked\n'
+                            )
                             % uipathfn(f)
                         )
                     bad.append(f)
 
     if interactive:
         responses = _(
-            '[Ynsa?]'
-            '$$ &Yes, forget this file'
-            '$$ &No, skip this file'
-            '$$ &Skip remaining files'
-            '$$ Include &all remaining files'
-            '$$ &? (display help)'
+            b'[Ynsa?]'
+            b'$$ &Yes, forget this file'
+            b'$$ &No, skip this file'
+            b'$$ &Skip remaining files'
+            b'$$ Include &all remaining files'
+            b'$$ &? (display help)'
         )
         for filename in forget[:]:
             r = ui.promptchoice(
-                _('forget %s %s') % (uipathfn(filename), responses)
+                _(b'forget %s %s') % (uipathfn(filename), responses)
             )
             if r == 4:  # ?
                 while r == 4:
                     for c, t in ui.extractchoices(responses)[1]:
-                        ui.write('%s - %s\n' % (c, encoding.lower(t)))
+                        ui.write(b'%s - %s\n' % (c, encoding.lower(t)))
                     r = ui.promptchoice(
-                        _('forget %s %s') % (uipathfn(filename), responses)
+                        _(b'forget %s %s') % (uipathfn(filename), responses)
                     )
             if r == 0:  # yes
                 continue
@@ -2469,7 +2514,7 @@
     for f in forget:
         if ui.verbose or not match.exact(f) or interactive:
             ui.status(
-                _('removing %s\n') % uipathfn(f), label='ui.addremove.removed'
+                _(b'removing %s\n') % uipathfn(f), label=b'ui.addremove.removed'
             )
 
     if not dryrun:
@@ -2482,13 +2527,13 @@
 def files(ui, ctx, m, uipathfn, fm, fmt, subrepos):
     ret = 1
 
-    needsfctx = ui.verbose or {'size', 'flags'} & fm.datahint()
+    needsfctx = ui.verbose or {b'size', b'flags'} & fm.datahint()
     for f in ctx.matches(m):
         fm.startitem()
         fm.context(ctx=ctx)
         if needsfctx:
             fc = ctx[f]
-            fm.write('size flags', '% 10d % 1s ', fc.size(), fc.flags())
+            fm.write(b'size flags', b'% 10d % 1s ', fc.size(), fc.flags())
         fm.data(path=f)
         fm.plain(fmt % uipathfn(f))
         ret = 0
@@ -2507,7 +2552,7 @@
                     ret = 0
             except error.LookupError:
                 ui.status(
-                    _("skipping missing subrepository: %s\n")
+                    _(b"skipping missing subrepository: %s\n")
                     % uipathfn(subpath)
                 )
 
@@ -2531,7 +2576,7 @@
 
     subs = sorted(wctx.substate)
     progress = ui.makeprogress(
-        _('searching'), total=len(subs), unit=_('subrepos')
+        _(b'searching'), total=len(subs), unit=_(b'subrepos')
     )
     for subpath in subs:
         submatch = matchmod.subdirmatcher(subpath, m)
@@ -2554,7 +2599,7 @@
                     ret = 1
             except error.LookupError:
                 warnings.append(
-                    _("skipping missing subrepository: %s\n")
+                    _(b"skipping missing subrepository: %s\n")
                     % uipathfn(subpath)
                 )
     progress.complete()
@@ -2562,28 +2607,30 @@
     # warn about failure to delete explicit files/dirs
     deleteddirs = util.dirs(deleted)
     files = m.files()
-    progress = ui.makeprogress(_('deleting'), total=len(files), unit=_('files'))
+    progress = ui.makeprogress(
+        _(b'deleting'), total=len(files), unit=_(b'files')
+    )
     for f in files:
 
         def insubrepo():
             for subpath in wctx.substate:
-                if f.startswith(subpath + '/'):
+                if f.startswith(subpath + b'/'):
                     return True
             return False
 
         progress.increment()
         isdir = f in deleteddirs or wctx.hasdir(f)
-        if f in repo.dirstate or isdir or f == '.' or insubrepo() or f in subs:
+        if f in repo.dirstate or isdir or f == b'.' or insubrepo() or f in subs:
             continue
 
         if repo.wvfs.exists(f):
             if repo.wvfs.isdir(f):
                 warnings.append(
-                    _('not removing %s: no tracked files\n') % uipathfn(f)
+                    _(b'not removing %s: no tracked files\n') % uipathfn(f)
                 )
             else:
                 warnings.append(
-                    _('not removing %s: file is untracked\n') % uipathfn(f)
+                    _(b'not removing %s: file is untracked\n') % uipathfn(f)
                 )
         # missing files will generate a warning elsewhere
         ret = 1
@@ -2595,27 +2642,27 @@
         list = deleted
         remaining = modified + added + clean
         progress = ui.makeprogress(
-            _('skipping'), total=len(remaining), unit=_('files')
+            _(b'skipping'), total=len(remaining), unit=_(b'files')
         )
         for f in remaining:
             progress.increment()
             if ui.verbose or (f in files):
                 warnings.append(
-                    _('not removing %s: file still exists\n') % uipathfn(f)
+                    _(b'not removing %s: file still exists\n') % uipathfn(f)
                 )
             ret = 1
         progress.complete()
     else:
         list = deleted + clean
         progress = ui.makeprogress(
-            _('skipping'), total=(len(modified) + len(added)), unit=_('files')
+            _(b'skipping'), total=(len(modified) + len(added)), unit=_(b'files')
         )
         for f in modified:
             progress.increment()
             warnings.append(
                 _(
-                    'not removing %s: file is modified (use -f'
-                    ' to force removal)\n'
+                    b'not removing %s: file is modified (use -f'
+                    b' to force removal)\n'
                 )
                 % uipathfn(f)
             )
@@ -2624,8 +2671,8 @@
             progress.increment()
             warnings.append(
                 _(
-                    "not removing %s: file has been marked for add"
-                    " (use 'hg forget' to undo add)\n"
+                    b"not removing %s: file has been marked for add"
+                    b" (use 'hg forget' to undo add)\n"
                 )
                 % uipathfn(f)
             )
@@ -2633,12 +2680,14 @@
         progress.complete()
 
     list = sorted(list)
-    progress = ui.makeprogress(_('deleting'), total=len(list), unit=_('files'))
+    progress = ui.makeprogress(
+        _(b'deleting'), total=len(list), unit=_(b'files')
+    )
     for f in list:
         if ui.verbose or not m.exact(f):
             progress.increment()
             ui.status(
-                _('removing %s\n') % uipathfn(f), label='ui.addremove.removed'
+                _(b'removing %s\n') % uipathfn(f), label=b'ui.addremove.removed'
             )
     progress.complete()
 
@@ -2649,7 +2698,7 @@
                     if f in added:
                         continue  # we never unlink added files on remove
                     rmdir = repo.ui.configbool(
-                        'experimental', 'removeemptydirs'
+                        b'experimental', b'removeemptydirs'
                     )
                     repo.wvfs.unlinkpath(f, ignoremissing=True, rmdir=rmdir)
             repo[None].forget(list)
@@ -2662,7 +2711,7 @@
 
 
 def _catfmtneedsdata(fm):
-    return not fm.datahint() or 'data' in fm.datahint()
+    return not fm.datahint() or b'data' in fm.datahint()
 
 
 def _updatecatformatter(fm, ctx, matcher, path, decode):
@@ -2680,7 +2729,7 @@
             data = ctx.repo().wwritedata(path, data)
     fm.startitem()
     fm.context(ctx=ctx)
-    fm.write('data', '%s', data)
+    fm.write(b'data', b'%s', data)
     fm.data(path=path)
 
 
@@ -2700,7 +2749,7 @@
             except OSError:
                 pass
         with formatter.maybereopen(basefm, filename) as fm:
-            _updatecatformatter(fm, ctx, matcher, path, opts.get('decode'))
+            _updatecatformatter(fm, ctx, matcher, path, opts.get(b'decode'))
 
     # Automation often uses hg cat on single files, so special case it
     # for performance to avoid the cost of parsing the manifest.
@@ -2740,7 +2789,7 @@
                 err = 0
         except error.RepoLookupError:
             ui.status(
-                _("skipping missing subrepository: %s\n") % uipathfn(subpath)
+                _(b"skipping missing subrepository: %s\n") % uipathfn(subpath)
             )
 
     return err
@@ -2748,24 +2797,24 @@
 
 def commit(ui, repo, commitfunc, pats, opts):
     '''commit the specified files or all outstanding changes'''
-    date = opts.get('date')
+    date = opts.get(b'date')
     if date:
-        opts['date'] = dateutil.parsedate(date)
+        opts[b'date'] = dateutil.parsedate(date)
     message = logmessage(ui, opts)
     matcher = scmutil.match(repo[None], pats, opts)
 
     dsguard = None
     # extract addremove carefully -- this function can be called from a command
     # that doesn't support addremove
-    if opts.get('addremove'):
-        dsguard = dirstateguard.dirstateguard(repo, 'commit')
+    if opts.get(b'addremove'):
+        dsguard = dirstateguard.dirstateguard(repo, b'commit')
     with dsguard or util.nullcontextmanager():
         if dsguard:
             relative = scmutil.anypats(pats, opts)
             uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=relative)
-            if scmutil.addremove(repo, matcher, "", uipathfn, opts) != 0:
+            if scmutil.addremove(repo, matcher, b"", uipathfn, opts) != 0:
                 raise error.Abort(
-                    _("failed to mark all new/missing files as added/removed")
+                    _(b"failed to mark all new/missing files as added/removed")
                 )
 
         return commitfunc(ui, repo, message, matcher, opts)
@@ -2792,10 +2841,10 @@
     if obsolete.isenabled(repo, obsolete.createmarkersopt):
         ui.username()  # raise exception if username not set
 
-    ui.note(_('amending changeset %s\n') % old)
+    ui.note(_(b'amending changeset %s\n') % old)
     base = old.p1()
 
-    with repo.wlock(), repo.lock(), repo.transaction('amend'):
+    with repo.wlock(), repo.lock(), repo.transaction(b'amend'):
         # Participating changesets:
         #
         # wctx     o - workingctx that contains changes from working copy
@@ -2819,9 +2868,9 @@
         datemaydiffer = resolvecommitoptions(ui, opts)
 
         date = old.date()
-        if opts.get('date'):
-            date = dateutil.parsedate(opts.get('date'))
-        user = opts.get('user') or old.user()
+        if opts.get(b'date'):
+            date = dateutil.parsedate(opts.get(b'date'))
+        user = opts.get(b'user') or old.user()
 
         if len(old.parents()) > 1:
             # ctx.files() isn't reliable for merges, so fall back to the
@@ -2835,17 +2884,17 @@
         matcher = scmutil.match(wctx, pats, opts)
         relative = scmutil.anypats(pats, opts)
         uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=relative)
-        if opts.get('addremove') and scmutil.addremove(
-            repo, matcher, "", uipathfn, opts
+        if opts.get(b'addremove') and scmutil.addremove(
+            repo, matcher, b"", uipathfn, opts
         ):
             raise error.Abort(
-                _("failed to mark all new/missing files as added/removed")
+                _(b"failed to mark all new/missing files as added/removed")
             )
 
         # Check subrepos. This depends on in-place wctx._status update in
         # subrepo.precommit(). To minimize the risk of this hack, we do
         # nothing if .hgsub does not exist.
-        if '.hgsub' in wctx or '.hgsub' in old:
+        if b'.hgsub' in wctx or b'.hgsub' in old:
             subs, commitsubs, newsubstate = subrepoutil.precommit(
                 ui, wctx, wctx._status, matcher
             )
@@ -2900,8 +2949,8 @@
                         ctx_,
                         fctx.path(),
                         fctx.data(),
-                        islink='l' in flags,
-                        isexec='x' in flags,
+                        islink=b'l' in flags,
+                        isexec=b'x' in flags,
                         copysource=copied.get(path),
                     )
                     return mctx
@@ -2909,7 +2958,7 @@
                     return None
 
         else:
-            ui.note(_('copying changeset %s to %s\n') % (old, base))
+            ui.note(_(b'copying changeset %s to %s\n') % (old, base))
 
             # Use version of files as in the old cset
             def filectxfn(repo, ctx_, path):
@@ -2922,22 +2971,22 @@
         # the message of the changeset to amend.
         message = logmessage(ui, opts)
 
-        editform = mergeeditform(old, 'commit.amend')
+        editform = mergeeditform(old, b'commit.amend')
 
         if not message:
             message = old.description()
             # Default if message isn't provided and --edit is not passed is to
             # invoke editor, but allow --no-edit. If somehow we don't have any
             # description, let's always start the editor.
-            doedit = not message or opts.get('edit') in [True, None]
+            doedit = not message or opts.get(b'edit') in [True, None]
         else:
             # Default if message is provided is to not invoke editor, but allow
             # --edit.
-            doedit = opts.get('edit') is True
+            doedit = opts.get(b'edit') is True
         editor = getcommiteditor(edit=doedit, editform=editform)
 
         pureextra = extra.copy()
-        extra['amend_source'] = old.hex()
+        extra[b'amend_source'] = old.hex()
 
         new = context.memctx(
             repo,
@@ -2966,7 +3015,7 @@
             return old.node()
 
         commitphase = None
-        if opts.get('secret'):
+        if opts.get(b'secret'):
             commitphase = phases.secret
         newid = repo.commitctx(new)
 
@@ -2974,13 +3023,13 @@
         repo.setparents(newid, nullid)
         mapping = {old.node(): (newid,)}
         obsmetadata = None
-        if opts.get('note'):
-            obsmetadata = {'note': encoding.fromlocal(opts['note'])}
-        backup = ui.configbool('rewrite', 'backup-bundle')
+        if opts.get(b'note'):
+            obsmetadata = {b'note': encoding.fromlocal(opts[b'note'])}
+        backup = ui.configbool(b'rewrite', b'backup-bundle')
         scmutil.cleanupnodes(
             repo,
             mapping,
-            'amend',
+            b'amend',
             metadata=obsmetadata,
             fixphase=True,
             targetphase=commitphase,
@@ -3009,7 +3058,7 @@
     return newid
 
 
-def commiteditor(repo, ctx, subs, editform=''):
+def commiteditor(repo, ctx, subs, editform=b''):
     if ctx.description():
         return ctx.description()
     return commitforceeditor(
@@ -3023,18 +3072,18 @@
     subs,
     finishdesc=None,
     extramsg=None,
-    editform='',
+    editform=b'',
     unchangedmessagedetection=False,
 ):
     if not extramsg:
-        extramsg = _("Leave message empty to abort commit.")
-
-    forms = [e for e in editform.split('.') if e]
-    forms.insert(0, 'changeset')
+        extramsg = _(b"Leave message empty to abort commit.")
+
+    forms = [e for e in editform.split(b'.') if e]
+    forms.insert(0, b'changeset')
     templatetext = None
     while forms:
-        ref = '.'.join(forms)
-        if repo.ui.config('committemplate', ref):
+        ref = b'.'.join(forms)
+        if repo.ui.config(b'committemplate', ref):
             templatetext = committext = buildcommittemplate(
                 repo, ctx, subs, extramsg, ref
             )
@@ -3059,7 +3108,7 @@
         editform=editform,
         pending=pending,
         repopath=repo.path,
-        action='commit',
+        action=b'commit',
     )
     text = editortext
 
@@ -3069,15 +3118,15 @@
     if stripbelow:
         text = text[: stripbelow.start()]
 
-    text = re.sub("(?m)^HG:.*(\n|$)", "", text)
+    text = re.sub(b"(?m)^HG:.*(\n|$)", b"", text)
     os.chdir(olddir)
 
     if finishdesc:
         text = finishdesc(text)
     if not text.strip():
-        raise error.Abort(_("empty commit message"))
+        raise error.Abort(_(b"empty commit message"))
     if unchangedmessagedetection and editortext == templatetext:
-        raise error.Abort(_("commit message unchanged"))
+        raise error.Abort(_(b"commit message unchanged"))
 
     return text
 
@@ -3088,11 +3137,11 @@
     t = logcmdutil.changesettemplater(ui, repo, spec)
     t.t.cache.update(
         (k, templater.unquotestring(v))
-        for k, v in repo.ui.configitems('committemplate')
+        for k, v in repo.ui.configitems(b'committemplate')
     )
 
     if not extramsg:
-        extramsg = ''  # ensure that extramsg is string
+        extramsg = b''  # ensure that extramsg is string
 
     ui.pushbuffer()
     t.show(ctx, extramsg=extramsg)
@@ -3100,7 +3149,7 @@
 
 
 def hgprefix(msg):
-    return "\n".join(["HG: %s" % a for a in msg.split("\n") if a])
+    return b"\n".join([b"HG: %s" % a for a in msg.split(b"\n") if a])
 
 
 def buildcommittext(repo, ctx, subs, extramsg):
@@ -3108,34 +3157,34 @@
     modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
     if ctx.description():
         edittext.append(ctx.description())
-    edittext.append("")
-    edittext.append("")  # Empty line between message and comments.
+    edittext.append(b"")
+    edittext.append(b"")  # Empty line between message and comments.
     edittext.append(
         hgprefix(
             _(
-                "Enter commit message."
-                "  Lines beginning with 'HG:' are removed."
+                b"Enter commit message."
+                b"  Lines beginning with 'HG:' are removed."
             )
         )
     )
     edittext.append(hgprefix(extramsg))
-    edittext.append("HG: --")
-    edittext.append(hgprefix(_("user: %s") % ctx.user()))
+    edittext.append(b"HG: --")
+    edittext.append(hgprefix(_(b"user: %s") % ctx.user()))
     if ctx.p2():
-        edittext.append(hgprefix(_("branch merge")))
+        edittext.append(hgprefix(_(b"branch merge")))
     if ctx.branch():
-        edittext.append(hgprefix(_("branch '%s'") % ctx.branch()))
+        edittext.append(hgprefix(_(b"branch '%s'") % ctx.branch()))
     if bookmarks.isactivewdirparent(repo):
-        edittext.append(hgprefix(_("bookmark '%s'") % repo._activebookmark))
-    edittext.extend([hgprefix(_("subrepo %s") % s) for s in subs])
-    edittext.extend([hgprefix(_("added %s") % f) for f in added])
-    edittext.extend([hgprefix(_("changed %s") % f) for f in modified])
-    edittext.extend([hgprefix(_("removed %s") % f) for f in removed])
+        edittext.append(hgprefix(_(b"bookmark '%s'") % repo._activebookmark))
+    edittext.extend([hgprefix(_(b"subrepo %s") % s) for s in subs])
+    edittext.extend([hgprefix(_(b"added %s") % f) for f in added])
+    edittext.extend([hgprefix(_(b"changed %s") % f) for f in modified])
+    edittext.extend([hgprefix(_(b"removed %s") % f) for f in removed])
     if not added and not modified and not removed:
-        edittext.append(hgprefix(_("no files changed")))
-    edittext.append("")
-
-    return "\n".join(edittext)
+        edittext.append(hgprefix(_(b"no files changed")))
+    edittext.append(b"")
+
+    return b"\n".join(edittext)
 
 
 def commitstatus(repo, node, branch, bheads=None, opts=None):
@@ -3145,14 +3194,14 @@
     parents = ctx.parents()
 
     if (
-        not opts.get('amend')
+        not opts.get(b'amend')
         and bheads
         and node not in bheads
         and not [
             x for x in parents if x.node() in bheads and x.branch() == branch
         ]
     ):
-        repo.ui.status(_('created new head\n'))
+        repo.ui.status(_(b'created new head\n'))
         # The message is not printed for initial roots. For the other
         # changesets, it is printed in the following situations:
         #
@@ -3182,15 +3231,19 @@
         #
         # H H  n  head merge: head count decreases
 
-    if not opts.get('close_branch'):
+    if not opts.get(b'close_branch'):
         for r in parents:
             if r.closesbranch() and r.branch() == branch:
-                repo.ui.status(_('reopening closed branch head %d\n') % r.rev())
+                repo.ui.status(
+                    _(b'reopening closed branch head %d\n') % r.rev()
+                )
 
     if repo.ui.debugflag:
-        repo.ui.write(_('committed changeset %d:%s\n') % (ctx.rev(), ctx.hex()))
+        repo.ui.write(
+            _(b'committed changeset %d:%s\n') % (ctx.rev(), ctx.hex())
+        )
     elif repo.ui.verbose:
-        repo.ui.write(_('committed changeset %d:%s\n') % (ctx.rev(), ctx))
+        repo.ui.write(_(b'committed changeset %d:%s\n') % (ctx.rev(), ctx))
 
 
 def postcommitstatus(repo, pats, opts):
@@ -3222,7 +3275,7 @@
         ## filling of the `names` mapping
         # walk dirstate to fill `names`
 
-        interactive = opts.get('interactive', False)
+        interactive = opts.get(b'interactive', False)
         wctx = repo[None]
         m = scmutil.match(wctx, pats, opts)
 
@@ -3241,11 +3294,11 @@
                     return
                 if path in ctx.substate:
                     return
-                path_ = path + '/'
+                path_ = path + b'/'
                 for f in names:
                     if f.startswith(path_):
                         return
-                ui.warn("%s: %s\n" % (uipathfn(path), msg))
+                ui.warn(b"%s: %s\n" % (uipathfn(path), msg))
 
             for abs in ctx.walk(matchmod.badmatch(m, badfn)):
                 if abs not in names:
@@ -3326,7 +3379,7 @@
         for f in localchanges:
             src = repo.dirstate.copied(f)
             # XXX should we check for rename down to target node?
-            if src and src not in names and repo.dirstate[src] == 'r':
+            if src and src not in names and repo.dirstate[src] == b'r':
                 dsremoved.add(src)
                 names[src] = True
 
@@ -3340,12 +3393,12 @@
         # distinguish between files to forget and the others
         added = set()
         for abs in dsadded:
-            if repo.dirstate[abs] != 'a':
+            if repo.dirstate[abs] != b'a':
                 added.add(abs)
         dsadded -= added
 
         for abs in deladded:
-            if repo.dirstate[abs] == 'a':
+            if repo.dirstate[abs] == b'a':
                 dsadded.add(abs)
         deladded -= dsadded
 
@@ -3370,14 +3423,14 @@
         # actions to be actually performed by revert
         # (<list of files>, <message>) tuple
         actions = {
-            'revert': ([], _('reverting %s\n')),
-            'add': ([], _('adding %s\n')),
-            'remove': ([], _('removing %s\n')),
-            'drop': ([], _('removing %s\n')),
-            'forget': ([], _('forgetting %s\n')),
-            'undelete': ([], _('undeleting %s\n')),
-            'noop': (None, _('no changes needed to %s\n')),
-            'unknown': (None, _('file not managed: %s\n')),
+            b'revert': ([], _(b'reverting %s\n')),
+            b'add': ([], _(b'adding %s\n')),
+            b'remove': ([], _(b'removing %s\n')),
+            b'drop': ([], _(b'removing %s\n')),
+            b'forget': ([], _(b'forgetting %s\n')),
+            b'undelete': ([], _(b'undeleting %s\n')),
+            b'noop': (None, _(b'no changes needed to %s\n')),
+            b'unknown': (None, _(b'file not managed: %s\n')),
         }
 
         # "constant" that convey the backup strategy.
@@ -3388,7 +3441,7 @@
         backup = 2  # unconditionally do backup
         check = 1  # check if the existing file differs from target
         discard = 0  # never do backup
-        if opts.get('no_backup'):
+        if opts.get(b'no_backup'):
             backupinteractive = backup = check = discard
         if interactive:
             dsmodifiedbackup = backupinteractive
@@ -3396,9 +3449,9 @@
             dsmodifiedbackup = backup
         tobackup = set()
 
-        backupanddel = actions['remove']
-        if not opts.get('no_backup'):
-            backupanddel = actions['drop']
+        backupanddel = actions[b'remove']
+        if not opts.get(b'no_backup'):
+            backupanddel = actions[b'drop']
 
         disptable = (
             # dispatch table:
             #   file state
             #   action
             #   make backup
             ## Sets whose results will change files on disk
             # Modified compared to target, no local change
-            (modified, actions['revert'], discard),
+            (modified, actions[b'revert'], discard),
             # Modified compared to target, but local file is deleted
-            (deleted, actions['revert'], discard),
+            (deleted, actions[b'revert'], discard),
             # Modified compared to target, local change
-            (dsmodified, actions['revert'], dsmodifiedbackup),
+            (dsmodified, actions[b'revert'], dsmodifiedbackup),
             # Added since target
-            (added, actions['remove'], discard),
+            (added, actions[b'remove'], discard),
             # Added in working directory
-            (dsadded, actions['forget'], discard),
+            (dsadded, actions[b'forget'], discard),
             # Added since target, have local modification
             (modadded, backupanddel, backup),
             # Added since target but file is missing in working directory
-            (deladded, actions['drop'], discard),
+            (deladded, actions[b'drop'], discard),
             # Removed since target, before working copy parent
-            (removed, actions['add'], discard),
+            (removed, actions[b'add'], discard),
             # Same as `removed` but an unknown file exists at the same path
-            (removunk, actions['add'], check),
+            (removunk, actions[b'add'], check),
             # Removed since target, marked as such in working copy parent
-            (dsremoved, actions['undelete'], discard),
+            (dsremoved, actions[b'undelete'], discard),
             # Same as `dsremoved` but an unknown file exists at the same path
-            (dsremovunk, actions['undelete'], check),
+            (dsremovunk, actions[b'undelete'], check),
             ## the following sets do not result in any file changes
             # File with no modification
-            (clean, actions['noop'], discard),
+            (clean, actions[b'noop'], discard),
             # Existing file, not tracked anywhere
-            (unknown, actions['unknown'], discard),
+            (unknown, actions[b'unknown'], discard),
         )
 
         for abs, exact in sorted(names.items()):
@@ -3457,23 +3510,23 @@
                                 absbakname, start=repo.root
                             )
                             ui.note(
-                                _('saving current version of %s as %s\n')
+                                _(b'saving current version of %s as %s\n')
                                 % (uipathfn(abs), uipathfn(bakname))
                             )
-                            if not opts.get('dry_run'):
+                            if not opts.get(b'dry_run'):
                                 if interactive:
                                     util.copyfile(target, absbakname)
                                 else:
                                     util.rename(target, absbakname)
-                    if opts.get('dry_run'):
+                    if opts.get(b'dry_run'):
                         if ui.verbose or not exact:
                             ui.status(msg % uipathfn(abs))
                 elif exact:
                     ui.warn(msg % uipathfn(abs))
                 break
 
-        if not opts.get('dry_run'):
-            needdata = ('revert', 'add', 'undelete')
+        if not opts.get(b'dry_run'):
+            needdata = (b'revert', b'add', b'undelete')
             oplist = [actions[name][0] for name in needdata]
             prefetch = scmutil.prefetchfiles
             matchfiles = scmutil.matchfiles
@@ -3504,7 +3557,7 @@
                     )
                 except KeyError:
                     raise error.Abort(
-                        "subrepository '%s' does not exist in %s!"
+                        b"subrepository '%s' does not exist in %s!"
                         % (sub, short(ctx.node()))
                     )
 
@@ -3537,7 +3590,7 @@
 
     def doremove(f):
         try:
-            rmdir = repo.ui.configbool('experimental', 'removeemptydirs')
+            rmdir = repo.ui.configbool(b'experimental', b'removeemptydirs')
             repo.wvfs.unlinkpath(f, rmdir=rmdir)
         except OSError:
             pass
@@ -3549,36 +3602,36 @@
             repo.ui.status(actions[action][1] % uipathfn(f))
 
     audit_path = pathutil.pathauditor(repo.root, cached=True)
-    for f in actions['forget'][0]:
+    for f in actions[b'forget'][0]:
         if interactive:
             choice = repo.ui.promptchoice(
-                _("forget added file %s (Yn)?$$ &Yes $$ &No") % uipathfn(f)
+                _(b"forget added file %s (Yn)?$$ &Yes $$ &No") % uipathfn(f)
             )
             if choice == 0:
-                prntstatusmsg('forget', f)
+                prntstatusmsg(b'forget', f)
                 repo.dirstate.drop(f)
             else:
                 excluded_files.append(f)
         else:
-            prntstatusmsg('forget', f)
+            prntstatusmsg(b'forget', f)
             repo.dirstate.drop(f)
-    for f in actions['remove'][0]:
+    for f in actions[b'remove'][0]:
         audit_path(f)
         if interactive:
             choice = repo.ui.promptchoice(
-                _("remove added file %s (Yn)?$$ &Yes $$ &No") % uipathfn(f)
+                _(b"remove added file %s (Yn)?$$ &Yes $$ &No") % uipathfn(f)
             )
             if choice == 0:
-                prntstatusmsg('remove', f)
+                prntstatusmsg(b'remove', f)
                 doremove(f)
             else:
                 excluded_files.append(f)
         else:
-            prntstatusmsg('remove', f)
+            prntstatusmsg(b'remove', f)
             doremove(f)
-    for f in actions['drop'][0]:
+    for f in actions[b'drop'][0]:
         audit_path(f)
-        prntstatusmsg('drop', f)
+        prntstatusmsg(b'drop', f)
         repo.dirstate.remove(f)
 
     normal = None
@@ -3594,26 +3647,26 @@
     newlyaddedandmodifiedfiles = set()
     if interactive:
         # Prompt the user for changes to revert
-        torevert = [f for f in actions['revert'][0] if f not in excluded_files]
+        torevert = [f for f in actions[b'revert'][0] if f not in excluded_files]
         m = scmutil.matchfiles(repo, torevert)
         diffopts = patch.difffeatureopts(
             repo.ui,
             whitespace=True,
-            section='commands',
-            configprefix='revert.interactive.',
+            section=b'commands',
+            configprefix=b'revert.interactive.',
         )
         diffopts.nodates = True
         diffopts.git = True
-        operation = 'apply'
+        operation = b'apply'
         if node == parent:
             if repo.ui.configbool(
-                'experimental', 'revert.interactive.select-to-keep'
+                b'experimental', b'revert.interactive.select-to-keep'
             ):
-                operation = 'keep'
+                operation = b'keep'
             else:
-                operation = 'discard'
-
-        if operation == 'apply':
+                operation = b'discard'
+
+        if operation == b'apply':
             diff = patch.diff(repo, None, ctx.node(), m, opts=diffopts)
         else:
             diff = patch.diff(repo, ctx.node(), None, m, opts=diffopts)
@@ -3624,11 +3677,11 @@
             chunks, opts = recordfilter(
                 repo.ui, originalchunks, match, operation=operation
             )
-            if operation == 'discard':
+            if operation == b'discard':
                 chunks = patch.reversehunks(chunks)
 
         except error.PatchError as err:
-            raise error.Abort(_('error parsing patch: %s') % err)
+            raise error.Abort(_(b'error parsing patch: %s') % err)
 
         # FIXME: when doing an interactive revert of a copy, there's no way of
         # performing a partial revert of the added file, the only option is
@@ -3644,7 +3697,7 @@
         fp = stringio()
         # chunks are serialized per file, but files aren't sorted
         for f in sorted(set(c.header.filename() for c in chunks if ishunk(c))):
-            prntstatusmsg('revert', f)
+            prntstatusmsg(b'revert', f)
         files = set()
         for c in chunks:
             if ishunk(c):
@@ -3657,7 +3710,7 @@
                     tobackup.remove(abs)
                 if abs not in files:
                     files.add(abs)
-                    if operation == 'keep':
+                    if operation == b'keep':
                         checkout(abs)
             c.write(fp)
         dopatch = fp.tell()
@@ -3669,41 +3722,43 @@
                 raise error.Abort(pycompat.bytestr(err))
         del fp
     else:
-        for f in actions['revert'][0]:
-            prntstatusmsg('revert', f)
+        for f in actions[b'revert'][0]:
+            prntstatusmsg(b'revert', f)
             checkout(f)
             if normal:
                 normal(f)
 
-    for f in actions['add'][0]:
+    for f in actions[b'add'][0]:
         # Don't checkout modified files, they are already created by the diff
         if f not in newlyaddedandmodifiedfiles:
-            prntstatusmsg('add', f)
+            prntstatusmsg(b'add', f)
             checkout(f)
             repo.dirstate.add(f)
 
     normal = repo.dirstate.normallookup
     if node == parent and p2 == nullid:
         normal = repo.dirstate.normal
-    for f in actions['undelete'][0]:
+    for f in actions[b'undelete'][0]:
         if interactive:
             choice = repo.ui.promptchoice(
-                _("add back removed file %s (Yn)?$$ &Yes $$ &No") % f
+                _(b"add back removed file %s (Yn)?$$ &Yes $$ &No") % f
             )
             if choice == 0:
-                prntstatusmsg('undelete', f)
+                prntstatusmsg(b'undelete', f)
                 checkout(f)
                 normal(f)
             else:
                 excluded_files.append(f)
         else:
-            prntstatusmsg('undelete', f)
+            prntstatusmsg(b'undelete', f)
             checkout(f)
             normal(f)
 
     copied = copies.pathcopies(repo[parent], ctx)
 
-    for f in actions['add'][0] + actions['undelete'][0] + actions['revert'][0]:
+    for f in (
+        actions[b'add'][0] + actions[b'undelete'][0] + actions[b'revert'][0]
+    ):
         if f in copied:
             repo.dirstate.copy(copied[f], f)
 
@@ -3748,7 +3803,7 @@
         if (
             not s._clearable
             or (commit and s._allowcommit)
-            or (s._opname == 'merge' and skipmerge)
+            or (s._opname == b'merge' and skipmerge)
             or s._reportonly
         ):
             continue
@@ -3767,7 +3822,7 @@
             raise error.Abort(state.msg(), hint=state.hint())
 
     for s in statemod._unfinishedstates:
-        if s._opname == 'merge' or state._reportonly:
+        if s._opname == b'merge' or state._reportonly:
             continue
         if s._clearable and s.isunfinished(repo):
             util.unlink(repo.vfs.join(s._fname))
@@ -3793,14 +3848,14 @@
     Returns a (msg, warning) tuple. 'msg' is a string and 'warning' is
     a boolean.
     '''
-    contmsg = _("continue: %s")
+    contmsg = _(b"continue: %s")
     for state in statemod._unfinishedstates:
         if not state._continueflag:
             continue
         if state.isunfinished(repo):
             return contmsg % state.continuemsg(), True
     if repo[None].dirty(missing=True, merge=False, branch=False):
-        return contmsg % _("hg commit"), False
+        return contmsg % _(b"hg commit"), False
     return None, None
 
 
@@ -3815,9 +3870,9 @@
     msg, warning = howtocontinue(repo)
     if msg is not None:
         if warning:
-            repo.ui.warn("%s\n" % msg)
+            repo.ui.warn(b"%s\n" % msg)
         else:
-            repo.ui.note("%s\n" % msg)
+            repo.ui.note(b"%s\n" % msg)
 
 
 def wrongtooltocontinue(repo, task):
@@ -3833,26 +3888,26 @@
     hint = None
     if after[1]:
         hint = after[0]
-    raise error.Abort(_('no %s in progress') % task, hint=hint)
+    raise error.Abort(_(b'no %s in progress') % task, hint=hint)
 
 
 def abortgraft(ui, repo, graftstate):
     """abort the interrupted graft and rollbacks to the state before interrupted
     graft"""
     if not graftstate.exists():
-        raise error.Abort(_("no interrupted graft to abort"))
+        raise error.Abort(_(b"no interrupted graft to abort"))
     statedata = readgraftstate(repo, graftstate)
-    newnodes = statedata.get('newnodes')
+    newnodes = statedata.get(b'newnodes')
     if newnodes is None:
         # an old graft state which does not have all the data required to abort
         # the graft
-        raise error.Abort(_("cannot abort using an old graftstate"))
+        raise error.Abort(_(b"cannot abort using an old graftstate"))
 
     # changeset from which graft operation was started
     if len(newnodes) > 0:
         startctx = repo[newnodes[0]].p1()
     else:
-        startctx = repo['.']
+        startctx = repo[b'.']
     # whether to strip or not
     cleanup = False
     from . import hg
@@ -3864,9 +3919,9 @@
         immutable = [c for c in newnodes if not repo[c].mutable()]
         if immutable:
             repo.ui.warn(
-                _("cannot clean up public changesets %s\n")
-                % ', '.join(bytes(repo[r]) for r in immutable),
-                hint=_("see 'hg help phases' for details"),
+                _(b"cannot clean up public changesets %s\n")
+                % b', '.join(bytes(repo[r]) for r in immutable),
+                hint=_(b"see 'hg help phases' for details"),
             )
             cleanup = False
 
@@ -3875,8 +3930,8 @@
         if desc - set(newnodes):
             repo.ui.warn(
                 _(
-                    "new changesets detected on destination "
-                    "branch, can't strip\n"
+                    b"new changesets detected on destination "
+                    b"branch, can't strip\n"
                 )
             )
             cleanup = False
@@ -3886,17 +3941,17 @@
                 hg.updaterepo(repo, startctx.node(), overwrite=True)
                 # stripping the new nodes created
                 strippoints = [
-                    c.node() for c in repo.set("roots(%ld)", newnodes)
+                    c.node() for c in repo.set(b"roots(%ld)", newnodes)
                 ]
                 repair.strip(repo.ui, repo, strippoints, backup=False)
 
     if not cleanup:
         # we don't update to the startnode if we can't strip
-        startctx = repo['.']
+        startctx = repo[b'.']
         hg.updaterepo(repo, startctx.node(), overwrite=True)
 
-    ui.status(_("graft aborted\n"))
-    ui.status(_("working directory is now at %s\n") % startctx.hex()[:12])
+    ui.status(_(b"graft aborted\n"))
+    ui.status(_(b"working directory is now at %s\n") % startctx.hex()[:12])
     graftstate.delete()
     return 0
 
@@ -3906,12 +3961,12 @@
     try:
         return graftstate.read()
     except error.CorruptedState:
-        nodes = repo.vfs.read('graftstate').splitlines()
-        return {'nodes': nodes}
+        nodes = repo.vfs.read(b'graftstate').splitlines()
+        return {b'nodes': nodes}
 
 
 def hgabortgraft(ui, repo):
     """ abort logic for aborting graft using 'hg abort'"""
     with repo.wlock():
-        graftstate = statemod.cmdstate(repo, 'graftstate')
+        graftstate = statemod.cmdstate(repo, b'graftstate')
         return abortgraft(ui, repo, graftstate)
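
Note: the graftstate key byteification in the hunks above is load-bearing on
Python 3, where bytes and str never compare equal, so a str-keyed dict lookup
with a bytes key silently returns None. A minimal plain-Python sketch of the
failure mode (hypothetical values, no Mercurial imports):

    # Pre-byteify shape (hypothetical): str keys, so a bytes lookup misses.
    statedata = {'newnodes': ['deadbeef']}
    assert statedata.get(b'newnodes') is None
    # Byteified shape, as enforced by this change: the lookup succeeds.
    statedata = {b'newnodes': [b'deadbeef']}
    assert statedata.get(b'newnodes') == [b'deadbeef']
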
--- a/mercurial/color.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/color.py	Sun Oct 06 09:48:39 2019 -0400
@@ -24,24 +24,24 @@
     # Mapping from effect name to terminfo attribute name (or raw code) or
     # color number.  This will also force-load the curses module.
     _baseterminfoparams = {
-        'none': (True, 'sgr0', ''),
-        'standout': (True, 'smso', ''),
-        'underline': (True, 'smul', ''),
-        'reverse': (True, 'rev', ''),
-        'inverse': (True, 'rev', ''),
-        'blink': (True, 'blink', ''),
-        'dim': (True, 'dim', ''),
-        'bold': (True, 'bold', ''),
-        'invisible': (True, 'invis', ''),
-        'italic': (True, 'sitm', ''),
-        'black': (False, curses.COLOR_BLACK, ''),
-        'red': (False, curses.COLOR_RED, ''),
-        'green': (False, curses.COLOR_GREEN, ''),
-        'yellow': (False, curses.COLOR_YELLOW, ''),
-        'blue': (False, curses.COLOR_BLUE, ''),
-        'magenta': (False, curses.COLOR_MAGENTA, ''),
-        'cyan': (False, curses.COLOR_CYAN, ''),
-        'white': (False, curses.COLOR_WHITE, ''),
+        b'none': (True, b'sgr0', b''),
+        b'standout': (True, b'smso', b''),
+        b'underline': (True, b'smul', b''),
+        b'reverse': (True, b'rev', b''),
+        b'inverse': (True, b'rev', b''),
+        b'blink': (True, b'blink', b''),
+        b'dim': (True, b'dim', b''),
+        b'bold': (True, b'bold', b''),
+        b'invisible': (True, b'invis', b''),
+        b'italic': (True, b'sitm', b''),
+        b'black': (False, curses.COLOR_BLACK, b''),
+        b'red': (False, curses.COLOR_RED, b''),
+        b'green': (False, curses.COLOR_GREEN, b''),
+        b'yellow': (False, curses.COLOR_YELLOW, b''),
+        b'blue': (False, curses.COLOR_BLUE, b''),
+        b'magenta': (False, curses.COLOR_MAGENTA, b''),
+        b'cyan': (False, curses.COLOR_CYAN, b''),
+        b'white': (False, curses.COLOR_WHITE, b''),
     }
 except ImportError:
     curses = None
@@ -49,101 +49,101 @@
 
 # start and stop parameters for effects
 _effects = {
-    'none': 0,
-    'black': 30,
-    'red': 31,
-    'green': 32,
-    'yellow': 33,
-    'blue': 34,
-    'magenta': 35,
-    'cyan': 36,
-    'white': 37,
-    'bold': 1,
-    'italic': 3,
-    'underline': 4,
-    'inverse': 7,
-    'dim': 2,
-    'black_background': 40,
-    'red_background': 41,
-    'green_background': 42,
-    'yellow_background': 43,
-    'blue_background': 44,
-    'purple_background': 45,
-    'cyan_background': 46,
-    'white_background': 47,
+    b'none': 0,
+    b'black': 30,
+    b'red': 31,
+    b'green': 32,
+    b'yellow': 33,
+    b'blue': 34,
+    b'magenta': 35,
+    b'cyan': 36,
+    b'white': 37,
+    b'bold': 1,
+    b'italic': 3,
+    b'underline': 4,
+    b'inverse': 7,
+    b'dim': 2,
+    b'black_background': 40,
+    b'red_background': 41,
+    b'green_background': 42,
+    b'yellow_background': 43,
+    b'blue_background': 44,
+    b'purple_background': 45,
+    b'cyan_background': 46,
+    b'white_background': 47,
 }
 
 _defaultstyles = {
-    'grep.match': 'red bold',
-    'grep.linenumber': 'green',
-    'grep.rev': 'blue',
-    'grep.sep': 'cyan',
-    'grep.filename': 'magenta',
-    'grep.user': 'magenta',
-    'grep.date': 'magenta',
-    'grep.inserted': 'green bold',
-    'grep.deleted': 'red bold',
-    'bookmarks.active': 'green',
-    'branches.active': 'none',
-    'branches.closed': 'black bold',
-    'branches.current': 'green',
-    'branches.inactive': 'none',
-    'diff.changed': 'white',
-    'diff.deleted': 'red',
-    'diff.deleted.changed': 'red bold underline',
-    'diff.deleted.unchanged': 'red',
-    'diff.diffline': 'bold',
-    'diff.extended': 'cyan bold',
-    'diff.file_a': 'red bold',
-    'diff.file_b': 'green bold',
-    'diff.hunk': 'magenta',
-    'diff.inserted': 'green',
-    'diff.inserted.changed': 'green bold underline',
-    'diff.inserted.unchanged': 'green',
-    'diff.tab': '',
-    'diff.trailingwhitespace': 'bold red_background',
-    'changeset.public': '',
-    'changeset.draft': '',
-    'changeset.secret': '',
-    'diffstat.deleted': 'red',
-    'diffstat.inserted': 'green',
-    'formatvariant.name.mismatchconfig': 'red',
-    'formatvariant.name.mismatchdefault': 'yellow',
-    'formatvariant.name.uptodate': 'green',
-    'formatvariant.repo.mismatchconfig': 'red',
-    'formatvariant.repo.mismatchdefault': 'yellow',
-    'formatvariant.repo.uptodate': 'green',
-    'formatvariant.config.special': 'yellow',
-    'formatvariant.config.default': 'green',
-    'formatvariant.default': '',
-    'histedit.remaining': 'red bold',
-    'ui.addremove.added': 'green',
-    'ui.addremove.removed': 'red',
-    'ui.error': 'red',
-    'ui.prompt': 'yellow',
-    'log.changeset': 'yellow',
-    'patchbomb.finalsummary': '',
-    'patchbomb.from': 'magenta',
-    'patchbomb.to': 'cyan',
-    'patchbomb.subject': 'green',
-    'patchbomb.diffstats': '',
-    'rebase.rebased': 'blue',
-    'rebase.remaining': 'red bold',
-    'resolve.resolved': 'green bold',
-    'resolve.unresolved': 'red bold',
-    'shelve.age': 'cyan',
-    'shelve.newest': 'green bold',
-    'shelve.name': 'blue bold',
-    'status.added': 'green bold',
-    'status.clean': 'none',
-    'status.copied': 'none',
-    'status.deleted': 'cyan bold underline',
-    'status.ignored': 'black bold',
-    'status.modified': 'blue bold',
-    'status.removed': 'red bold',
-    'status.unknown': 'magenta bold underline',
-    'tags.normal': 'green',
-    'tags.local': 'black bold',
+    b'grep.match': b'red bold',
+    b'grep.linenumber': b'green',
+    b'grep.rev': b'blue',
+    b'grep.sep': b'cyan',
+    b'grep.filename': b'magenta',
+    b'grep.user': b'magenta',
+    b'grep.date': b'magenta',
+    b'grep.inserted': b'green bold',
+    b'grep.deleted': b'red bold',
+    b'bookmarks.active': b'green',
+    b'branches.active': b'none',
+    b'branches.closed': b'black bold',
+    b'branches.current': b'green',
+    b'branches.inactive': b'none',
+    b'diff.changed': b'white',
+    b'diff.deleted': b'red',
+    b'diff.deleted.changed': b'red bold underline',
+    b'diff.deleted.unchanged': b'red',
+    b'diff.diffline': b'bold',
+    b'diff.extended': b'cyan bold',
+    b'diff.file_a': b'red bold',
+    b'diff.file_b': b'green bold',
+    b'diff.hunk': b'magenta',
+    b'diff.inserted': b'green',
+    b'diff.inserted.changed': b'green bold underline',
+    b'diff.inserted.unchanged': b'green',
+    b'diff.tab': b'',
+    b'diff.trailingwhitespace': b'bold red_background',
+    b'changeset.public': b'',
+    b'changeset.draft': b'',
+    b'changeset.secret': b'',
+    b'diffstat.deleted': b'red',
+    b'diffstat.inserted': b'green',
+    b'formatvariant.name.mismatchconfig': b'red',
+    b'formatvariant.name.mismatchdefault': b'yellow',
+    b'formatvariant.name.uptodate': b'green',
+    b'formatvariant.repo.mismatchconfig': b'red',
+    b'formatvariant.repo.mismatchdefault': b'yellow',
+    b'formatvariant.repo.uptodate': b'green',
+    b'formatvariant.config.special': b'yellow',
+    b'formatvariant.config.default': b'green',
+    b'formatvariant.default': b'',
+    b'histedit.remaining': b'red bold',
+    b'ui.addremove.added': b'green',
+    b'ui.addremove.removed': b'red',
+    b'ui.error': b'red',
+    b'ui.prompt': b'yellow',
+    b'log.changeset': b'yellow',
+    b'patchbomb.finalsummary': b'',
+    b'patchbomb.from': b'magenta',
+    b'patchbomb.to': b'cyan',
+    b'patchbomb.subject': b'green',
+    b'patchbomb.diffstats': b'',
+    b'rebase.rebased': b'blue',
+    b'rebase.remaining': b'red bold',
+    b'resolve.resolved': b'green bold',
+    b'resolve.unresolved': b'red bold',
+    b'shelve.age': b'cyan',
+    b'shelve.newest': b'green bold',
+    b'shelve.name': b'blue bold',
+    b'status.added': b'green bold',
+    b'status.clean': b'none',
+    b'status.copied': b'none',
+    b'status.deleted': b'cyan bold underline',
+    b'status.ignored': b'black bold',
+    b'status.modified': b'blue bold',
+    b'status.removed': b'red bold',
+    b'status.unknown': b'magenta bold underline',
+    b'tags.normal': b'green',
+    b'tags.local': b'black bold',
 }
 
 
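
Note: the _defaultstyles values are space-separated effect names that are
later split and resolved against the _effects table, so both sides must agree
on bytes. A minimal sketch with a toy two-entry subset of the table above:

    _effects = {b'red': 31, b'bold': 1}   # toy subset of the table above
    style = b'red bold'                   # e.g. the 'grep.match' default
    codes = [_effects[e] for e in style.split()]
    assert codes == [31, 1]
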
@@ -158,16 +158,16 @@
     if curses is None:
         return
     # Otherwise, see what the config file says.
-    if mode not in ('auto', 'terminfo'):
+    if mode not in (b'auto', b'terminfo'):
         return
     ui._terminfoparams.update(_baseterminfoparams)
 
-    for key, val in ui.configitems('color'):
-        if key.startswith('color.'):
-            newval = (False, int(val), '')
+    for key, val in ui.configitems(b'color'):
+        if key.startswith(b'color.'):
+            newval = (False, int(val), b'')
             ui._terminfoparams[key[6:]] = newval
-        elif key.startswith('terminfo.'):
-            newval = (True, '', val.replace('\\E', '\x1b'))
+        elif key.startswith(b'terminfo.'):
+            newval = (True, b'', val.replace(b'\\E', b'\x1b'))
             ui._terminfoparams[key[9:]] = newval
     try:
         curses.setupterm()
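
Note: the terminfo.* branch above stores a literal backslash-E in config
values and rewrites it to a real ESC byte (0x1b) before use. Sketch of just
that rewrite:

    val = b'\\E[3m'                      # as read from a terminfo.* config key
    code = val.replace(b'\\E', b'\x1b')  # same rewrite as above
    assert code == b'\x1b[3m'
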
@@ -181,16 +181,16 @@
         if not c and not curses.tigetstr(pycompat.sysstr(e)):
             # Most terminals don't support dim, invis, etc., so don't be
             # noisy and use ui.debug().
-            ui.debug("no terminfo entry for %s\n" % e)
+            ui.debug(b"no terminfo entry for %s\n" % e)
             del ui._terminfoparams[key]
     if not curses.tigetstr(r'setaf') or not curses.tigetstr(r'setab'):
         # Only warn about missing terminfo entries if we explicitly asked for
         # terminfo mode and we're in a formatted terminal.
-        if mode == "terminfo" and formatted:
+        if mode == b"terminfo" and formatted:
             ui.warn(
                 _(
-                    "no terminfo entry for setab/setaf: reverting to "
-                    "ECMA-48 color\n"
+                    b"no terminfo entry for setab/setaf: reverting to "
+                    b"ECMA-48 color\n"
                 )
             )
         ui._terminfoparams.clear()
@@ -203,23 +203,26 @@
     the configuration looking for custom colors and effect definitions."""
     mode = _modesetup(ui)
     ui._colormode = mode
-    if mode and mode != 'debug':
+    if mode and mode != b'debug':
         configstyles(ui)
 
 
 def _modesetup(ui):
-    if ui.plain('color'):
+    if ui.plain(b'color'):
         return None
-    config = ui.config('ui', 'color')
-    if config == 'debug':
-        return 'debug'
+    config = ui.config(b'ui', b'color')
+    if config == b'debug':
+        return b'debug'
 
-    auto = config == 'auto'
+    auto = config == b'auto'
     always = False
     if not auto and stringutil.parsebool(config):
         # We want the config to behave like a boolean, "on" is actually auto,
         # but "always" value is treated as a special case to reduce confusion.
-        if ui.configsource('ui', 'color') == '--color' or config == 'always':
+        if (
+            ui.configsource(b'ui', b'color') == b'--color'
+            or config == b'always'
+        ):
             always = True
         else:
             auto = True
@@ -228,64 +231,64 @@
         return None
 
     formatted = always or (
-        encoding.environ.get('TERM') != 'dumb' and ui.formatted()
+        encoding.environ.get(b'TERM') != b'dumb' and ui.formatted()
     )
 
-    mode = ui.config('color', 'mode')
+    mode = ui.config(b'color', b'mode')
 
     # If pager is active, color.pagermode overrides color.mode.
     if getattr(ui, 'pageractive', False):
-        mode = ui.config('color', 'pagermode', mode)
+        mode = ui.config(b'color', b'pagermode', mode)
 
     realmode = mode
     if pycompat.iswindows:
         from . import win32
 
-        term = encoding.environ.get('TERM')
+        term = encoding.environ.get(b'TERM')
         # TERM won't be defined in a vanilla cmd.exe environment.
 
         # UNIX-like environments on Windows such as Cygwin and MSYS will
         # set TERM. They appear to make a best effort attempt at setting it
         # to something appropriate. However, not all environments with TERM
         # defined support ANSI.
-        ansienviron = term and 'xterm' in term
+        ansienviron = term and b'xterm' in term
 
-        if mode == 'auto':
+        if mode == b'auto':
             # Since "ansi" could result in terminal gibberish, we error on the
             # side of selecting "win32". However, if w32effects is not defined,
             # we almost certainly don't support "win32", so don't even try.
             # w32effects is not populated when stdout is redirected, so checking
             # it first avoids win32 calls in a state known to error out.
             if ansienviron or not w32effects or win32.enablevtmode():
-                realmode = 'ansi'
+                realmode = b'ansi'
             else:
-                realmode = 'win32'
+                realmode = b'win32'
         # An empty w32effects is a clue that stdout is redirected, and thus
         # cannot enable VT mode.
-        elif mode == 'ansi' and w32effects and not ansienviron:
+        elif mode == b'ansi' and w32effects and not ansienviron:
             win32.enablevtmode()
-    elif mode == 'auto':
-        realmode = 'ansi'
+    elif mode == b'auto':
+        realmode = b'ansi'
 
     def modewarn():
         # only warn if color.mode was explicitly set and we're in
         # a formatted terminal
         if mode == realmode and formatted:
-            ui.warn(_('warning: failed to set color mode to %s\n') % mode)
+            ui.warn(_(b'warning: failed to set color mode to %s\n') % mode)
 
-    if realmode == 'win32':
+    if realmode == b'win32':
         ui._terminfoparams.clear()
         if not w32effects:
             modewarn()
             return None
-    elif realmode == 'ansi':
+    elif realmode == b'ansi':
         ui._terminfoparams.clear()
-    elif realmode == 'terminfo':
+    elif realmode == b'terminfo':
         _terminfosetup(ui, mode, formatted)
         if not ui._terminfoparams:
             ## FIXME Shouldn't we return None in this case too?
             modewarn()
-            realmode = 'ansi'
+            realmode = b'ansi'
     else:
         return None
 
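
Note: os.environ is str-keyed on Python 3, which is why the formatted-terminal
check above goes through encoding.environ (Mercurial's bytes view of the
environment) and compares against b'dumb'. A rough plain-Python stand-in
(os.environb would also work, but only exists on POSIX):

    import os
    environ = {os.fsencode(k): os.fsencode(v) for k, v in os.environ.items()}
    maybe_formatted = environ.get(b'TERM') != b'dumb'
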
@@ -296,10 +299,10 @@
 
 def configstyles(ui):
     ui._styles.update(_defaultstyles)
-    for status, cfgeffects in ui.configitems('color'):
-        if '.' not in status or status.startswith(('color.', 'terminfo.')):
+    for status, cfgeffects in ui.configitems(b'color'):
+        if b'.' not in status or status.startswith((b'color.', b'terminfo.')):
             continue
-        cfgeffects = ui.configlist('color', status)
+        cfgeffects = ui.configlist(b'color', status)
         if cfgeffects:
             good = []
             for e in cfgeffects:
@@ -308,17 +311,17 @@
                 else:
                     ui.warn(
                         _(
-                            "ignoring unknown color/effect %s "
-                            "(configured in color.%s)\n"
+                            b"ignoring unknown color/effect %s "
+                            b"(configured in color.%s)\n"
                         )
                         % (stringutil.pprint(e), status)
                     )
-            ui._styles[status] = ' '.join(good)
+            ui._styles[status] = b' '.join(good)
 
 
 def _activeeffects(ui):
     '''Return the effects map for the color mode set on the ui.'''
-    if ui._colormode == 'win32':
+    if ui._colormode == b'win32':
         return w32effects
     elif ui._colormode is not None:
         return _effects
@@ -326,7 +329,7 @@
 
 
 def valideffect(ui, effect):
-    'Determine if the effect is valid or not.'
+    b'Determine if the effect is valid or not.'
     return (not ui._terminfoparams and effect in _activeeffects(ui)) or (
         effect in ui._terminfoparams or effect[:-11] in ui._terminfoparams
     )
@@ -336,13 +339,13 @@
     '''Helper function for render_effects().'''
 
     bg = False
-    if effect.endswith('_background'):
+    if effect.endswith(b'_background'):
         bg = True
         effect = effect[:-11]
     try:
         attr, val, termcode = ui._terminfoparams[effect]
     except KeyError:
-        return ''
+        return b''
     if attr:
         if termcode:
             return termcode
@@ -369,26 +372,26 @@
         if not t:
             continue
         parts.extend([start, t, stop])
-    return ''.join(parts)
+    return b''.join(parts)
 
 
 def _render_effects(ui, text, effects):
-    'Wrap text in commands to turn on each effect.'
+    b'Wrap text in commands to turn on each effect.'
     if not text:
         return text
     if ui._terminfoparams:
-        start = ''.join(
-            _effect_str(ui, effect) for effect in ['none'] + effects.split()
+        start = b''.join(
+            _effect_str(ui, effect) for effect in [b'none'] + effects.split()
         )
-        stop = _effect_str(ui, 'none')
+        stop = _effect_str(ui, b'none')
     else:
         activeeffects = _activeeffects(ui)
         start = [
             pycompat.bytestr(activeeffects[e])
-            for e in ['none'] + effects.split()
+            for e in [b'none'] + effects.split()
         ]
-        start = '\033[' + ';'.join(start) + 'm'
-        stop = '\033[' + pycompat.bytestr(activeeffects['none']) + 'm'
+        start = b'\033[' + b';'.join(start) + b'm'
+        stop = b'\033[' + pycompat.bytestr(activeeffects[b'none']) + b'm'
     return _mergeeffects(text, start, stop)
 
 
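
Note: the ANSI branch of _render_effects builds one SGR escape sequence per
line, always leading with 'none' (code 0) to reset prior state. A
self-contained sketch with a toy effects table (incidentally, the b'...'
literals left in docstring position by the conversion, as in the two hunks
above, are plain expression statements on Python 3 and no longer populate
__doc__):

    _effects = {b'none': 0, b'red': 31, b'bold': 1}  # toy subset

    def render(text, effects):
        # yields b'\033[0;31;1m' ... b'\033[0m' for effects=b'red bold'
        start = b'\033[' + b';'.join(
            b'%d' % _effects[e] for e in [b'none'] + effects.split()) + b'm'
        stop = b'\033[' + b'%d' % _effects[b'none'] + b'm'
        return start + text + stop

    assert render(b'hi', b'red bold') == b'\x1b[0;31;1mhi\x1b[0m'
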
@@ -397,29 +400,32 @@
 
 def stripeffects(text):
     """Strip ANSI control codes which could be inserted by colorlabel()"""
-    return _ansieffectre.sub('', text)
+    return _ansieffectre.sub(b'', text)
 
 
 def colorlabel(ui, msg, label):
     """add color control code according to the mode"""
-    if ui._colormode == 'debug':
+    if ui._colormode == b'debug':
         if label and msg:
-            if msg.endswith('\n'):
-                msg = "[%s|%s]\n" % (label, msg[:-1])
+            if msg.endswith(b'\n'):
+                msg = b"[%s|%s]\n" % (label, msg[:-1])
             else:
-                msg = "[%s|%s]" % (label, msg)
+                msg = b"[%s|%s]" % (label, msg)
     elif ui._colormode is not None:
         effects = []
         for l in label.split():
-            s = ui._styles.get(l, '')
+            s = ui._styles.get(l, b'')
             if s:
                 effects.append(s)
             elif valideffect(ui, l):
                 effects.append(l)
-        effects = ' '.join(effects)
+        effects = b' '.join(effects)
         if effects:
-            msg = '\n'.join(
-                [_render_effects(ui, line, effects) for line in msg.split('\n')]
+            msg = b'\n'.join(
+                [
+                    _render_effects(ui, line, effects)
+                    for line in msg.split(b'\n')
+                ]
             )
     return msg
 
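
Note: in 'debug' color mode, colorlabel wraps the message as [label|msg]
instead of emitting escape codes, keeping any trailing newline outside the
brackets. Sketch of just that branch:

    def debuglabel(msg, label):
        if msg.endswith(b'\n'):
            return b"[%s|%s]\n" % (label, msg[:-1])
        return b"[%s|%s]" % (label, msg)

    assert debuglabel(b'added\n', b'status.added') == b'[status.added|added]\n'
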
@@ -472,29 +478,29 @@
 
     # http://msdn.microsoft.com/en-us/library/ms682088%28VS.85%29.aspx
     w32effects = {
-        'none': -1,
-        'black': 0,
-        'red': _FOREGROUND_RED,
-        'green': _FOREGROUND_GREEN,
-        'yellow': _FOREGROUND_RED | _FOREGROUND_GREEN,
-        'blue': _FOREGROUND_BLUE,
-        'magenta': _FOREGROUND_BLUE | _FOREGROUND_RED,
-        'cyan': _FOREGROUND_BLUE | _FOREGROUND_GREEN,
-        'white': _FOREGROUND_RED | _FOREGROUND_GREEN | _FOREGROUND_BLUE,
-        'bold': _FOREGROUND_INTENSITY,
-        'black_background': 0x100,  # unused value > 0x0f
-        'red_background': _BACKGROUND_RED,
-        'green_background': _BACKGROUND_GREEN,
-        'yellow_background': _BACKGROUND_RED | _BACKGROUND_GREEN,
-        'blue_background': _BACKGROUND_BLUE,
-        'purple_background': _BACKGROUND_BLUE | _BACKGROUND_RED,
-        'cyan_background': _BACKGROUND_BLUE | _BACKGROUND_GREEN,
-        'white_background': (
+        b'none': -1,
+        b'black': 0,
+        b'red': _FOREGROUND_RED,
+        b'green': _FOREGROUND_GREEN,
+        b'yellow': _FOREGROUND_RED | _FOREGROUND_GREEN,
+        b'blue': _FOREGROUND_BLUE,
+        b'magenta': _FOREGROUND_BLUE | _FOREGROUND_RED,
+        b'cyan': _FOREGROUND_BLUE | _FOREGROUND_GREEN,
+        b'white': _FOREGROUND_RED | _FOREGROUND_GREEN | _FOREGROUND_BLUE,
+        b'bold': _FOREGROUND_INTENSITY,
+        b'black_background': 0x100,  # unused value > 0x0f
+        b'red_background': _BACKGROUND_RED,
+        b'green_background': _BACKGROUND_GREEN,
+        b'yellow_background': _BACKGROUND_RED | _BACKGROUND_GREEN,
+        b'blue_background': _BACKGROUND_BLUE,
+        b'purple_background': _BACKGROUND_BLUE | _BACKGROUND_RED,
+        b'cyan_background': _BACKGROUND_BLUE | _BACKGROUND_GREEN,
+        b'white_background': (
             _BACKGROUND_RED | _BACKGROUND_GREEN | _BACKGROUND_BLUE
         ),
-        'bold_background': _BACKGROUND_INTENSITY,
-        'underline': _COMMON_LVB_UNDERSCORE,  # double-byte charsets only
-        'inverse': _COMMON_LVB_REVERSE_VIDEO,  # double-byte charsets only
+        b'bold_background': _BACKGROUND_INTENSITY,
+        b'underline': _COMMON_LVB_UNDERSCORE,  # double-byte charsets only
+        b'inverse': _COMMON_LVB_REVERSE_VIDEO,  # double-byte charsets only
     }
 
     passthrough = {
@@ -522,7 +528,7 @@
             )
 
     def win32print(ui, writefunc, text, **opts):
-        label = opts.get(r'label', '')
+        label = opts.get(r'label', b'')
         attr = origattr
 
         def mapcolor(val, attr):
@@ -537,7 +543,7 @@
 
         # determine console attributes based on labels
         for l in label.split():
-            style = ui._styles.get(l, '')
+            style = ui._styles.get(l, b'')
             for effect in style.split():
                 try:
                     attr = mapcolor(w32effects[effect], attr)
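
Note: the w32effects values are console attribute bit flags (mirroring the
wincon.h constants), so compound colors are bitwise ORs of the primaries, as
in the yellow/magenta/cyan entries above. Sketch:

    _FOREGROUND_BLUE, _FOREGROUND_GREEN, _FOREGROUND_RED = 0x1, 0x2, 0x4
    yellow = _FOREGROUND_RED | _FOREGROUND_GREEN   # 0x6, as in the table
    assert yellow == 0x6
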
--- a/mercurial/commands.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/commands.py	Sun Oct 06 09:48:39 2019 -0400
@@ -81,65 +81,67 @@
 
 globalopts = [
     (
-        'R',
-        'repository',
-        '',
-        _('repository root directory or name of overlay bundle file'),
-        _('REPO'),
+        b'R',
+        b'repository',
+        b'',
+        _(b'repository root directory or name of overlay bundle file'),
+        _(b'REPO'),
     ),
-    ('', 'cwd', '', _('change working directory'), _('DIR')),
+    (b'', b'cwd', b'', _(b'change working directory'), _(b'DIR')),
     (
-        'y',
-        'noninteractive',
+        b'y',
+        b'noninteractive',
         None,
-        _('do not prompt, automatically pick the first choice for all prompts'),
+        _(
+            b'do not prompt, automatically pick the first choice for all prompts'
+        ),
     ),
-    ('q', 'quiet', None, _('suppress output')),
-    ('v', 'verbose', None, _('enable additional output')),
+    (b'q', b'quiet', None, _(b'suppress output')),
+    (b'v', b'verbose', None, _(b'enable additional output')),
     (
-        '',
-        'color',
-        '',
+        b'',
+        b'color',
+        b'',
         # i18n: 'always', 'auto', 'never', and 'debug' are keywords
         # and should not be translated
-        _("when to colorize (boolean, always, auto, never, or debug)"),
-        _('TYPE'),
+        _(b"when to colorize (boolean, always, auto, never, or debug)"),
+        _(b'TYPE'),
     ),
     (
-        '',
-        'config',
+        b'',
+        b'config',
         [],
-        _('set/override config option (use \'section.name=value\')'),
-        _('CONFIG'),
+        _(b'set/override config option (use \'section.name=value\')'),
+        _(b'CONFIG'),
     ),
-    ('', 'debug', None, _('enable debugging output')),
-    ('', 'debugger', None, _('start debugger')),
+    (b'', b'debug', None, _(b'enable debugging output')),
+    (b'', b'debugger', None, _(b'start debugger')),
     (
-        '',
-        'encoding',
+        b'',
+        b'encoding',
         encoding.encoding,
-        _('set the charset encoding'),
-        _('ENCODE'),
+        _(b'set the charset encoding'),
+        _(b'ENCODE'),
     ),
     (
-        '',
-        'encodingmode',
+        b'',
+        b'encodingmode',
         encoding.encodingmode,
-        _('set the charset encoding mode'),
-        _('MODE'),
+        _(b'set the charset encoding mode'),
+        _(b'MODE'),
     ),
-    ('', 'traceback', None, _('always print a traceback on exception')),
-    ('', 'time', None, _('time how long the command takes')),
-    ('', 'profile', None, _('print command execution profile')),
-    ('', 'version', None, _('output version information and exit')),
-    ('h', 'help', None, _('display help and exit')),
-    ('', 'hidden', False, _('consider hidden changesets')),
+    (b'', b'traceback', None, _(b'always print a traceback on exception')),
+    (b'', b'time', None, _(b'time how long the command takes')),
+    (b'', b'profile', None, _(b'print command execution profile')),
+    (b'', b'version', None, _(b'output version information and exit')),
+    (b'h', b'help', None, _(b'display help and exit')),
+    (b'', b'hidden', False, _(b'consider hidden changesets')),
     (
-        '',
-        'pager',
-        'auto',
-        _("when to paginate (boolean, always, auto, or never)"),
-        _('TYPE'),
+        b'',
+        b'pager',
+        b'auto',
+        _(b"when to paginate (boolean, always, auto, or never)"),
+        _(b'TYPE'),
     ),
 ]
 
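
Note: each entry in globalopts (and in the per-command option lists below) is
a 4- or 5-tuple of (short flag, long flag, default, help[, value
placeholder]); after this change every textual member is bytes. Sketch, with
the _() i18n wrapper omitted:

    opt = (b'', b'cwd', b'', b'change working directory', b'DIR')
    short, long_, default, helptext, placeholder = opt
    assert (long_, placeholder) == (b'cwd', b'DIR')
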
@@ -164,7 +166,7 @@
 
 
 @command(
-    'abort',
+    b'abort',
     dryrunopts,
     helpcategory=command.CATEGORY_CHANGE_MANAGEMENT,
     helpbasic=True,
@@ -180,25 +182,27 @@
     dryrun = opts.get(r'dry_run')
     abortstate = cmdutil.getunfinishedstate(repo)
     if not abortstate:
-        raise error.Abort(_('no operation in progress'))
+        raise error.Abort(_(b'no operation in progress'))
     if not abortstate.abortfunc:
         raise error.Abort(
             (
-                _("%s in progress but does not support 'hg abort'")
+                _(b"%s in progress but does not support 'hg abort'")
                 % (abortstate._opname)
             ),
             hint=abortstate.hint(),
         )
     if dryrun:
-        ui.status(_('%s in progress, will be aborted\n') % (abortstate._opname))
+        ui.status(
+            _(b'%s in progress, will be aborted\n') % (abortstate._opname)
+        )
         return
     return abortstate.abortfunc(ui, repo)
 
 
 @command(
-    'add',
+    b'add',
     walkopts + subrepoopts + dryrunopts,
-    _('[OPTION]... [FILE]...'),
+    _(b'[OPTION]... [FILE]...'),
     helpcategory=command.CATEGORY_WORKING_DIRECTORY,
     helpbasic=True,
     inferrepo=True,
@@ -248,14 +252,14 @@
 
     m = scmutil.match(repo[None], pats, pycompat.byteskwargs(opts))
     uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
-    rejected = cmdutil.add(ui, repo, m, "", uipathfn, False, **opts)
+    rejected = cmdutil.add(ui, repo, m, b"", uipathfn, False, **opts)
     return rejected and 1 or 0
 
 
 @command(
-    'addremove',
+    b'addremove',
     similarityopts + subrepoopts + walkopts + dryrunopts,
-    _('[OPTION]... [FILE]...'),
+    _(b'[OPTION]... [FILE]...'),
     helpcategory=command.CATEGORY_WORKING_DIRECTORY,
     inferrepo=True,
 )
@@ -322,43 +326,49 @@
     Returns 0 if all files are successfully added.
     """
     opts = pycompat.byteskwargs(opts)
-    if not opts.get('similarity'):
-        opts['similarity'] = '100'
+    if not opts.get(b'similarity'):
+        opts[b'similarity'] = b'100'
     matcher = scmutil.match(repo[None], pats, opts)
     relative = scmutil.anypats(pats, opts)
     uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=relative)
-    return scmutil.addremove(repo, matcher, "", uipathfn, opts)
+    return scmutil.addremove(repo, matcher, b"", uipathfn, opts)
 
 
 @command(
-    'annotate|blame',
+    b'annotate|blame',
     [
-        ('r', 'rev', '', _('annotate the specified revision'), _('REV')),
+        (b'r', b'rev', b'', _(b'annotate the specified revision'), _(b'REV')),
         (
-            '',
-            'follow',
+            b'',
+            b'follow',
             None,
-            _('follow copies/renames and list the filename (DEPRECATED)'),
+            _(b'follow copies/renames and list the filename (DEPRECATED)'),
         ),
-        ('', 'no-follow', None, _("don't follow copies and renames")),
-        ('a', 'text', None, _('treat all files as text')),
-        ('u', 'user', None, _('list the author (long with -v)')),
-        ('f', 'file', None, _('list the filename')),
-        ('d', 'date', None, _('list the date (short with -q)')),
-        ('n', 'number', None, _('list the revision number (default)')),
-        ('c', 'changeset', None, _('list the changeset')),
+        (b'', b'no-follow', None, _(b"don't follow copies and renames")),
+        (b'a', b'text', None, _(b'treat all files as text')),
+        (b'u', b'user', None, _(b'list the author (long with -v)')),
+        (b'f', b'file', None, _(b'list the filename')),
+        (b'd', b'date', None, _(b'list the date (short with -q)')),
+        (b'n', b'number', None, _(b'list the revision number (default)')),
+        (b'c', b'changeset', None, _(b'list the changeset')),
         (
-            'l',
-            'line-number',
+            b'l',
+            b'line-number',
             None,
-            _('show line number at the first appearance'),
+            _(b'show line number at the first appearance'),
         ),
-        ('', 'skip', [], _('revision to not display (EXPERIMENTAL)'), _('REV')),
+        (
+            b'',
+            b'skip',
+            [],
+            _(b'revision to not display (EXPERIMENTAL)'),
+            _(b'REV'),
+        ),
     ]
     + diffwsopts
     + walkopts
     + formatteropts,
-    _('[-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...'),
+    _(b'[-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...'),
     helpcategory=command.CATEGORY_FILE_CONTENTS,
     helpbasic=True,
     inferrepo=True,
@@ -403,32 +413,36 @@
     """
     opts = pycompat.byteskwargs(opts)
     if not pats:
-        raise error.Abort(_('at least one filename or pattern is required'))
-
-    if opts.get('follow'):
+        raise error.Abort(_(b'at least one filename or pattern is required'))
+
+    if opts.get(b'follow'):
         # --follow is deprecated and now just an alias for -f/--file
         # to mimic the behavior of Mercurial before version 1.5
-        opts['file'] = True
+        opts[b'file'] = True
 
     if (
-        not opts.get('user')
-        and not opts.get('changeset')
-        and not opts.get('date')
-        and not opts.get('file')
+        not opts.get(b'user')
+        and not opts.get(b'changeset')
+        and not opts.get(b'date')
+        and not opts.get(b'file')
     ):
-        opts['number'] = True
-
-    linenumber = opts.get('line_number') is not None
-    if linenumber and (not opts.get('changeset')) and (not opts.get('number')):
-        raise error.Abort(_('at least one of -n/-c is required for -l'))
-
-    rev = opts.get('rev')
+        opts[b'number'] = True
+
+    linenumber = opts.get(b'line_number') is not None
+    if (
+        linenumber
+        and (not opts.get(b'changeset'))
+        and (not opts.get(b'number'))
+    ):
+        raise error.Abort(_(b'at least one of -n/-c is required for -l'))
+
+    rev = opts.get(b'rev')
     if rev:
-        repo = scmutil.unhidehashlikerevs(repo, [rev], 'nowarn')
+        repo = scmutil.unhidehashlikerevs(repo, [rev], b'nowarn')
     ctx = scmutil.revsingle(repo, rev)
 
-    ui.pager('annotate')
-    rootfm = ui.formatter('annotate', opts)
+    ui.pager(b'annotate')
+    rootfm = ui.formatter(b'annotate', opts)
     if ui.debugflag:
         shorthex = pycompat.identity
     else:
@@ -441,45 +455,45 @@
     else:
         datefunc = dateutil.datestr
     if ctx.rev() is None:
-        if opts.get('changeset'):
+        if opts.get(b'changeset'):
             # omit "+" suffix which is appended to node hex
             def formatrev(rev):
                 if rev == wdirrev:
-                    return '%d' % ctx.p1().rev()
+                    return b'%d' % ctx.p1().rev()
                 else:
-                    return '%d' % rev
+                    return b'%d' % rev
 
         else:
 
             def formatrev(rev):
                 if rev == wdirrev:
-                    return '%d+' % ctx.p1().rev()
+                    return b'%d+' % ctx.p1().rev()
                 else:
-                    return '%d ' % rev
+                    return b'%d ' % rev
 
         def formathex(h):
             if h == wdirhex:
-                return '%s+' % shorthex(hex(ctx.p1().node()))
+                return b'%s+' % shorthex(hex(ctx.p1().node()))
             else:
-                return '%s ' % shorthex(h)
+                return b'%s ' % shorthex(h)
 
     else:
         formatrev = b'%d'.__mod__
         formathex = shorthex
 
     opmap = [
-        ('user', ' ', lambda x: x.fctx.user(), ui.shortuser),
-        ('rev', ' ', lambda x: scmutil.intrev(x.fctx), formatrev),
-        ('node', ' ', lambda x: hex(scmutil.binnode(x.fctx)), formathex),
-        ('date', ' ', lambda x: x.fctx.date(), util.cachefunc(datefunc)),
-        ('path', ' ', lambda x: x.fctx.path(), pycompat.bytestr),
-        ('lineno', ':', lambda x: x.lineno, pycompat.bytestr),
+        (b'user', b' ', lambda x: x.fctx.user(), ui.shortuser),
+        (b'rev', b' ', lambda x: scmutil.intrev(x.fctx), formatrev),
+        (b'node', b' ', lambda x: hex(scmutil.binnode(x.fctx)), formathex),
+        (b'date', b' ', lambda x: x.fctx.date(), util.cachefunc(datefunc)),
+        (b'path', b' ', lambda x: x.fctx.path(), pycompat.bytestr),
+        (b'lineno', b':', lambda x: x.lineno, pycompat.bytestr),
     ]
     opnamemap = {
-        'rev': 'number',
-        'node': 'changeset',
-        'path': 'file',
-        'lineno': 'line_number',
+        b'rev': b'number',
+        b'node': b'changeset',
+        b'path': b'file',
+        b'lineno': b'line_number',
     }
 
     if rootfm.isplain():
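
Note: the non-working-directory branch above uses b'%d'.__mod__ as the
revision formatter, i.e. the bound %-formatting method of a bytes literal,
equivalent to lambda rev: b'%d' % rev (bytes %-formatting exists since
Python 3.5). Sketch:

    formatrev = b'%d'.__mod__
    assert formatrev(42) == b'42'
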
@@ -498,23 +512,23 @@
         for fn, sep, get, fmt in opmap
         if opts.get(opnamemap.get(fn, fn)) or fn in datahint
     ]
-    funcmap[0] = (funcmap[0][0], '')  # no separator in front of first column
-    fields = ' '.join(
+    funcmap[0] = (funcmap[0][0], b'')  # no separator in front of first column
+    fields = b' '.join(
         fn
         for fn, sep, get, fmt in opmap
         if opts.get(opnamemap.get(fn, fn)) or fn in datahint
     )
 
     def bad(x, y):
-        raise error.Abort("%s: %s" % (x, y))
+        raise error.Abort(b"%s: %s" % (x, y))
 
     m = scmutil.match(ctx, pats, opts, badfn=bad)
 
-    follow = not opts.get('no_follow')
+    follow = not opts.get(b'no_follow')
     diffopts = patch.difffeatureopts(
-        ui, opts, section='annotate', whitespace=True
+        ui, opts, section=b'annotate', whitespace=True
     )
-    skiprevs = opts.get('skip')
+    skiprevs = opts.get(b'skip')
     if skiprevs:
         skiprevs = scmutil.revrange(repo, skiprevs)
 
@@ -523,11 +537,11 @@
         fctx = ctx[abs]
         rootfm.startitem()
         rootfm.data(path=abs)
-        if not opts.get('text') and fctx.isbinary():
-            rootfm.plain(_("%s: binary file\n") % uipathfn(abs))
+        if not opts.get(b'text') and fctx.isbinary():
+            rootfm.plain(_(b"%s: binary file\n") % uipathfn(abs))
             continue
 
-        fm = rootfm.nested('lines', tmpl='{rev}: {line}')
+        fm = rootfm.nested(b'lines', tmpl=b'{rev}: {line}')
         lines = fctx.annotate(
             follow=follow, skiprevs=skiprevs, diffopts=diffopts
         )
@@ -542,45 +556,45 @@
             if fm.isplain():
                 sizes = [encoding.colwidth(x) for x in l]
                 ml = max(sizes)
-                formats.append([sep + ' ' * (ml - w) + '%s' for w in sizes])
+                formats.append([sep + b' ' * (ml - w) + b'%s' for w in sizes])
             else:
-                formats.append(['%s' for x in l])
+                formats.append([b'%s' for x in l])
             pieces.append(l)
 
         for f, p, n in zip(zip(*formats), zip(*pieces), lines):
             fm.startitem()
             fm.context(fctx=n.fctx)
-            fm.write(fields, "".join(f), *p)
+            fm.write(fields, b"".join(f), *p)
             if n.skip:
-                fmt = "* %s"
+                fmt = b"* %s"
             else:
-                fmt = ": %s"
-            fm.write('line', fmt, n.text)
-
-        if not lines[-1].text.endswith('\n'):
-            fm.plain('\n')
+                fmt = b": %s"
+            fm.write(b'line', fmt, n.text)
+
+        if not lines[-1].text.endswith(b'\n'):
+            fm.plain(b'\n')
         fm.end()
 
     rootfm.end()
 
 
 @command(
-    'archive',
+    b'archive',
     [
-        ('', 'no-decode', None, _('do not pass files through decoders')),
+        (b'', b'no-decode', None, _(b'do not pass files through decoders')),
         (
-            'p',
-            'prefix',
-            '',
-            _('directory prefix for files in archive'),
-            _('PREFIX'),
+            b'p',
+            b'prefix',
+            b'',
+            _(b'directory prefix for files in archive'),
+            _(b'PREFIX'),
         ),
-        ('r', 'rev', '', _('revision to distribute'), _('REV')),
-        ('t', 'type', '', _('type of distribution to create'), _('TYPE')),
+        (b'r', b'rev', b'', _(b'revision to distribute'), _(b'REV')),
+        (b't', b'type', b'', _(b'type of distribution to create'), _(b'TYPE')),
     ]
     + subrepoopts
     + walkopts,
-    _('[OPTION]... DEST'),
+    _(b'[OPTION]... DEST'),
     helpcategory=command.CATEGORY_IMPORT_EXPORT,
 )
 def archive(ui, repo, dest, **opts):
@@ -626,26 +640,26 @@
     '''
 
     opts = pycompat.byteskwargs(opts)
-    rev = opts.get('rev')
+    rev = opts.get(b'rev')
     if rev:
-        repo = scmutil.unhidehashlikerevs(repo, [rev], 'nowarn')
+        repo = scmutil.unhidehashlikerevs(repo, [rev], b'nowarn')
     ctx = scmutil.revsingle(repo, rev)
     if not ctx:
-        raise error.Abort(_('no working directory: please specify a revision'))
+        raise error.Abort(_(b'no working directory: please specify a revision'))
     node = ctx.node()
     dest = cmdutil.makefilename(ctx, dest)
     if os.path.realpath(dest) == repo.root:
-        raise error.Abort(_('repository root cannot be destination'))
-
-    kind = opts.get('type') or archival.guesskind(dest) or 'files'
-    prefix = opts.get('prefix')
-
-    if dest == '-':
-        if kind == 'files':
-            raise error.Abort(_('cannot archive plain files to stdout'))
+        raise error.Abort(_(b'repository root cannot be destination'))
+
+    kind = opts.get(b'type') or archival.guesskind(dest) or b'files'
+    prefix = opts.get(b'prefix')
+
+    if dest == b'-':
+        if kind == b'files':
+            raise error.Abort(_(b'cannot archive plain files to stdout'))
         dest = cmdutil.makefileobj(ctx, dest)
         if not prefix:
-            prefix = os.path.basename(repo.root) + '-%h'
+            prefix = os.path.basename(repo.root) + b'-%h'
 
     prefix = cmdutil.makefilename(ctx, prefix)
     match = scmutil.match(ctx, [], opts)
@@ -654,39 +668,44 @@
         dest,
         node,
         kind,
-        not opts.get('no_decode'),
+        not opts.get(b'no_decode'),
         match,
         prefix,
-        subrepos=opts.get('subrepos'),
+        subrepos=opts.get(b'subrepos'),
     )
 
 
 @command(
-    'backout',
+    b'backout',
     [
-        ('', 'merge', None, _('merge with old dirstate parent after backout')),
         (
-            '',
-            'commit',
+            b'',
+            b'merge',
             None,
-            _('commit if no conflicts were encountered (DEPRECATED)'),
+            _(b'merge with old dirstate parent after backout'),
         ),
-        ('', 'no-commit', None, _('do not commit')),
+        (
+            b'',
+            b'commit',
+            None,
+            _(b'commit if no conflicts were encountered (DEPRECATED)'),
+        ),
+        (b'', b'no-commit', None, _(b'do not commit')),
         (
-            '',
-            'parent',
-            '',
-            _('parent to choose when backing out merge (DEPRECATED)'),
-            _('REV'),
+            b'',
+            b'parent',
+            b'',
+            _(b'parent to choose when backing out merge (DEPRECATED)'),
+            _(b'REV'),
         ),
-        ('r', 'rev', '', _('revision to backout'), _('REV')),
-        ('e', 'edit', False, _('invoke editor on commit messages')),
+        (b'r', b'rev', b'', _(b'revision to backout'), _(b'REV')),
+        (b'e', b'edit', False, _(b'invoke editor on commit messages')),
     ]
     + mergetoolopts
     + walkopts
     + commitopts
     + commitopts2,
-    _('[OPTION]... [-r] REV'),
+    _(b'[OPTION]... [-r] REV'),
     helpcategory=command.CATEGORY_CHANGE_MANAGEMENT,
 )
 def backout(ui, repo, node=None, rev=None, **opts):
@@ -747,23 +766,23 @@
 
 def _dobackout(ui, repo, node=None, rev=None, **opts):
     opts = pycompat.byteskwargs(opts)
-    if opts.get('commit') and opts.get('no_commit'):
-        raise error.Abort(_("cannot use --commit with --no-commit"))
-    if opts.get('merge') and opts.get('no_commit'):
-        raise error.Abort(_("cannot use --merge with --no-commit"))
+    if opts.get(b'commit') and opts.get(b'no_commit'):
+        raise error.Abort(_(b"cannot use --commit with --no-commit"))
+    if opts.get(b'merge') and opts.get(b'no_commit'):
+        raise error.Abort(_(b"cannot use --merge with --no-commit"))
 
     if rev and node:
-        raise error.Abort(_("please specify just one revision"))
+        raise error.Abort(_(b"please specify just one revision"))
 
     if not rev:
         rev = node
 
     if not rev:
-        raise error.Abort(_("please specify a revision to backout"))
-
-    date = opts.get('date')
+        raise error.Abort(_(b"please specify a revision to backout"))
+
+    date = opts.get(b'date')
     if date:
-        opts['date'] = dateutil.parsedate(date)
+        opts[b'date'] = dateutil.parsedate(date)
 
     cmdutil.checkunfinished(repo)
     cmdutil.bailifchanged(repo)
@@ -771,33 +790,33 @@
 
     op1, op2 = repo.dirstate.parents()
     if not repo.changelog.isancestor(node, op1):
-        raise error.Abort(_('cannot backout change that is not an ancestor'))
+        raise error.Abort(_(b'cannot backout change that is not an ancestor'))
 
     p1, p2 = repo.changelog.parents(node)
     if p1 == nullid:
-        raise error.Abort(_('cannot backout a change with no parents'))
+        raise error.Abort(_(b'cannot backout a change with no parents'))
     if p2 != nullid:
-        if not opts.get('parent'):
-            raise error.Abort(_('cannot backout a merge changeset'))
-        p = repo.lookup(opts['parent'])
+        if not opts.get(b'parent'):
+            raise error.Abort(_(b'cannot backout a merge changeset'))
+        p = repo.lookup(opts[b'parent'])
         if p not in (p1, p2):
             raise error.Abort(
-                _('%s is not a parent of %s') % (short(p), short(node))
+                _(b'%s is not a parent of %s') % (short(p), short(node))
             )
         parent = p
     else:
-        if opts.get('parent'):
-            raise error.Abort(_('cannot use --parent on non-merge changeset'))
+        if opts.get(b'parent'):
+            raise error.Abort(_(b'cannot use --parent on non-merge changeset'))
         parent = p1
 
     # the backout should appear on the same branch
     branch = repo.dirstate.branch()
     bheads = repo.branchheads(branch)
     rctx = scmutil.revsingle(repo, hex(parent))
-    if not opts.get('merge') and op1 != node:
-        with dirstateguard.dirstateguard(repo, 'backout'):
-            overrides = {('ui', 'forcemerge'): opts.get('tool', '')}
-            with ui.configoverride(overrides, 'backout'):
+    if not opts.get(b'merge') and op1 != node:
+        with dirstateguard.dirstateguard(repo, b'backout'):
+            overrides = {(b'ui', b'forcemerge'): opts.get(b'tool', b'')}
+            with ui.configoverride(overrides, b'backout'):
                 stats = mergemod.update(
                     repo,
                     parent,
@@ -810,7 +829,7 @@
         hg._showstats(repo, stats)
         if stats.unresolvedcount:
             repo.ui.status(
-                _("use 'hg resolve' to retry unresolved " "file merges\n")
+                _(b"use 'hg resolve' to retry unresolved " b"file merges\n")
             )
             return 1
     else:
@@ -818,64 +837,66 @@
         repo.dirstate.setbranch(branch)
         cmdutil.revert(ui, repo, rctx, repo.dirstate.parents())
 
-    if opts.get('no_commit'):
-        msg = _("changeset %s backed out, " "don't forget to commit.\n")
+    if opts.get(b'no_commit'):
+        msg = _(b"changeset %s backed out, " b"don't forget to commit.\n")
         ui.status(msg % short(node))
         return 0
 
     def commitfunc(ui, repo, message, match, opts):
-        editform = 'backout'
+        editform = b'backout'
         e = cmdutil.getcommiteditor(
             editform=editform, **pycompat.strkwargs(opts)
         )
         if not message:
             # we don't translate commit messages
-            message = "Backed out changeset %s" % short(node)
+            message = b"Backed out changeset %s" % short(node)
             e = cmdutil.getcommiteditor(edit=True, editform=editform)
         return repo.commit(
-            message, opts.get('user'), opts.get('date'), match, editor=e
+            message, opts.get(b'user'), opts.get(b'date'), match, editor=e
         )
 
     newnode = cmdutil.commit(ui, repo, commitfunc, [], opts)
     if not newnode:
-        ui.status(_("nothing changed\n"))
+        ui.status(_(b"nothing changed\n"))
         return 1
     cmdutil.commitstatus(repo, newnode, branch, bheads)
 
     def nice(node):
-        return '%d:%s' % (repo.changelog.rev(node), short(node))
+        return b'%d:%s' % (repo.changelog.rev(node), short(node))
 
     ui.status(
-        _('changeset %s backs out changeset %s\n')
+        _(b'changeset %s backs out changeset %s\n')
         % (nice(repo.changelog.tip()), nice(node))
     )
-    if opts.get('merge') and op1 != node:
+    if opts.get(b'merge') and op1 != node:
         hg.clean(repo, op1, show_stats=False)
-        ui.status(_('merging with changeset %s\n') % nice(repo.changelog.tip()))
-        overrides = {('ui', 'forcemerge'): opts.get('tool', '')}
-        with ui.configoverride(overrides, 'backout'):
+        ui.status(
+            _(b'merging with changeset %s\n') % nice(repo.changelog.tip())
+        )
+        overrides = {(b'ui', b'forcemerge'): opts.get(b'tool', b'')}
+        with ui.configoverride(overrides, b'backout'):
             return hg.merge(repo, hex(repo.changelog.tip()))
     return 0
 
 
 @command(
-    'bisect',
+    b'bisect',
     [
-        ('r', 'reset', False, _('reset bisect state')),
-        ('g', 'good', False, _('mark changeset good')),
-        ('b', 'bad', False, _('mark changeset bad')),
-        ('s', 'skip', False, _('skip testing changeset')),
-        ('e', 'extend', False, _('extend the bisect range')),
+        (b'r', b'reset', False, _(b'reset bisect state')),
+        (b'g', b'good', False, _(b'mark changeset good')),
+        (b'b', b'bad', False, _(b'mark changeset bad')),
+        (b's', b'skip', False, _(b'skip testing changeset')),
+        (b'e', b'extend', False, _(b'extend the bisect range')),
         (
-            'c',
-            'command',
-            '',
-            _('use command to check changeset state'),
-            _('CMD'),
+            b'c',
+            b'command',
+            b'',
+            _(b'use command to check changeset state'),
+            _(b'CMD'),
         ),
-        ('U', 'noupdate', False, _('do not update to target')),
+        (b'U', b'noupdate', False, _(b'do not update to target')),
     ],
-    _("[-gbsr] [-U] [-c CMD] [REV]"),
+    _(b"[-gbsr] [-U] [-c CMD] [REV]"),
     helpcategory=command.CATEGORY_CHANGE_NAVIGATION,
 )
 def bisect(
@@ -973,32 +994,32 @@
     Returns 0 on success.
     """
     # backward compatibility
-    if rev in "good bad reset init".split():
-        ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n"))
+    if rev in b"good bad reset init".split():
+        ui.warn(_(b"(use of 'hg bisect <cmd>' is deprecated)\n"))
         cmd, rev, extra = rev, extra, None
-        if cmd == "good":
+        if cmd == b"good":
             good = True
-        elif cmd == "bad":
+        elif cmd == b"bad":
             bad = True
         else:
             reset = True
     elif extra:
-        raise error.Abort(_('incompatible arguments'))
+        raise error.Abort(_(b'incompatible arguments'))
 
     incompatibles = {
-        '--bad': bad,
-        '--command': bool(command),
-        '--extend': extend,
-        '--good': good,
-        '--reset': reset,
-        '--skip': skip,
+        b'--bad': bad,
+        b'--command': bool(command),
+        b'--extend': extend,
+        b'--good': good,
+        b'--reset': reset,
+        b'--skip': skip,
     }
 
     enabled = [x for x in incompatibles if incompatibles[x]]
 
     if len(enabled) > 1:
         raise error.Abort(
-            _('%s and %s are incompatible') % tuple(sorted(enabled)[0:2])
+            _(b'%s and %s are incompatible') % tuple(sorted(enabled)[0:2])
         )
 
     if reset:
@@ -1012,15 +1033,15 @@
         if rev:
             nodes = [repo[i].node() for i in scmutil.revrange(repo, [rev])]
         else:
-            nodes = [repo.lookup('.')]
+            nodes = [repo.lookup(b'.')]
         if good:
-            state['good'] += nodes
+            state[b'good'] += nodes
         elif bad:
-            state['bad'] += nodes
+            state[b'bad'] += nodes
         elif skip:
-            state['skip'] += nodes
+            state[b'skip'] += nodes
         hbisect.save_state(repo, state)
-        if not (state['good'] and state['bad']):
+        if not (state[b'good'] and state[b'bad']):
             return
 
     def mayupdate(repo, node, show_stats=True):
@@ -1037,45 +1058,45 @@
         changesets = 1
         if noupdate:
             try:
-                node = state['current'][0]
+                node = state[b'current'][0]
             except LookupError:
                 raise error.Abort(
                     _(
-                        'current bisect revision is unknown - '
-                        'start a new bisect to fix'
+                        b'current bisect revision is unknown - '
+                        b'start a new bisect to fix'
                     )
                 )
         else:
             node, p2 = repo.dirstate.parents()
             if p2 != nullid:
-                raise error.Abort(_('current bisect revision is a merge'))
+                raise error.Abort(_(b'current bisect revision is a merge'))
         if rev:
             node = repo[scmutil.revsingle(repo, rev, node)].node()
         try:
             while changesets:
                 # update state
-                state['current'] = [node]
+                state[b'current'] = [node]
                 hbisect.save_state(repo, state)
                 status = ui.system(
                     command,
-                    environ={'HG_NODE': hex(node)},
-                    blockedtag='bisect_check',
+                    environ={b'HG_NODE': hex(node)},
+                    blockedtag=b'bisect_check',
                 )
                 if status == 125:
-                    transition = "skip"
+                    transition = b"skip"
                 elif status == 0:
-                    transition = "good"
+                    transition = b"good"
                 # status < 0 means process was killed
                 elif status == 127:
-                    raise error.Abort(_("failed to execute %s") % command)
+                    raise error.Abort(_(b"failed to execute %s") % command)
                 elif status < 0:
-                    raise error.Abort(_("%s killed") % command)
+                    raise error.Abort(_(b"%s killed") % command)
                 else:
-                    transition = "bad"
+                    transition = b"bad"
                 state[transition].append(node)
                 ctx = repo[node]
                 ui.status(
-                    _('changeset %d:%s: %s\n') % (ctx.rev(), ctx, transition)
+                    _(b'changeset %d:%s: %s\n') % (ctx.rev(), ctx, transition)
                 )
                 hbisect.checkstate(state)
                 # bisect
@@ -1084,7 +1105,7 @@
                 node = nodes[0]
                 mayupdate(repo, node, show_stats=False)
         finally:
-            state['current'] = [node]
+            state[b'current'] = [node]
             hbisect.save_state(repo, state)
         hbisect.printresult(ui, repo, state, displayer, nodes, bgood)
         return
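
Note: the bisect --command loop above maps the child's exit status onto
bisection verdicts: 0 is good, 125 is skip, 127 means the command could not
be run, a negative status means it was killed, and anything else is bad.
Sketch of that mapping:

    def transition(status):
        if status == 125:
            return b'skip'
        if status == 0:
            return b'good'
        if status == 127 or status < 0:
            raise RuntimeError('command failed to run or was killed')
        return b'bad'

    assert transition(1) == b'bad'
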
@@ -1098,13 +1119,13 @@
             extendnode = hbisect.extendrange(repo, state, nodes, good)
             if extendnode is not None:
                 ui.write(
-                    _("Extending search to changeset %d:%s\n")
+                    _(b"Extending search to changeset %d:%s\n")
                     % (extendnode.rev(), extendnode)
                 )
-                state['current'] = [extendnode.node()]
+                state[b'current'] = [extendnode.node()]
                 hbisect.save_state(repo, state)
                 return mayupdate(repo, extendnode.node())
-        raise error.Abort(_("nothing to extend"))
+        raise error.Abort(_(b"nothing to extend"))
 
     if changesets == 0:
         hbisect.printresult(ui, repo, state, displayer, nodes, good)
@@ -1118,28 +1139,28 @@
         rev = repo.changelog.rev(node)
         ui.write(
             _(
-                "Testing changeset %d:%s "
-                "(%d changesets remaining, ~%d tests)\n"
+                b"Testing changeset %d:%s "
+                b"(%d changesets remaining, ~%d tests)\n"
             )
             % (rev, short(node), changesets, tests)
         )
-        state['current'] = [node]
+        state[b'current'] = [node]
         hbisect.save_state(repo, state)
         return mayupdate(repo, node)
 
 
 @command(
-    'bookmarks|bookmark',
+    b'bookmarks|bookmark',
     [
-        ('f', 'force', False, _('force')),
-        ('r', 'rev', '', _('revision for bookmark action'), _('REV')),
-        ('d', 'delete', False, _('delete a given bookmark')),
-        ('m', 'rename', '', _('rename a given bookmark'), _('OLD')),
-        ('i', 'inactive', False, _('mark a bookmark inactive')),
-        ('l', 'list', False, _('list existing bookmarks')),
+        (b'f', b'force', False, _(b'force')),
+        (b'r', b'rev', b'', _(b'revision for bookmark action'), _(b'REV')),
+        (b'd', b'delete', False, _(b'delete a given bookmark')),
+        (b'm', b'rename', b'', _(b'rename a given bookmark'), _(b'OLD')),
+        (b'i', b'inactive', False, _(b'mark a bookmark inactive')),
+        (b'l', b'list', False, _(b'list existing bookmarks')),
     ]
     + formatteropts,
-    _('hg bookmarks [OPTIONS]... [NAME]...'),
+    _(b'hg bookmarks [OPTIONS]... [NAME]...'),
     helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
 )
 def bookmark(ui, repo, *names, **opts):
@@ -1203,73 +1224,78 @@
           hg book -ql .
     '''
     opts = pycompat.byteskwargs(opts)
-    force = opts.get('force')
-    rev = opts.get('rev')
-    inactive = opts.get('inactive')  # meaning add/rename to inactive bookmark
-
-    selactions = [k for k in ['delete', 'rename', 'list'] if opts.get(k)]
+    force = opts.get(b'force')
+    rev = opts.get(b'rev')
+    inactive = opts.get(b'inactive')  # meaning add/rename to inactive bookmark
+
+    selactions = [k for k in [b'delete', b'rename', b'list'] if opts.get(k)]
     if len(selactions) > 1:
         raise error.Abort(
-            _('--%s and --%s are incompatible') % tuple(selactions[:2])
+            _(b'--%s and --%s are incompatible') % tuple(selactions[:2])
         )
     if selactions:
         action = selactions[0]
     elif names or rev:
-        action = 'add'
+        action = b'add'
     elif inactive:
-        action = 'inactive'  # meaning deactivate
+        action = b'inactive'  # meaning deactivate
     else:
-        action = 'list'
-
-    if rev and action in {'delete', 'rename', 'list'}:
-        raise error.Abort(_("--rev is incompatible with --%s") % action)
-    if inactive and action in {'delete', 'list'}:
-        raise error.Abort(_("--inactive is incompatible with --%s") % action)
-    if not names and action in {'add', 'delete'}:
-        raise error.Abort(_("bookmark name required"))
-
-    if action in {'add', 'delete', 'rename', 'inactive'}:
-        with repo.wlock(), repo.lock(), repo.transaction('bookmark') as tr:
-            if action == 'delete':
+        action = b'list'
+
+    if rev and action in {b'delete', b'rename', b'list'}:
+        raise error.Abort(_(b"--rev is incompatible with --%s") % action)
+    if inactive and action in {b'delete', b'list'}:
+        raise error.Abort(_(b"--inactive is incompatible with --%s") % action)
+    if not names and action in {b'add', b'delete'}:
+        raise error.Abort(_(b"bookmark name required"))
+
+    if action in {b'add', b'delete', b'rename', b'inactive'}:
+        with repo.wlock(), repo.lock(), repo.transaction(b'bookmark') as tr:
+            if action == b'delete':
                 names = pycompat.maplist(repo._bookmarks.expandname, names)
                 bookmarks.delete(repo, tr, names)
-            elif action == 'rename':
+            elif action == b'rename':
                 if not names:
-                    raise error.Abort(_("new bookmark name required"))
+                    raise error.Abort(_(b"new bookmark name required"))
                 elif len(names) > 1:
-                    raise error.Abort(_("only one new bookmark name allowed"))
-                oldname = repo._bookmarks.expandname(opts['rename'])
+                    raise error.Abort(_(b"only one new bookmark name allowed"))
+                oldname = repo._bookmarks.expandname(opts[b'rename'])
                 bookmarks.rename(repo, tr, oldname, names[0], force, inactive)
-            elif action == 'add':
+            elif action == b'add':
                 bookmarks.addbookmarks(repo, tr, names, rev, force, inactive)
-            elif action == 'inactive':
+            elif action == b'inactive':
                 if len(repo._bookmarks) == 0:
-                    ui.status(_("no bookmarks set\n"))
+                    ui.status(_(b"no bookmarks set\n"))
                 elif not repo._activebookmark:
-                    ui.status(_("no active bookmark\n"))
+                    ui.status(_(b"no active bookmark\n"))
                 else:
                     bookmarks.deactivate(repo)
-    elif action == 'list':
+    elif action == b'list':
         names = pycompat.maplist(repo._bookmarks.expandname, names)
-        with ui.formatter('bookmarks', opts) as fm:
+        with ui.formatter(b'bookmarks', opts) as fm:
             bookmarks.printbookmarks(ui, repo, fm, names)
     else:
-        raise error.ProgrammingError('invalid action: %s' % action)
+        raise error.ProgrammingError(b'invalid action: %s' % action)
 
 
 @command(
-    'branch',
+    b'branch',
     [
         (
-            'f',
-            'force',
+            b'f',
+            b'force',
             None,
-            _('set branch name even if it shadows an existing branch'),
+            _(b'set branch name even if it shadows an existing branch'),
         ),
-        ('C', 'clean', None, _('reset branch name to parent branch name')),
-        ('r', 'rev', [], _('change branches of the given revs (EXPERIMENTAL)')),
+        (b'C', b'clean', None, _(b'reset branch name to parent branch name')),
+        (
+            b'r',
+            b'rev',
+            [],
+            _(b'change branches of the given revs (EXPERIMENTAL)'),
+        ),
     ],
-    _('[-fC] [NAME]'),
+    _(b'[-fC] [NAME]'),
     helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
 )
 def branch(ui, repo, label=None, **opts):
@@ -1302,64 +1328,64 @@
     Returns 0 on success.
     """
     opts = pycompat.byteskwargs(opts)
-    revs = opts.get('rev')
+    revs = opts.get(b'rev')
     if label:
         label = label.strip()
 
-    if not opts.get('clean') and not label:
+    if not opts.get(b'clean') and not label:
         if revs:
-            raise error.Abort(_("no branch name specified for the revisions"))
-        ui.write("%s\n" % repo.dirstate.branch())
+            raise error.Abort(_(b"no branch name specified for the revisions"))
+        ui.write(b"%s\n" % repo.dirstate.branch())
         return
 
     with repo.wlock():
-        if opts.get('clean'):
-            label = repo['.'].branch()
+        if opts.get(b'clean'):
+            label = repo[b'.'].branch()
             repo.dirstate.setbranch(label)
-            ui.status(_('reset working directory to branch %s\n') % label)
+            ui.status(_(b'reset working directory to branch %s\n') % label)
         elif label:
 
-            scmutil.checknewlabel(repo, label, 'branch')
+            scmutil.checknewlabel(repo, label, b'branch')
             if revs:
                 return cmdutil.changebranch(ui, repo, revs, label)
 
-            if not opts.get('force') and label in repo.branchmap():
+            if not opts.get(b'force') and label in repo.branchmap():
                 if label not in [p.branch() for p in repo[None].parents()]:
                     raise error.Abort(
-                        _('a branch of the same name already' ' exists'),
+                        _(b'a branch of the same name already' b' exists'),
                         # i18n: "it" refers to an existing branch
-                        hint=_("use 'hg update' to switch to it"),
+                        hint=_(b"use 'hg update' to switch to it"),
                     )
 
             repo.dirstate.setbranch(label)
-            ui.status(_('marked working directory as branch %s\n') % label)
+            ui.status(_(b'marked working directory as branch %s\n') % label)
 
             # find any open named branches aside from default
             for n, h, t, c in repo.branchmap().iterbranches():
-                if n != "default" and not c:
+                if n != b"default" and not c:
                     return 0
             ui.status(
                 _(
-                    '(branches are permanent and global, '
-                    'did you want a bookmark?)\n'
+                    b'(branches are permanent and global, '
+                    b'did you want a bookmark?)\n'
                 )
             )
 
 
 @command(
-    'branches',
+    b'branches',
     [
         (
-            'a',
-            'active',
+            b'a',
+            b'active',
             False,
-            _('show only branches that have unmerged heads (DEPRECATED)'),
+            _(b'show only branches that have unmerged heads (DEPRECATED)'),
         ),
-        ('c', 'closed', False, _('show normal and closed branches')),
-        ('r', 'rev', [], _('show branch name(s) of the given rev')),
+        (b'c', b'closed', False, _(b'show normal and closed branches')),
+        (b'r', b'rev', [], _(b'show branch name(s) of the given rev')),
     ]
     + formatteropts,
-    _('[-c]'),
+    _(b'[-c]'),
     helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
     intents={INTENT_READONLY},
 )
@@ -1388,15 +1414,15 @@
     """
 
     opts = pycompat.byteskwargs(opts)
-    revs = opts.get('rev')
+    revs = opts.get(b'rev')
     selectedbranches = None
     if revs:
         revs = scmutil.revrange(repo, revs)
         getbi = repo.revbranchcache().branchinfo
         selectedbranches = {getbi(r)[0] for r in revs}
 
-    ui.pager('branches')
-    fm = ui.formatter('branches', opts)
+    ui.pager(b'branches')
+    fm = ui.formatter(b'branches', opts)
     hexfunc = fm.hexfunc
 
     allheads = set(repo.heads())
@@ -1415,71 +1441,82 @@
         if active and not isactive:
             continue
         if isactive:
-            label = 'branches.active'
-            notice = ''
+            label = b'branches.active'
+            notice = b''
         elif not isopen:
             if not closed:
                 continue
-            label = 'branches.closed'
-            notice = _(' (closed)')
+            label = b'branches.closed'
+            notice = _(b' (closed)')
         else:
-            label = 'branches.inactive'
-            notice = _(' (inactive)')
+            label = b'branches.inactive'
+            notice = _(b' (inactive)')
         current = tag == repo.dirstate.branch()
         if current:
-            label = 'branches.current'
+            label = b'branches.current'
 
         fm.startitem()
-        fm.write('branch', '%s', tag, label=label)
+        fm.write(b'branch', b'%s', tag, label=label)
         rev = ctx.rev()
-        padsize = max(31 - len("%d" % rev) - encoding.colwidth(tag), 0)
-        fmt = ' ' * padsize + ' %d:%s'
+        padsize = max(31 - len(b"%d" % rev) - encoding.colwidth(tag), 0)
+        fmt = b' ' * padsize + b' %d:%s'
         fm.condwrite(
             not ui.quiet,
-            'rev node',
+            b'rev node',
             fmt,
             rev,
             hexfunc(ctx.node()),
-            label='log.changeset changeset.%s' % ctx.phasestr(),
+            label=b'log.changeset changeset.%s' % ctx.phasestr(),
         )
         fm.context(ctx=ctx)
         fm.data(active=isactive, closed=not isopen, current=current)
         if not ui.quiet:
             fm.plain(notice)
-        fm.plain('\n')
+        fm.plain(b'\n')
     fm.end()
 
 
 @command(
-    'bundle',
+    b'bundle',
     [
-        ('f', 'force', None, _('run even when the destination is unrelated')),
         (
-            'r',
-            'rev',
+            b'f',
+            b'force',
+            None,
+            _(b'run even when the destination is unrelated'),
+        ),
+        (
+            b'r',
+            b'rev',
             [],
-            _('a changeset intended to be added to the destination'),
-            _('REV'),
+            _(b'a changeset intended to be added to the destination'),
+            _(b'REV'),
         ),
         (
-            'b',
-            'branch',
+            b'b',
+            b'branch',
             [],
-            _('a specific branch you would like to bundle'),
-            _('BRANCH'),
+            _(b'a specific branch you would like to bundle'),
+            _(b'BRANCH'),
         ),
         (
-            '',
-            'base',
+            b'',
+            b'base',
             [],
-            _('a base changeset assumed to be available at the destination'),
-            _('REV'),
+            _(b'a base changeset assumed to be available at the destination'),
+            _(b'REV'),
         ),
-        ('a', 'all', None, _('bundle all changesets in the repository')),
-        ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE')),
+        (b'a', b'all', None, _(b'bundle all changesets in the repository')),
+        (
+            b't',
+            b'type',
+            b'bzip2',
+            _(b'bundle compression type to use'),
+            _(b'TYPE'),
+        ),
     ]
     + remoteopts,
-    _('[-f] [-t BUNDLESPEC] [-a] [-r REV]... [--base REV]... FILE [DEST]'),
+    _(b'[-f] [-t BUNDLESPEC] [-a] [-r REV]... [--base REV]... FILE [DEST]'),
     helpcategory=command.CATEGORY_IMPORT_EXPORT,
 )
 def bundle(ui, repo, fname, dest=None, **opts):
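[annotation] Not every hunk is a pure prefix change: the added prefixes push some option tuples past the 80-column limit, so black (run with -l 80 -S per the commit message) explodes them onto one element per line, as with the force option above. The column arithmetic, approximately:

    # ~79 columns at its real indentation -- fits on one line:
    ('f', 'force', None, _('run even when the destination is unrelated')),
    # ~82 columns with three b prefixes -- black splits the tuple:
    (
        b'f',
        b'force',
        None,
        _(b'run even when the destination is unrelated'),
    ),
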
@@ -1512,78 +1549,82 @@
     """
     opts = pycompat.byteskwargs(opts)
     revs = None
-    if 'rev' in opts:
-        revstrings = opts['rev']
+    if b'rev' in opts:
+        revstrings = opts[b'rev']
         revs = scmutil.revrange(repo, revstrings)
         if revstrings and not revs:
-            raise error.Abort(_('no commits to bundle'))
-
-    bundletype = opts.get('type', 'bzip2').lower()
+            raise error.Abort(_(b'no commits to bundle'))
+
+    bundletype = opts.get(b'type', b'bzip2').lower()
     try:
         bundlespec = exchange.parsebundlespec(repo, bundletype, strict=False)
     except error.UnsupportedBundleSpecification as e:
         raise error.Abort(
             pycompat.bytestr(e),
             hint=_(
-                "see 'hg help bundlespec' for supported " "values for --type"
+                b"see 'hg help bundlespec' for supported " b"values for --type"
             ),
         )
-    cgversion = bundlespec.contentopts["cg.version"]
+    cgversion = bundlespec.contentopts[b"cg.version"]
 
     # Packed bundles are a pseudo bundle format for now.
-    if cgversion == 's1':
+    if cgversion == b's1':
         raise error.Abort(
-            _('packed bundles cannot be produced by "hg bundle"'),
-            hint=_("use 'hg debugcreatestreamclonebundle'"),
+            _(b'packed bundles cannot be produced by "hg bundle"'),
+            hint=_(b"use 'hg debugcreatestreamclonebundle'"),
         )
 
-    if opts.get('all'):
+    if opts.get(b'all'):
         if dest:
             raise error.Abort(
-                _("--all is incompatible with specifying " "a destination")
+                _(b"--all is incompatible with specifying " b"a destination")
             )
-        if opts.get('base'):
-            ui.warn(_("ignoring --base because --all was specified\n"))
+        if opts.get(b'base'):
+            ui.warn(_(b"ignoring --base because --all was specified\n"))
         base = [nullrev]
     else:
-        base = scmutil.revrange(repo, opts.get('base'))
+        base = scmutil.revrange(repo, opts.get(b'base'))
     if cgversion not in changegroup.supportedoutgoingversions(repo):
         raise error.Abort(
-            _("repository does not support bundle version %s") % cgversion
+            _(b"repository does not support bundle version %s") % cgversion
         )
 
     if base:
         if dest:
             raise error.Abort(
-                _("--base is incompatible with specifying " "a destination")
+                _(b"--base is incompatible with specifying " b"a destination")
             )
         common = [repo[rev].node() for rev in base]
         heads = [repo[r].node() for r in revs] if revs else None
         outgoing = discovery.outgoing(repo, common, heads)
     else:
-        dest = ui.expandpath(dest or 'default-push', dest or 'default')
-        dest, branches = hg.parseurl(dest, opts.get('branch'))
+        dest = ui.expandpath(dest or b'default-push', dest or b'default')
+        dest, branches = hg.parseurl(dest, opts.get(b'branch'))
         other = hg.peer(repo, opts, dest)
         revs = [repo[r].hex() for r in revs]
         revs, checkout = hg.addbranchrevs(repo, repo, branches, revs)
         heads = revs and pycompat.maplist(repo.lookup, revs) or revs
         outgoing = discovery.findcommonoutgoing(
-            repo, other, onlyheads=heads, force=opts.get('force'), portable=True
+            repo,
+            other,
+            onlyheads=heads,
+            force=opts.get(b'force'),
+            portable=True,
         )
 
     if not outgoing.missing:
         scmutil.nochangesfound(ui, repo, not base and outgoing.excluded)
         return 1
 
-    if cgversion == '01':  # bundle1
-        bversion = 'HG10' + bundlespec.wirecompression
+    if cgversion == b'01':  # bundle1
+        bversion = b'HG10' + bundlespec.wirecompression
         bcompression = None
-    elif cgversion in ('02', '03'):
-        bversion = 'HG20'
+    elif cgversion in (b'02', b'03'):
+        bversion = b'HG20'
         bcompression = bundlespec.wirecompression
     else:
         raise error.ProgrammingError(
-            'bundle: unexpected changegroup version %s' % cgversion
+            b'bundle: unexpected changegroup version %s' % cgversion
         )
 
     # TODO compression options should be derived from bundlespec parsing.
@@ -1592,24 +1633,24 @@
     # b) introducing a command flag.
     compopts = {}
     complevel = ui.configint(
-        'experimental', 'bundlecomplevel.' + bundlespec.compression
+        b'experimental', b'bundlecomplevel.' + bundlespec.compression
     )
     if complevel is None:
-        complevel = ui.configint('experimental', 'bundlecomplevel')
+        complevel = ui.configint(b'experimental', b'bundlecomplevel')
     if complevel is not None:
-        compopts['level'] = complevel
+        compopts[b'level'] = complevel
 
     # Allow overriding the bundling of obsmarker in phases through
     # configuration while we don't have a bundle version that include them
-    if repo.ui.configbool('experimental', 'evolution.bundle-obsmarker'):
-        bundlespec.contentopts['obsolescence'] = True
-    if repo.ui.configbool('experimental', 'bundle-phases'):
-        bundlespec.contentopts['phases'] = True
+    if repo.ui.configbool(b'experimental', b'evolution.bundle-obsmarker'):
+        bundlespec.contentopts[b'obsolescence'] = True
+    if repo.ui.configbool(b'experimental', b'bundle-phases'):
+        bundlespec.contentopts[b'phases'] = True
 
     bundle2.writenewbundle(
         ui,
         repo,
-        'bundle',
+        b'bundle',
         fname,
         bversion,
         outgoing,
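[annotation] The compression-option hunk above is a reminder that the rewrite has to be consistent on both sides of every dict: on Python 3, b'level' and 'level' are distinct keys, so a str lookup against a byteified dict misses silently rather than failing loudly. For example:

    compopts = {}
    compopts[b'level'] = 9
    assert compopts.get('level') is None   # str key: silent miss
    assert compopts[b'level'] == 9
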
@@ -1620,21 +1661,21 @@
 
 
 @command(
-    'cat',
+    b'cat',
     [
         (
-            'o',
-            'output',
-            '',
-            _('print output to file with formatted name'),
-            _('FORMAT'),
+            b'o',
+            b'output',
+            b'',
+            _(b'print output to file with formatted name'),
+            _(b'FORMAT'),
         ),
-        ('r', 'rev', '', _('print the given revision'), _('REV')),
-        ('', 'decode', None, _('apply any matching decode filter')),
+        (b'r', b'rev', b'', _(b'print the given revision'), _(b'REV')),
+        (b'', b'decode', None, _(b'apply any matching decode filter')),
     ]
     + walkopts
     + formatteropts,
-    _('[OPTION]... FILE...'),
+    _(b'[OPTION]... FILE...'),
     helpcategory=command.CATEGORY_FILE_CONTENTS,
     inferrepo=True,
     intents={INTENT_READONLY},
@@ -1674,71 +1715,71 @@
     Returns 0 on success.
     """
     opts = pycompat.byteskwargs(opts)
-    rev = opts.get('rev')
+    rev = opts.get(b'rev')
     if rev:
-        repo = scmutil.unhidehashlikerevs(repo, [rev], 'nowarn')
+        repo = scmutil.unhidehashlikerevs(repo, [rev], b'nowarn')
     ctx = scmutil.revsingle(repo, rev)
     m = scmutil.match(ctx, (file1,) + pats, opts)
-    fntemplate = opts.pop('output', '')
+    fntemplate = opts.pop(b'output', b'')
     if cmdutil.isstdiofilename(fntemplate):
-        fntemplate = ''
+        fntemplate = b''
 
     if fntemplate:
-        fm = formatter.nullformatter(ui, 'cat', opts)
+        fm = formatter.nullformatter(ui, b'cat', opts)
     else:
-        ui.pager('cat')
-        fm = ui.formatter('cat', opts)
+        ui.pager(b'cat')
+        fm = ui.formatter(b'cat', opts)
     with fm:
         return cmdutil.cat(
-            ui, repo, ctx, m, fm, fntemplate, '', **pycompat.strkwargs(opts)
+            ui, repo, ctx, m, fm, fntemplate, b'', **pycompat.strkwargs(opts)
         )
 
 
 @command(
-    'clone',
+    b'clone',
     [
         (
-            'U',
-            'noupdate',
+            b'U',
+            b'noupdate',
             None,
             _(
-                'the clone will include an empty working '
-                'directory (only a repository)'
+                b'the clone will include an empty working '
+                b'directory (only a repository)'
             ),
         ),
         (
-            'u',
-            'updaterev',
-            '',
-            _('revision, tag, or branch to check out'),
-            _('REV'),
+            b'u',
+            b'updaterev',
+            b'',
+            _(b'revision, tag, or branch to check out'),
+            _(b'REV'),
         ),
         (
-            'r',
-            'rev',
+            b'r',
+            b'rev',
             [],
             _(
-                'do not clone everything, but include this changeset'
-                ' and its ancestors'
+                b'do not clone everything, but include this changeset'
+                b' and its ancestors'
             ),
-            _('REV'),
+            _(b'REV'),
         ),
         (
-            'b',
-            'branch',
+            b'b',
+            b'branch',
             [],
             _(
-                'do not clone everything, but include this branch\'s'
-                ' changesets and their ancestors'
+                b'do not clone everything, but include this branch\'s'
+                b' changesets and their ancestors'
             ),
-            _('BRANCH'),
+            _(b'BRANCH'),
         ),
-        ('', 'pull', None, _('use pull protocol to copy metadata')),
-        ('', 'uncompressed', None, _('an alias to --stream (DEPRECATED)')),
-        ('', 'stream', None, _('clone with minimal data processing')),
+        (b'', b'pull', None, _(b'use pull protocol to copy metadata')),
+        (b'', b'uncompressed', None, _(b'an alias to --stream (DEPRECATED)')),
+        (b'', b'stream', None, _(b'clone with minimal data processing')),
     ]
     + remoteopts,
-    _('[OPTION]... SOURCE [DEST]'),
+    _(b'[OPTION]... SOURCE [DEST]'),
     helpcategory=command.CATEGORY_REPO_CREATION,
     helpbasic=True,
     norepo=True,
@@ -1852,68 +1893,68 @@
     Returns 0 on success.
     """
     opts = pycompat.byteskwargs(opts)
-    if opts.get('noupdate') and opts.get('updaterev'):
-        raise error.Abort(_("cannot specify both --noupdate and --updaterev"))
+    if opts.get(b'noupdate') and opts.get(b'updaterev'):
+        raise error.Abort(_(b"cannot specify both --noupdate and --updaterev"))
 
     # --include/--exclude can come from narrow or sparse.
     includepats, excludepats = None, None
 
     # hg.clone() differentiates between None and an empty set. So make sure
     # patterns are sets if narrow is requested without patterns.
-    if opts.get('narrow'):
+    if opts.get(b'narrow'):
         includepats = set()
         excludepats = set()
 
-        if opts.get('include'):
-            includepats = narrowspec.parsepatterns(opts.get('include'))
-        if opts.get('exclude'):
-            excludepats = narrowspec.parsepatterns(opts.get('exclude'))
+        if opts.get(b'include'):
+            includepats = narrowspec.parsepatterns(opts.get(b'include'))
+        if opts.get(b'exclude'):
+            excludepats = narrowspec.parsepatterns(opts.get(b'exclude'))
 
     r = hg.clone(
         ui,
         opts,
         source,
         dest,
-        pull=opts.get('pull'),
-        stream=opts.get('stream') or opts.get('uncompressed'),
-        revs=opts.get('rev'),
-        update=opts.get('updaterev') or not opts.get('noupdate'),
-        branch=opts.get('branch'),
-        shareopts=opts.get('shareopts'),
+        pull=opts.get(b'pull'),
+        stream=opts.get(b'stream') or opts.get(b'uncompressed'),
+        revs=opts.get(b'rev'),
+        update=opts.get(b'updaterev') or not opts.get(b'noupdate'),
+        branch=opts.get(b'branch'),
+        shareopts=opts.get(b'shareopts'),
         storeincludepats=includepats,
         storeexcludepats=excludepats,
-        depth=opts.get('depth') or None,
+        depth=opts.get(b'depth') or None,
     )
 
     return r is None
 
 
 @command(
-    'commit|ci',
+    b'commit|ci',
     [
         (
-            'A',
-            'addremove',
+            b'A',
+            b'addremove',
             None,
-            _('mark new/missing files as added/removed before committing'),
+            _(b'mark new/missing files as added/removed before committing'),
         ),
-        ('', 'close-branch', None, _('mark a branch head as closed')),
-        ('', 'amend', None, _('amend the parent of the working directory')),
-        ('s', 'secret', None, _('use the secret phase for committing')),
-        ('e', 'edit', None, _('invoke editor on commit messages')),
+        (b'', b'close-branch', None, _(b'mark a branch head as closed')),
+        (b'', b'amend', None, _(b'amend the parent of the working directory')),
+        (b's', b'secret', None, _(b'use the secret phase for committing')),
+        (b'e', b'edit', None, _(b'invoke editor on commit messages')),
         (
-            '',
-            'force-close-branch',
+            b'',
+            b'force-close-branch',
             None,
-            _('forcibly close branch from a non-head changeset (ADVANCED)'),
+            _(b'forcibly close branch from a non-head changeset (ADVANCED)'),
         ),
-        ('i', 'interactive', None, _('use interactive mode')),
+        (b'i', b'interactive', None, _(b'use interactive mode')),
     ]
     + walkopts
     + commitopts
     + commitopts2
     + subrepoopts,
-    _('[OPTION]... [FILE]...'),
+    _(b'[OPTION]... [FILE]...'),
     helpcategory=command.CATEGORY_COMMITTING,
     helpbasic=True,
     inferrepo=True,
@@ -1989,11 +2030,11 @@
         return 1 if ret == 0 else ret
 
     opts = pycompat.byteskwargs(opts)
-    if opts.get('subrepos'):
-        if opts.get('amend'):
-            raise error.Abort(_('cannot amend with --subrepos'))
+    if opts.get(b'subrepos'):
+        if opts.get(b'amend'):
+            raise error.Abort(_(b'cannot amend with --subrepos'))
         # Let --subrepos on the command line override config setting.
-        ui.setconfig('ui', 'commitsubrepos', True, 'commit')
+        ui.setconfig(b'ui', b'commitsubrepos', True, b'commit')
 
     cmdutil.checkunfinished(repo, commit=True)
 
@@ -2001,38 +2042,38 @@
     bheads = repo.branchheads(branch)
 
     extra = {}
-    if opts.get('close_branch') or opts.get('force_close_branch'):
-        extra['close'] = '1'
-
-        if repo['.'].closesbranch():
+    if opts.get(b'close_branch') or opts.get(b'force_close_branch'):
+        extra[b'close'] = b'1'
+
+        if repo[b'.'].closesbranch():
             raise error.Abort(
-                _('current revision is already a branch closing' ' head')
+                _(b'current revision is already a branch closing' b' head')
             )
         elif not bheads:
-            raise error.Abort(_('branch "%s" has no heads to close') % branch)
+            raise error.Abort(_(b'branch "%s" has no heads to close') % branch)
         elif (
-            branch == repo['.'].branch()
-            and repo['.'].node() not in bheads
-            and not opts.get('force_close_branch')
+            branch == repo[b'.'].branch()
+            and repo[b'.'].node() not in bheads
+            and not opts.get(b'force_close_branch')
         ):
             hint = _(
-                'use --force-close-branch to close branch from a non-head'
-                ' changeset'
+                b'use --force-close-branch to close branch from a non-head'
+                b' changeset'
             )
-            raise error.Abort(_('can only close branch heads'), hint=hint)
-        elif opts.get('amend'):
+            raise error.Abort(_(b'can only close branch heads'), hint=hint)
+        elif opts.get(b'amend'):
             if (
-                repo['.'].p1().branch() != branch
-                and repo['.'].p2().branch() != branch
+                repo[b'.'].p1().branch() != branch
+                and repo[b'.'].p2().branch() != branch
             ):
-                raise error.Abort(_('can only close branch heads'))
-
-    if opts.get('amend'):
-        if ui.configbool('ui', 'commitsubrepos'):
-            raise error.Abort(_('cannot amend with ui.commitsubrepos enabled'))
-
-        old = repo['.']
-        rewriteutil.precheck(repo, [old.rev()], 'amend')
+                raise error.Abort(_(b'can only close branch heads'))
+
+    if opts.get(b'amend'):
+        if ui.configbool(b'ui', b'commitsubrepos'):
+            raise error.Abort(_(b'cannot amend with ui.commitsubrepos enabled'))
+
+        old = repo[b'.']
+        rewriteutil.precheck(repo, [old.rev()], b'amend')
 
         # Currently histedit gets confused if an amend happens while histedit
         # is in progress. Since we have a checkunfinished command, we are
@@ -2045,28 +2086,28 @@
 
         node = cmdutil.amend(ui, repo, old, extra, pats, opts)
         if node == old.node():
-            ui.status(_("nothing changed\n"))
+            ui.status(_(b"nothing changed\n"))
             return 1
     else:
 
         def commitfunc(ui, repo, message, match, opts):
             overrides = {}
-            if opts.get('secret'):
-                overrides[('phases', 'new-commit')] = 'secret'
+            if opts.get(b'secret'):
+                overrides[(b'phases', b'new-commit')] = b'secret'
 
             baseui = repo.baseui
-            with baseui.configoverride(overrides, 'commit'):
-                with ui.configoverride(overrides, 'commit'):
+            with baseui.configoverride(overrides, b'commit'):
+                with ui.configoverride(overrides, b'commit'):
                     editform = cmdutil.mergeeditform(
-                        repo[None], 'commit.normal'
+                        repo[None], b'commit.normal'
                     )
                     editor = cmdutil.getcommiteditor(
                         editform=editform, **pycompat.strkwargs(opts)
                     )
                     return repo.commit(
                         message,
-                        opts.get('user'),
-                        opts.get('date'),
+                        opts.get(b'user'),
+                        opts.get(b'date'),
                         match,
                         editor=editor,
                         extra=extra,
@@ -2079,18 +2120,18 @@
             if stat[3]:
                 ui.status(
                     _(
-                        "nothing changed (%d missing files, see "
-                        "'hg status')\n"
+                        b"nothing changed (%d missing files, see "
+                        b"'hg status')\n"
                     )
                     % len(stat[3])
                 )
             else:
-                ui.status(_("nothing changed\n"))
+                ui.status(_(b"nothing changed\n"))
             return 1
 
     cmdutil.commitstatus(repo, node, branch, bheads, opts)
 
-    if not ui.quiet and ui.configbool('commands', 'commit.post-status'):
+    if not ui.quiet and ui.configbool(b'commands', b'commit.post-status'):
         status(
             ui,
             repo,
@@ -2099,20 +2140,20 @@
             removed=True,
             deleted=True,
             unknown=True,
-            subrepos=opts.get('subrepos'),
+            subrepos=opts.get(b'subrepos'),
         )
 
 
 @command(
-    'config|showconfig|debugconfig',
+    b'config|showconfig|debugconfig',
     [
-        ('u', 'untrusted', None, _('show untrusted configuration options')),
-        ('e', 'edit', None, _('edit user config')),
-        ('l', 'local', None, _('edit repository config')),
-        ('g', 'global', None, _('edit global config')),
+        (b'u', b'untrusted', None, _(b'show untrusted configuration options')),
+        (b'e', b'edit', None, _(b'edit user config')),
+        (b'l', b'local', None, _(b'edit repository config')),
+        (b'g', b'global', None, _(b'edit global config')),
     ]
     + formatteropts,
-    _('[-u] [NAME]...'),
+    _(b'[-u] [NAME]...'),
     helpcategory=command.CATEGORY_HELP,
     optionalrepo=True,
     intents={INTENT_READONLY},
@@ -2152,15 +2193,15 @@
     """
 
     opts = pycompat.byteskwargs(opts)
-    if opts.get('edit') or opts.get('local') or opts.get('global'):
-        if opts.get('local') and opts.get('global'):
-            raise error.Abort(_("can't use --local and --global together"))
-
-        if opts.get('local'):
+    if opts.get(b'edit') or opts.get(b'local') or opts.get(b'global'):
+        if opts.get(b'local') and opts.get(b'global'):
+            raise error.Abort(_(b"can't use --local and --global together"))
+
+        if opts.get(b'local'):
             if not repo:
-                raise error.Abort(_("can't use --local outside a repository"))
-            paths = [repo.vfs.join('hgrc')]
-        elif opts.get('global'):
+                raise error.Abort(_(b"can't use --local outside a repository"))
+            paths = [repo.vfs.join(b'hgrc')]
+        elif opts.get(b'global'):
             paths = rcutil.systemrcpath()
         else:
             paths = rcutil.userrcpath()
@@ -2169,42 +2210,42 @@
             if os.path.exists(f):
                 break
         else:
-            if opts.get('global'):
-                samplehgrc = uimod.samplehgrcs['global']
-            elif opts.get('local'):
-                samplehgrc = uimod.samplehgrcs['local']
+            if opts.get(b'global'):
+                samplehgrc = uimod.samplehgrcs[b'global']
+            elif opts.get(b'local'):
+                samplehgrc = uimod.samplehgrcs[b'local']
             else:
-                samplehgrc = uimod.samplehgrcs['user']
+                samplehgrc = uimod.samplehgrcs[b'user']
 
             f = paths[0]
-            fp = open(f, "wb")
+            fp = open(f, b"wb")
             fp.write(util.tonativeeol(samplehgrc))
             fp.close()
 
         editor = ui.geteditor()
         ui.system(
-            "%s \"%s\"" % (editor, f),
+            b"%s \"%s\"" % (editor, f),
             onerr=error.Abort,
-            errprefix=_("edit failed"),
-            blockedtag='config_edit',
+            errprefix=_(b"edit failed"),
+            blockedtag=b'config_edit',
         )
         return
-    ui.pager('config')
-    fm = ui.formatter('config', opts)
+    ui.pager(b'config')
+    fm = ui.formatter(b'config', opts)
     for t, f in rcutil.rccomponents():
-        if t == 'path':
-            ui.debug('read config from: %s\n' % f)
-        elif t == 'items':
+        if t == b'path':
+            ui.debug(b'read config from: %s\n' % f)
+        elif t == b'items':
             for section, name, value, source in f:
-                ui.debug('set config by: %s\n' % source)
+                ui.debug(b'set config by: %s\n' % source)
         else:
-            raise error.ProgrammingError('unknown rctype: %s' % t)
-    untrusted = bool(opts.get('untrusted'))
+            raise error.ProgrammingError(b'unknown rctype: %s' % t)
+    untrusted = bool(opts.get(b'untrusted'))
 
     selsections = selentries = []
     if values:
-        selsections = [v for v in values if '.' not in v]
-        selentries = [v for v in values if '.' in v]
+        selsections = [v for v in values if b'.' not in v]
+        selentries = [v for v in values if b'.' in v]
     uniquesel = len(selentries) == 1 and not selsections
     selsections = set(selsections)
     selentries = set(selentries)
@@ -2215,18 +2256,18 @@
         value = pycompat.bytestr(value)
         defaultvalue = ui.configdefault(section, name)
         if fm.isplain():
-            source = source or 'none'
-            value = value.replace('\n', '\\n')
-        entryname = section + '.' + name
+            source = source or b'none'
+            value = value.replace(b'\n', b'\\n')
+        entryname = section + b'.' + name
         if values and not (section in selsections or entryname in selentries):
             continue
         fm.startitem()
-        fm.condwrite(ui.debugflag, 'source', '%s: ', source)
+        fm.condwrite(ui.debugflag, b'source', b'%s: ', source)
         if uniquesel:
             fm.data(name=entryname)
-            fm.write('value', '%s\n', value)
+            fm.write(b'value', b'%s\n', value)
         else:
-            fm.write('name value', '%s=%s\n', entryname, value)
+            fm.write(b'name value', b'%s=%s\n', entryname, value)
         fm.data(defaultvalue=defaultvalue)
         matched = True
     fm.end()
@@ -2236,7 +2277,7 @@
 
 
 @command(
-    'continue',
+    b'continue',
     dryrunopts,
     helpcategory=command.CATEGORY_CHANGE_MANAGEMENT,
     helpbasic=True,
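[annotation] The continue body below keeps opts.get(r'dry_run') untouched: this command never calls pycompat.byteskwargs(), so its keys stay native str, and the r prefix is the conventional hint that makes contrib/byteify-strings.py leave a literal alone (r'dry_run' is still just the str 'dry_run' at runtime). A sketch of the convention with a stand-in dict:

    opts = {'dry_run': True}        # **kwargs-style dict, str keys
    dryrun = opts.get(r'dry_run')   # r'' == plain str; the converter skips it
    assert dryrun is True
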
@@ -2252,30 +2293,35 @@
     dryrun = opts.get(r'dry_run')
     contstate = cmdutil.getunfinishedstate(repo)
     if not contstate:
-        raise error.Abort(_('no operation in progress'))
+        raise error.Abort(_(b'no operation in progress'))
     if not contstate.continuefunc:
         raise error.Abort(
             (
-                _("%s in progress but does not support " "'hg continue'")
+                _(b"%s in progress but does not support " b"'hg continue'")
                 % (contstate._opname)
             ),
             hint=contstate.continuemsg(),
         )
     if dryrun:
-        ui.status(_('%s in progress, will be resumed\n') % (contstate._opname))
+        ui.status(_(b'%s in progress, will be resumed\n') % (contstate._opname))
         return
     return contstate.continuefunc(ui, repo)
 
 
 @command(
-    'copy|cp',
+    b'copy|cp',
     [
-        ('A', 'after', None, _('record a copy that has already occurred')),
-        ('f', 'force', None, _('forcibly copy over an existing managed file')),
+        (b'A', b'after', None, _(b'record a copy that has already occurred')),
+        (
+            b'f',
+            b'force',
+            None,
+            _(b'forcibly copy over an existing managed file'),
+        ),
     ]
     + walkopts
     + dryrunopts,
-    _('[OPTION]... SOURCE... DEST'),
+    _(b'[OPTION]... SOURCE... DEST'),
     helpcategory=command.CATEGORY_FILE_CONTENTS,
 )
 def copy(ui, repo, *pats, **opts):
@@ -2300,28 +2346,28 @@
 
 
 @command(
-    'debugcommands',
+    b'debugcommands',
     [],
-    _('[COMMAND]'),
+    _(b'[COMMAND]'),
     helpcategory=command.CATEGORY_HELP,
     norepo=True,
 )
-def debugcommands(ui, cmd='', *args):
+def debugcommands(ui, cmd=b'', *args):
     """list all available commands and options"""
     for cmd, vals in sorted(table.iteritems()):
-        cmd = cmd.split('|')[0]
-        opts = ', '.join([i[1] for i in vals[1]])
-        ui.write('%s: %s\n' % (cmd, opts))
+        cmd = cmd.split(b'|')[0]
+        opts = b', '.join([i[1] for i in vals[1]])
+        ui.write(b'%s: %s\n' % (cmd, opts))
 
 
 @command(
-    'debugcomplete',
-    [('o', 'options', None, _('show the command options'))],
-    _('[-o] CMD'),
+    b'debugcomplete',
+    [(b'o', b'options', None, _(b'show the command options'))],
+    _(b'[-o] CMD'),
     helpcategory=command.CATEGORY_HELP,
     norepo=True,
 )
-def debugcomplete(ui, cmd='', **opts):
+def debugcomplete(ui, cmd=b'', **opts):
     """returns the completion list associated with the given command"""
 
     if opts.get(r'options'):
@@ -2332,31 +2378,31 @@
             otables.append(entry[1])
         for t in otables:
             for o in t:
-                if "(DEPRECATED)" in o[3]:
+                if b"(DEPRECATED)" in o[3]:
                     continue
                 if o[0]:
-                    options.append('-%s' % o[0])
-                options.append('--%s' % o[1])
-        ui.write("%s\n" % "\n".join(options))
+                    options.append(b'-%s' % o[0])
+                options.append(b'--%s' % o[1])
+        ui.write(b"%s\n" % b"\n".join(options))
         return
 
     cmdlist, unused_allcmds = cmdutil.findpossible(cmd, table)
     if ui.verbose:
-        cmdlist = [' '.join(c[0]) for c in cmdlist.values()]
-    ui.write("%s\n" % "\n".join(sorted(cmdlist)))
+        cmdlist = [b' '.join(c[0]) for c in cmdlist.values()]
+    ui.write(b"%s\n" % b"\n".join(sorted(cmdlist)))
 
 
 @command(
-    'diff',
+    b'diff',
     [
-        ('r', 'rev', [], _('revision'), _('REV')),
-        ('c', 'change', '', _('change made by revision'), _('REV')),
+        (b'r', b'rev', [], _(b'revision'), _(b'REV')),
+        (b'c', b'change', b'', _(b'change made by revision'), _(b'REV')),
     ]
     + diffopts
     + diffopts2
     + walkopts
     + subrepoopts,
-    _('[OPTION]... ([-c REV] | [-r REV1 [-r REV2]]) [FILE]...'),
+    _(b'[OPTION]... ([-c REV] | [-r REV1 [-r REV2]]) [FILE]...'),
     helpcategory=command.CATEGORY_FILE_CONTENTS,
     helpbasic=True,
     inferrepo=True,
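[annotation] The debugcommands/debugcomplete hunks above rely on bytes supporting the same operations the old str code used: b'%s' % ... has worked since Python 3.5 (PEP 461), and bytes split/join work as long as their arguments are bytes too. A self-contained example of the pattern:

    cmd = b'bookmarks|bookmark'
    name = cmd.split(b'|')[0]               # bytes methods take bytes args
    line = b'%s: %s\n' % (name, b', '.join([b'force', b'rev']))
    assert line == b'bookmarks: force, rev\n'
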
@@ -2421,20 +2467,20 @@
     """
 
     opts = pycompat.byteskwargs(opts)
-    revs = opts.get('rev')
-    change = opts.get('change')
-    stat = opts.get('stat')
-    reverse = opts.get('reverse')
+    revs = opts.get(b'rev')
+    change = opts.get(b'change')
+    stat = opts.get(b'stat')
+    reverse = opts.get(b'reverse')
 
     if revs and change:
-        msg = _('cannot specify --rev and --change at the same time')
+        msg = _(b'cannot specify --rev and --change at the same time')
         raise error.Abort(msg)
     elif change:
-        repo = scmutil.unhidehashlikerevs(repo, [change], 'nowarn')
+        repo = scmutil.unhidehashlikerevs(repo, [change], b'nowarn')
         ctx2 = scmutil.revsingle(repo, change, None)
         ctx1 = ctx2.p1()
     else:
-        repo = scmutil.unhidehashlikerevs(repo, revs, 'nowarn')
+        repo = scmutil.unhidehashlikerevs(repo, revs, b'nowarn')
         ctx1, ctx2 = scmutil.revpair(repo, revs)
     node1, node2 = ctx1.node(), ctx2.node()
 
@@ -2444,7 +2490,7 @@
     diffopts = patch.diffallopts(ui, opts)
     m = scmutil.match(ctx2, pats, opts)
     m = repo.narrowmatch(m)
-    ui.pager('diff')
+    ui.pager(b'diff')
     logcmdutil.diffordiffstat(
         ui,
         repo,
@@ -2453,34 +2499,34 @@
         node2,
         m,
         stat=stat,
-        listsubrepos=opts.get('subrepos'),
-        root=opts.get('root'),
+        listsubrepos=opts.get(b'subrepos'),
+        root=opts.get(b'root'),
     )
 
 
 @command(
-    'export',
+    b'export',
     [
         (
-            'B',
-            'bookmark',
-            '',
-            _('export changes only reachable by given bookmark'),
-            _('BOOKMARK'),
+            b'B',
+            b'bookmark',
+            b'',
+            _(b'export changes only reachable by given bookmark'),
+            _(b'BOOKMARK'),
         ),
         (
-            'o',
-            'output',
-            '',
-            _('print output to file with formatted name'),
-            _('FORMAT'),
+            b'o',
+            b'output',
+            b'',
+            _(b'print output to file with formatted name'),
+            _(b'FORMAT'),
         ),
-        ('', 'switch-parent', None, _('diff against the second parent')),
-        ('r', 'rev', [], _('revisions to export'), _('REV')),
+        (b'', b'switch-parent', None, _(b'diff against the second parent')),
+        (b'r', b'rev', [], _(b'revisions to export'), _(b'REV')),
     ]
     + diffopts
     + formatteropts,
-    _('[OPTION]... [-o OUTFILESPEC] [-r] [REV]...'),
+    _(b'[OPTION]... [-o OUTFILESPEC] [-r] [REV]...'),
     helpcategory=command.CATEGORY_IMPORT_EXPORT,
     helpbasic=True,
     intents={INTENT_READONLY},
@@ -2560,61 +2606,72 @@
     Returns 0 on success.
     """
     opts = pycompat.byteskwargs(opts)
-    bookmark = opts.get('bookmark')
-    changesets += tuple(opts.get('rev', []))
+    bookmark = opts.get(b'bookmark')
+    changesets += tuple(opts.get(b'rev', []))
 
     if bookmark and changesets:
-        raise error.Abort(_("-r and -B are mutually exclusive"))
+        raise error.Abort(_(b"-r and -B are mutually exclusive"))
 
     if bookmark:
         if bookmark not in repo._bookmarks:
-            raise error.Abort(_("bookmark '%s' not found") % bookmark)
+            raise error.Abort(_(b"bookmark '%s' not found") % bookmark)
 
         revs = scmutil.bookmarkrevs(repo, bookmark)
     else:
         if not changesets:
-            changesets = ['.']
-
-        repo = scmutil.unhidehashlikerevs(repo, changesets, 'nowarn')
+            changesets = [b'.']
+
+        repo = scmutil.unhidehashlikerevs(repo, changesets, b'nowarn')
         revs = scmutil.revrange(repo, changesets)
 
     if not revs:
-        raise error.Abort(_("export requires at least one changeset"))
+        raise error.Abort(_(b"export requires at least one changeset"))
     if len(revs) > 1:
-        ui.note(_('exporting patches:\n'))
+        ui.note(_(b'exporting patches:\n'))
     else:
-        ui.note(_('exporting patch:\n'))
-
-    fntemplate = opts.get('output')
+        ui.note(_(b'exporting patch:\n'))
+
+    fntemplate = opts.get(b'output')
     if cmdutil.isstdiofilename(fntemplate):
-        fntemplate = ''
+        fntemplate = b''
 
     if fntemplate:
-        fm = formatter.nullformatter(ui, 'export', opts)
+        fm = formatter.nullformatter(ui, b'export', opts)
     else:
-        ui.pager('export')
-        fm = ui.formatter('export', opts)
+        ui.pager(b'export')
+        fm = ui.formatter(b'export', opts)
     with fm:
         cmdutil.export(
             repo,
             revs,
             fm,
             fntemplate=fntemplate,
-            switch_parent=opts.get('switch_parent'),
+            switch_parent=opts.get(b'switch_parent'),
             opts=patch.diffallopts(ui, opts),
         )
 
 
 @command(
-    'files',
+    b'files',
     [
-        ('r', 'rev', '', _('search the repository as it is in REV'), _('REV')),
-        ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
+        (
+            b'r',
+            b'rev',
+            b'',
+            _(b'search the repository as it is in REV'),
+            _(b'REV'),
+        ),
+        (
+            b'0',
+            b'print0',
+            None,
+            _(b'end filenames with NUL, for use with xargs'),
+        ),
     ]
     + walkopts
     + formatteropts
     + subrepoopts,
-    _('[OPTION]... [FILE]...'),
+    _(b'[OPTION]... [FILE]...'),
     helpcategory=command.CATEGORY_WORKING_DIRECTORY,
     intents={INTENT_READONLY},
 )
@@ -2673,31 +2730,31 @@
     """
 
     opts = pycompat.byteskwargs(opts)
-    rev = opts.get('rev')
+    rev = opts.get(b'rev')
     if rev:
-        repo = scmutil.unhidehashlikerevs(repo, [rev], 'nowarn')
+        repo = scmutil.unhidehashlikerevs(repo, [rev], b'nowarn')
     ctx = scmutil.revsingle(repo, rev, None)
 
-    end = '\n'
-    if opts.get('print0'):
-        end = '\0'
-    fmt = '%s' + end
+    end = b'\n'
+    if opts.get(b'print0'):
+        end = b'\0'
+    fmt = b'%s' + end
 
     m = scmutil.match(ctx, pats, opts)
-    ui.pager('files')
+    ui.pager(b'files')
     uipathfn = scmutil.getuipathfn(ctx.repo(), legacyrelativevalue=True)
-    with ui.formatter('files', opts) as fm:
+    with ui.formatter(b'files', opts) as fm:
         return cmdutil.files(
-            ui, ctx, m, uipathfn, fm, fmt, opts.get('subrepos')
+            ui, ctx, m, uipathfn, fm, fmt, opts.get(b'subrepos')
         )
 
 
 @command(
-    'forget',
-    [('i', 'interactive', None, _('use interactive mode')),]
+    b'forget',
+    [(b'i', b'interactive', None, _(b'use interactive mode')),]
     + walkopts
     + dryrunopts,
-    _('[OPTION]... FILE...'),
+    _(b'[OPTION]... FILE...'),
     helpcategory=command.CATEGORY_WORKING_DIRECTORY,
     helpbasic=True,
     inferrepo=True,
@@ -2733,16 +2790,16 @@
 
     opts = pycompat.byteskwargs(opts)
     if not pats:
-        raise error.Abort(_('no files specified'))
+        raise error.Abort(_(b'no files specified'))
 
     m = scmutil.match(repo[None], pats, opts)
-    dryrun, interactive = opts.get('dry_run'), opts.get('interactive')
+    dryrun, interactive = opts.get(b'dry_run'), opts.get(b'interactive')
     uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
     rejected = cmdutil.forget(
         ui,
         repo,
         m,
-        prefix="",
+        prefix=b"",
         uipathfn=uipathfn,
         explicitonly=False,
         dryrun=dryrun,
@@ -2752,40 +2809,45 @@
 
 
 @command(
-    'graft',
+    b'graft',
     [
-        ('r', 'rev', [], _('revisions to graft'), _('REV')),
+        (b'r', b'rev', [], _(b'revisions to graft'), _(b'REV')),
         (
-            '',
-            'base',
-            '',
-            _('base revision when doing the graft merge (ADVANCED)'),
-            _('REV'),
+            b'',
+            b'base',
+            b'',
+            _(b'base revision when doing the graft merge (ADVANCED)'),
+            _(b'REV'),
         ),
-        ('c', 'continue', False, _('resume interrupted graft')),
-        ('', 'stop', False, _('stop interrupted graft')),
-        ('', 'abort', False, _('abort interrupted graft')),
-        ('e', 'edit', False, _('invoke editor on commit messages')),
-        ('', 'log', None, _('append graft info to log message')),
+        (b'c', b'continue', False, _(b'resume interrupted graft')),
+        (b'', b'stop', False, _(b'stop interrupted graft')),
+        (b'', b'abort', False, _(b'abort interrupted graft')),
+        (b'e', b'edit', False, _(b'invoke editor on commit messages')),
+        (b'', b'log', None, _(b'append graft info to log message')),
         (
-            '',
-            'no-commit',
+            b'',
+            b'no-commit',
             None,
-            _("don't commit, just apply the changes in working directory"),
+            _(b"don't commit, just apply the changes in working directory"),
         ),
-        ('f', 'force', False, _('force graft')),
+        (b'f', b'force', False, _(b'force graft')),
         (
-            'D',
-            'currentdate',
+            b'D',
+            b'currentdate',
             False,
-            _('record the current date as commit date'),
+            _(b'record the current date as commit date'),
         ),
-        ('U', 'currentuser', False, _('record the current user as committer')),
+        (
+            b'U',
+            b'currentuser',
+            False,
+            _(b'record the current user as committer'),
+        ),
     ]
     + commitopts2
     + mergetoolopts
     + dryrunopts,
-    _('[OPTION]... [-r REV]... REV...'),
+    _(b'[OPTION]... [-r REV]... REV...'),
     helpcategory=command.CATEGORY_CHANGE_MANAGEMENT,
 )
 def graft(ui, repo, *revs, **opts):
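[annotation] _dograft below completes the round-trip: byteified opts are converted back with pycompat.strkwargs() before being re-expanded as **kwargs (e.g. for cmdutil.getcommiteditor), because Python 3 rejects bytes keyword names. The inverse of the earlier sketch, under the same latin-1 assumption:

    def strkwargs(dic):
        # bytes keys -> native str keys, so dic can be passed as **kwargs
        return {k.decode('latin-1'): v for k, v in dic.items()}
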
@@ -2889,120 +2951,122 @@
 
 def _dograft(ui, repo, *revs, **opts):
     opts = pycompat.byteskwargs(opts)
-    if revs and opts.get('rev'):
+    if revs and opts.get(b'rev'):
         ui.warn(
             _(
-                'warning: inconsistent use of --rev might give unexpected '
-                'revision ordering!\n'
+                b'warning: inconsistent use of --rev might give unexpected '
+                b'revision ordering!\n'
             )
         )
 
     revs = list(revs)
-    revs.extend(opts.get('rev'))
+    revs.extend(opts.get(b'rev'))
     basectx = None
-    if opts.get('base'):
-        basectx = scmutil.revsingle(repo, opts['base'], None)
+    if opts.get(b'base'):
+        basectx = scmutil.revsingle(repo, opts[b'base'], None)
     # a dict of data to be stored in state file
     statedata = {}
     # list of new nodes created by ongoing graft
-    statedata['newnodes'] = []
-
-    if opts.get('user') and opts.get('currentuser'):
-        raise error.Abort(_('--user and --currentuser are mutually exclusive'))
-    if opts.get('date') and opts.get('currentdate'):
-        raise error.Abort(_('--date and --currentdate are mutually exclusive'))
-    if not opts.get('user') and opts.get('currentuser'):
-        opts['user'] = ui.username()
-    if not opts.get('date') and opts.get('currentdate'):
-        opts['date'] = "%d %d" % dateutil.makedate()
+    statedata[b'newnodes'] = []
+
+    if opts.get(b'user') and opts.get(b'currentuser'):
+        raise error.Abort(_(b'--user and --currentuser are mutually exclusive'))
+    if opts.get(b'date') and opts.get(b'currentdate'):
+        raise error.Abort(_(b'--date and --currentdate are mutually exclusive'))
+    if not opts.get(b'user') and opts.get(b'currentuser'):
+        opts[b'user'] = ui.username()
+    if not opts.get(b'date') and opts.get(b'currentdate'):
+        opts[b'date'] = b"%d %d" % dateutil.makedate()
 
     editor = cmdutil.getcommiteditor(
-        editform='graft', **pycompat.strkwargs(opts)
+        editform=b'graft', **pycompat.strkwargs(opts)
     )
 
     cont = False
-    if opts.get('no_commit'):
-        if opts.get('edit'):
+    if opts.get(b'no_commit'):
+        if opts.get(b'edit'):
             raise error.Abort(
-                _("cannot specify --no-commit and " "--edit together")
+                _(b"cannot specify --no-commit and " b"--edit together")
             )
-        if opts.get('currentuser'):
+        if opts.get(b'currentuser'):
             raise error.Abort(
-                _("cannot specify --no-commit and " "--currentuser together")
+                _(b"cannot specify --no-commit and " b"--currentuser together")
             )
-        if opts.get('currentdate'):
+        if opts.get(b'currentdate'):
             raise error.Abort(
-                _("cannot specify --no-commit and " "--currentdate together")
+                _(b"cannot specify --no-commit and " b"--currentdate together")
             )
-        if opts.get('log'):
+        if opts.get(b'log'):
             raise error.Abort(
-                _("cannot specify --no-commit and " "--log together")
+                _(b"cannot specify --no-commit and " b"--log together")
             )
 
-    graftstate = statemod.cmdstate(repo, 'graftstate')
-
-    if opts.get('stop'):
-        if opts.get('continue'):
+    graftstate = statemod.cmdstate(repo, b'graftstate')
+
+    if opts.get(b'stop'):
+        if opts.get(b'continue'):
             raise error.Abort(
-                _("cannot use '--continue' and " "'--stop' together")
+                _(b"cannot use '--continue' and " b"'--stop' together")
             )
-        if opts.get('abort'):
-            raise error.Abort(_("cannot use '--abort' and '--stop' together"))
+        if opts.get(b'abort'):
+            raise error.Abort(_(b"cannot use '--abort' and '--stop' together"))
 
         if any(
             (
-                opts.get('edit'),
-                opts.get('log'),
-                opts.get('user'),
-                opts.get('date'),
-                opts.get('currentdate'),
-                opts.get('currentuser'),
-                opts.get('rev'),
+                opts.get(b'edit'),
+                opts.get(b'log'),
+                opts.get(b'user'),
+                opts.get(b'date'),
+                opts.get(b'currentdate'),
+                opts.get(b'currentuser'),
+                opts.get(b'rev'),
             )
         ):
-            raise error.Abort(_("cannot specify any other flag with '--stop'"))
+            raise error.Abort(_(b"cannot specify any other flag with '--stop'"))
         return _stopgraft(ui, repo, graftstate)
-    elif opts.get('abort'):
-        if opts.get('continue'):
+    elif opts.get(b'abort'):
+        if opts.get(b'continue'):
             raise error.Abort(
-                _("cannot use '--continue' and " "'--abort' together")
+                _(b"cannot use '--continue' and " b"'--abort' together")
             )
         if any(
             (
-                opts.get('edit'),
-                opts.get('log'),
-                opts.get('user'),
-                opts.get('date'),
-                opts.get('currentdate'),
-                opts.get('currentuser'),
-                opts.get('rev'),
+                opts.get(b'edit'),
+                opts.get(b'log'),
+                opts.get(b'user'),
+                opts.get(b'date'),
+                opts.get(b'currentdate'),
+                opts.get(b'currentuser'),
+                opts.get(b'rev'),
             )
         ):
-            raise error.Abort(_("cannot specify any other flag with '--abort'"))
+            raise error.Abort(
+                _(b"cannot specify any other flag with '--abort'")
+            )
 
         return cmdutil.abortgraft(ui, repo, graftstate)
-    elif opts.get('continue'):
+    elif opts.get(b'continue'):
         cont = True
         if revs:
-            raise error.Abort(_("can't specify --continue and revisions"))
+            raise error.Abort(_(b"can't specify --continue and revisions"))
         # read in unfinished revisions
         if graftstate.exists():
             statedata = cmdutil.readgraftstate(repo, graftstate)
-            if statedata.get('date'):
-                opts['date'] = statedata['date']
-            if statedata.get('user'):
-                opts['user'] = statedata['user']
-            if statedata.get('log'):
-                opts['log'] = True
-            if statedata.get('no_commit'):
-                opts['no_commit'] = statedata.get('no_commit')
-            nodes = statedata['nodes']
+            if statedata.get(b'date'):
+                opts[b'date'] = statedata[b'date']
+            if statedata.get(b'user'):
+                opts[b'user'] = statedata[b'user']
+            if statedata.get(b'log'):
+                opts[b'log'] = True
+            if statedata.get(b'no_commit'):
+                opts[b'no_commit'] = statedata.get(b'no_commit')
+            nodes = statedata[b'nodes']
             revs = [repo[node].rev() for node in nodes]
         else:
-            cmdutil.wrongtooltocontinue(repo, _('graft'))
+            cmdutil.wrongtooltocontinue(repo, _(b'graft'))
     else:
         if not revs:
-            raise error.Abort(_('no revisions specified'))
+            raise error.Abort(_(b'no revisions specified'))
         cmdutil.checkunfinished(repo)
         cmdutil.bailifchanged(repo)
         revs = scmutil.revrange(repo, revs)
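
Every hunk in this file applies the same two idioms: opts = pycompat.byteskwargs(opts) re-keys the native-str keyword arguments that Python 3 delivers, and all later lookups use b'...' keys. A minimal sketch of that conversion (a hypothetical stand-in, not Mercurial's actual pycompat.byteskwargs)::

    def byteskwargs(dic):
        # Re-key a **kwargs dict from native str to bytes so that
        # opts.get(b'continue')-style lookups work on Python 3.
        return {k.encode('latin-1'): v for k, v in dic.items()}

    opts = byteskwargs({'continue': True, 'rev': []})
    assert opts.get(b'continue') is True
    assert opts.get(b'rev') == []
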
@@ -3010,14 +3074,14 @@
     skipped = set()
     if basectx is None:
         # check for merges
-        for rev in repo.revs('%ld and merge()', revs):
-            ui.warn(_('skipping ungraftable merge revision %d\n') % rev)
+        for rev in repo.revs(b'%ld and merge()', revs):
+            ui.warn(_(b'skipping ungraftable merge revision %d\n') % rev)
             skipped.add(rev)
     revs = [r for r in revs if r not in skipped]
     if not revs:
         return -1
     if basectx is not None and len(revs) != 1:
-        raise error.Abort(_('only one revision allowed with --base '))
+        raise error.Abort(_(b'only one revision allowed with --base '))
 
     # Don't check in the --continue case, in effect retaining --force across
     # --continues. That's because without --force, any revisions we decided to
@@ -3025,16 +3089,16 @@
     # way to the graftstate. With --force, any revisions we would have otherwise
     # skipped would not have been filtered out, and if they hadn't been applied
     # already, they'd have been in the graftstate.
-    if not (cont or opts.get('force')) and basectx is None:
+    if not (cont or opts.get(b'force')) and basectx is None:
         # check for ancestors of dest branch
-        crev = repo['.'].rev()
+        crev = repo[b'.'].rev()
         ancestors = repo.changelog.ancestors([crev], inclusive=True)
         # XXX make this lazy in the future
         # don't mutate while iterating, create a copy
         for rev in list(revs):
             if rev in ancestors:
                 ui.warn(
-                    _('skipping ancestor revision %d:%s\n') % (rev, repo[rev])
+                    _(b'skipping ancestor revision %d:%s\n') % (rev, repo[rev])
                 )
                 # XXX remove on list is slow
                 revs.remove(rev)
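
The ancestor-skipping loop above iterates over list(revs) precisely so that revs.remove() is safe mid-iteration, as the "don't mutate while iterating" comment notes. The idiom in isolation::

    revs = [2, 5, 7]
    ancestors = {5}
    # Iterate over a snapshot; mutating revs during direct iteration
    # would silently skip elements.
    for rev in list(revs):
        if rev in ancestors:
            revs.remove(rev)
    assert revs == [2, 7]
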
@@ -3043,20 +3107,20 @@
 
         # analyze revs for earlier grafts
         ids = {}
-        for ctx in repo.set("%ld", revs):
+        for ctx in repo.set(b"%ld", revs):
             ids[ctx.hex()] = ctx.rev()
-            n = ctx.extra().get('source')
+            n = ctx.extra().get(b'source')
             if n:
                 ids[n] = ctx.rev()
 
         # check ancestors for earlier grafts
-        ui.debug('scanning for duplicate grafts\n')
+        ui.debug(b'scanning for duplicate grafts\n')
 
         # The only changesets we can be sure don't contain grafts of any
         # revs are the ones that are common ancestors of *all* revs:
-        for rev in repo.revs('only(%d,ancestor(%ld))', crev, revs):
+        for rev in repo.revs(b'only(%d,ancestor(%ld))', crev, revs):
             ctx = repo[rev]
-            n = ctx.extra().get('source')
+            n = ctx.extra().get(b'source')
             if n in ids:
                 try:
                     r = repo[n].rev()
@@ -3065,8 +3129,8 @@
                 if r in revs:
                     ui.warn(
                         _(
-                            'skipping revision %d:%s '
-                            '(already grafted to %d:%s)\n'
+                            b'skipping revision %d:%s '
+                            b'(already grafted to %d:%s)\n'
                         )
                         % (r, repo[r], rev, ctx)
                     )
@@ -3075,16 +3139,16 @@
                     if r is None:
                         ui.warn(
                             _(
-                                'skipping already grafted revision %d:%s '
-                                '(%d:%s also has unknown origin %s)\n'
+                                b'skipping already grafted revision %d:%s '
+                                b'(%d:%s also has unknown origin %s)\n'
                             )
                             % (ids[n], repo[ids[n]], rev, ctx, n[:12])
                         )
                     else:
                         ui.warn(
                             _(
-                                'skipping already grafted revision %d:%s '
-                                '(%d:%s also has origin %d:%s)\n'
+                                b'skipping already grafted revision %d:%s '
+                                b'(%d:%s also has origin %d:%s)\n'
                             )
                             % (ids[n], repo[ids[n]], rev, ctx, r, n[:12])
                         )
@@ -3094,8 +3158,8 @@
                 if r in revs:
                     ui.warn(
                         _(
-                            'skipping already grafted revision %d:%s '
-                            '(was grafted from %d:%s)\n'
+                            b'skipping already grafted revision %d:%s '
+                            b'(was grafted from %d:%s)\n'
                         )
                         % (r, repo[r], rev, ctx)
                     )
@@ -3103,78 +3167,78 @@
         if not revs:
             return -1
 
-    if opts.get('no_commit'):
-        statedata['no_commit'] = True
-    for pos, ctx in enumerate(repo.set("%ld", revs)):
-        desc = '%d:%s "%s"' % (
+    if opts.get(b'no_commit'):
+        statedata[b'no_commit'] = True
+    for pos, ctx in enumerate(repo.set(b"%ld", revs)):
+        desc = b'%d:%s "%s"' % (
             ctx.rev(),
             ctx,
-            ctx.description().split('\n', 1)[0],
+            ctx.description().split(b'\n', 1)[0],
         )
         names = repo.nodetags(ctx.node()) + repo.nodebookmarks(ctx.node())
         if names:
-            desc += ' (%s)' % ' '.join(names)
-        ui.status(_('grafting %s\n') % desc)
-        if opts.get('dry_run'):
+            desc += b' (%s)' % b' '.join(names)
+        ui.status(_(b'grafting %s\n') % desc)
+        if opts.get(b'dry_run'):
             continue
 
-        source = ctx.extra().get('source')
+        source = ctx.extra().get(b'source')
         extra = {}
         if source:
-            extra['source'] = source
-            extra['intermediate-source'] = ctx.hex()
+            extra[b'source'] = source
+            extra[b'intermediate-source'] = ctx.hex()
         else:
-            extra['source'] = ctx.hex()
+            extra[b'source'] = ctx.hex()
         user = ctx.user()
-        if opts.get('user'):
-            user = opts['user']
-            statedata['user'] = user
+        if opts.get(b'user'):
+            user = opts[b'user']
+            statedata[b'user'] = user
         date = ctx.date()
-        if opts.get('date'):
-            date = opts['date']
-            statedata['date'] = date
+        if opts.get(b'date'):
+            date = opts[b'date']
+            statedata[b'date'] = date
         message = ctx.description()
-        if opts.get('log'):
-            message += '\n(grafted from %s)' % ctx.hex()
-            statedata['log'] = True
+        if opts.get(b'log'):
+            message += b'\n(grafted from %s)' % ctx.hex()
+            statedata[b'log'] = True
 
         # we don't merge the first commit when continuing
         if not cont:
             # perform the graft merge with p1(rev) as 'ancestor'
-            overrides = {('ui', 'forcemerge'): opts.get('tool', '')}
+            overrides = {(b'ui', b'forcemerge'): opts.get(b'tool', b'')}
             base = ctx.p1() if basectx is None else basectx
-            with ui.configoverride(overrides, 'graft'):
-                stats = mergemod.graft(repo, ctx, base, ['local', 'graft'])
+            with ui.configoverride(overrides, b'graft'):
+                stats = mergemod.graft(repo, ctx, base, [b'local', b'graft'])
             # report any conflicts
             if stats.unresolvedcount > 0:
                 # write out state for --continue
                 nodes = [repo[rev].hex() for rev in revs[pos:]]
-                statedata['nodes'] = nodes
+                statedata[b'nodes'] = nodes
                 stateversion = 1
                 graftstate.save(stateversion, statedata)
-                hint = _("use 'hg resolve' and 'hg graft --continue'")
+                hint = _(b"use 'hg resolve' and 'hg graft --continue'")
                 raise error.Abort(
-                    _("unresolved conflicts, can't continue"), hint=hint
+                    _(b"unresolved conflicts, can't continue"), hint=hint
                 )
         else:
             cont = False
 
         # commit if --no-commit is false
-        if not opts.get('no_commit'):
+        if not opts.get(b'no_commit'):
             node = repo.commit(
                 text=message, user=user, date=date, extra=extra, editor=editor
             )
             if node is None:
                 ui.warn(
-                    _('note: graft of %d:%s created no changes to commit\n')
+                    _(b'note: graft of %d:%s created no changes to commit\n')
                     % (ctx.rev(), ctx)
                 )
             # check that 'newnodes' exists because old state files won't have it
-            elif statedata.get('newnodes') is not None:
-                statedata['newnodes'].append(node)
+            elif statedata.get(b'newnodes') is not None:
+                statedata[b'newnodes'].append(node)
 
     # remove state when we complete successfully
-    if not opts.get('dry_run'):
+    if not opts.get(b'dry_run'):
         graftstate.delete()
 
     return 0
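
The desc construction above leans on bytes %-formatting, which Python 3.5 reintroduced (PEP 461): %d accepts an int and %s a bytes value. The same pattern with made-up revision data::

    rev, node, message = 42, b'abc123def456', b'fix the widget\nmore detail'
    desc = b'%d:%s "%s"' % (rev, node, message.split(b'\n', 1)[0])
    assert desc == b'42:abc123def456 "fix the widget"'
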
@@ -3183,76 +3247,79 @@
 def _stopgraft(ui, repo, graftstate):
     """stop the interrupted graft"""
     if not graftstate.exists():
-        raise error.Abort(_("no interrupted graft found"))
-    pctx = repo['.']
+        raise error.Abort(_(b"no interrupted graft found"))
+    pctx = repo[b'.']
     hg.updaterepo(repo, pctx.node(), overwrite=True)
     graftstate.delete()
-    ui.status(_("stopped the interrupted graft\n"))
-    ui.status(_("working directory is now at %s\n") % pctx.hex()[:12])
+    ui.status(_(b"stopped the interrupted graft\n"))
+    ui.status(_(b"working directory is now at %s\n") % pctx.hex()[:12])
     return 0
 
 
 statemod.addunfinished(
-    'graft',
-    fname='graftstate',
+    b'graft',
+    fname=b'graftstate',
     clearable=True,
     stopflag=True,
     continueflag=True,
     abortfunc=cmdutil.hgabortgraft,
-    cmdhint=_("use 'hg graft --continue' or 'hg graft --stop' to stop"),
+    cmdhint=_(b"use 'hg graft --continue' or 'hg graft --stop' to stop"),
 )
 
 
 @command(
-    'grep',
+    b'grep',
     [
-        ('0', 'print0', None, _('end fields with NUL')),
-        ('', 'all', None, _('print all revisions that match (DEPRECATED) ')),
+        (b'0', b'print0', None, _(b'end fields with NUL')),
+        (b'', b'all', None, _(b'print all revisions that match (DEPRECATED) ')),
         (
-            '',
-            'diff',
-            None,
-            _('print all revisions when the term was introduced ' 'or removed'),
-        ),
-        ('a', 'text', None, _('treat all files as text')),
-        (
-            'f',
-            'follow',
+            b'',
+            b'diff',
             None,
             _(
-                'follow changeset history,'
-                ' or file history across copies and renames'
+                b'print all revisions when the term was introduced '
+                b'or removed'
+            ),
+        ),
+        (b'a', b'text', None, _(b'treat all files as text')),
+        (
+            b'f',
+            b'follow',
+            None,
+            _(
+                b'follow changeset history,'
+                b' or file history across copies and renames'
             ),
         ),
-        ('i', 'ignore-case', None, _('ignore case when matching')),
+        (b'i', b'ignore-case', None, _(b'ignore case when matching')),
         (
-            'l',
-            'files-with-matches',
+            b'l',
+            b'files-with-matches',
             None,
-            _('print only filenames and revisions that match'),
+            _(b'print only filenames and revisions that match'),
         ),
-        ('n', 'line-number', None, _('print matching line numbers')),
+        (b'n', b'line-number', None, _(b'print matching line numbers')),
         (
-            'r',
-            'rev',
+            b'r',
+            b'rev',
             [],
-            _('only search files changed within revision range'),
-            _('REV'),
+            _(b'only search files changed within revision range'),
+            _(b'REV'),
         ),
         (
-            '',
-            'all-files',
+            b'',
+            b'all-files',
             None,
             _(
-                'include all files in the changeset while grepping (EXPERIMENTAL)'
+                b'include all files in the changeset while grepping (EXPERIMENTAL)'
             ),
         ),
-        ('u', 'user', None, _('list the author (long with -v)')),
-        ('d', 'date', None, _('list the date (short with -q)')),
+        (b'u', b'user', None, _(b'list the author (long with -v)')),
+        (b'd', b'date', None, _(b'list the date (short with -q)')),
     ]
     + formatteropts
     + walkopts,
-    _('[OPTION]... PATTERN [FILE]...'),
+    _(b'[OPTION]... PATTERN [FILE]...'),
     helpcategory=command.CATEGORY_FILE_CONTENTS,
     inferrepo=True,
     intents={INTENT_READONLY},
@@ -3299,29 +3366,31 @@
     Returns 0 if a match is found, 1 otherwise.
     """
     opts = pycompat.byteskwargs(opts)
-    diff = opts.get('all') or opts.get('diff')
-    all_files = opts.get('all_files')
-    if diff and opts.get('all_files'):
-        raise error.Abort(_('--diff and --all-files are mutually exclusive'))
+    diff = opts.get(b'all') or opts.get(b'diff')
+    all_files = opts.get(b'all_files')
+    if diff and opts.get(b'all_files'):
+        raise error.Abort(_(b'--diff and --all-files are mutually exclusive'))
     # TODO: remove "not opts.get('rev')" if --all-files -rMULTIREV gets working
-    if opts.get('all_files') is None and not opts.get('rev') and not diff:
+    if opts.get(b'all_files') is None and not opts.get(b'rev') and not diff:
         # experimental config: commands.grep.all-files
-        opts['all_files'] = ui.configbool('commands', 'grep.all-files')
-    plaingrep = opts.get('all_files') and not opts.get('rev')
+        opts[b'all_files'] = ui.configbool(b'commands', b'grep.all-files')
+    plaingrep = opts.get(b'all_files') and not opts.get(b'rev')
     if plaingrep:
-        opts['rev'] = ['wdir()']
+        opts[b'rev'] = [b'wdir()']
 
     reflags = re.M
-    if opts.get('ignore_case'):
+    if opts.get(b'ignore_case'):
         reflags |= re.I
     try:
         regexp = util.re.compile(pattern, reflags)
     except re.error as inst:
-        ui.warn(_("grep: invalid match pattern: %s\n") % pycompat.bytestr(inst))
+        ui.warn(
+            _(b"grep: invalid match pattern: %s\n") % pycompat.bytestr(inst)
+        )
         return 1
-    sep, eol = ':', '\n'
-    if opts.get('print0'):
-        sep = eol = '\0'
+    sep, eol = b':', b'\n'
+    if opts.get(b'print0'):
+        sep = eol = b'\0'
 
     getfile = util.lrucachefunc(repo.file)
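
With --print0 both separators become NUL so the output can feed xargs -0; since everything here is bytes, the sentinel is b'\0'. A sketch of the framing with placeholder fields::

    def emit(fields, print0=False):
        # Mirror the sep/eol selection above: ':' and newline normally,
        # NUL for both under --print0.
        sep, eol = (b'\0', b'\0') if print0 else (b':', b'\n')
        return sep.join(fields) + eol

    assert emit([b'file.py', b'12']) == b'file.py:12\n'
    assert emit([b'file.py', b'12'], print0=True) == b'file.py\x0012\x00'
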
 
@@ -3333,9 +3402,9 @@
             if not match:
                 break
             mstart, mend = match.span()
-            linenum += body.count('\n', begin, mstart) + 1
-            lstart = body.rfind('\n', begin, mstart) + 1 or begin
-            begin = body.find('\n', mend) + 1 or len(body) + 1
+            linenum += body.count(b'\n', begin, mstart) + 1
+            lstart = body.rfind(b'\n', begin, mstart) + 1 or begin
+            begin = body.find(b'\n', mend) + 1 or len(body) + 1
             lend = begin - 1
             yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
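
The match loop above derives line numbers incrementally with count/rfind/find over the bytes body instead of splitting it. The same arithmetic on a toy body::

    import re

    body = b'alpha\nbeta match\ngamma\nmatch end\n'
    pattern = re.compile(b'match')
    linenum, begin, found = 0, 0, []
    while True:
        m = pattern.search(body, begin)
        if not m:
            break
        mstart, mend = m.span()
        linenum += body.count(b'\n', begin, mstart) + 1
        lstart = body.rfind(b'\n', begin, mstart) + 1 or begin
        begin = body.find(b'\n', mend) + 1 or len(body) + 1
        found.append((linenum, body[lstart:begin - 1]))
    assert found == [(2, b'beta match'), (4, b'match end')]
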
 
@@ -3378,15 +3447,15 @@
         for tag, alo, ahi, blo, bhi in sm.get_opcodes():
             if tag == r'insert':
                 for i in pycompat.xrange(blo, bhi):
-                    yield ('+', b[i])
+                    yield (b'+', b[i])
             elif tag == r'delete':
                 for i in pycompat.xrange(alo, ahi):
-                    yield ('-', a[i])
+                    yield (b'-', a[i])
             elif tag == r'replace':
                 for i in pycompat.xrange(alo, ahi):
-                    yield ('-', a[i])
+                    yield (b'-', a[i])
                 for i in pycompat.xrange(blo, bhi):
-                    yield ('+', b[i])
+                    yield (b'+', b[i])
 
     uipathfn = scmutil.getuipathfn(repo)
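
difflinestates feeds the two line lists to difflib's SequenceMatcher; on Python 3 the opcode tags come back as native str, which is why the comparisons above use r'insert', r'delete' and r'replace' while the yielded +/- markers stay bytes. The tags and the replace expansion in isolation::

    import difflib

    a = [b'one', b'two']
    b = [b'one', b'2', b'three']
    sm = difflib.SequenceMatcher(None, a, b)
    assert [op[0] for op in sm.get_opcodes()] == ['equal', 'replace']
    tag, alo, ahi, blo, bhi = sm.get_opcodes()[1]
    # A replace yields the removed lines then the inserted ones, as above.
    changes = [(b'-', line) for line in a[alo:ahi]]
    changes += [(b'+', line) for line in b[blo:bhi]]
    assert changes == [(b'-', b'two'), (b'+', b'2'), (b'+', b'three')]
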
 
@@ -3397,9 +3466,9 @@
         else:
             formatuser = pycompat.bytestr
         if ui.quiet:
-            datefmt = '%Y-%m-%d'
+            datefmt = b'%Y-%m-%d'
         else:
-            datefmt = '%a %b %d %H:%M:%S %Y %1%2'
+            datefmt = b'%a %b %d %H:%M:%S %Y %1%2'
         found = False
 
         @util.cachefunc
@@ -3410,64 +3479,72 @@
             except error.WdirUnsupported:
                 return ctx[fn].isbinary()
 
-        fieldnamemap = {'linenumber': 'lineno'}
+        fieldnamemap = {b'linenumber': b'lineno'}
         if diff:
             iter = difflinestates(pstates, states)
         else:
-            iter = [('', l) for l in states]
+            iter = [(b'', l) for l in states]
         for change, l in iter:
             fm.startitem()
             fm.context(ctx=ctx)
             fm.data(node=fm.hexfunc(scmutil.binnode(ctx)), path=fn)
-            fm.plain(uipathfn(fn), label='grep.filename')
+            fm.plain(uipathfn(fn), label=b'grep.filename')
 
             cols = [
-                ('rev', '%d', rev, not plaingrep, ''),
-                ('linenumber', '%d', l.linenum, opts.get('line_number'), ''),
+                (b'rev', b'%d', rev, not plaingrep, b''),
+                (
+                    b'linenumber',
+                    b'%d',
+                    l.linenum,
+                    opts.get(b'line_number'),
+                    b'',
+                ),
             ]
             if diff:
                 cols.append(
                     (
-                        'change',
-                        '%s',
+                        b'change',
+                        b'%s',
                         change,
                         True,
-                        'grep.inserted ' if change == '+' else 'grep.deleted ',
+                        b'grep.inserted '
+                        if change == b'+'
+                        else b'grep.deleted ',
                     )
                 )
             cols.extend(
                 [
                     (
-                        'user',
-                        '%s',
+                        b'user',
+                        b'%s',
                         formatuser(ctx.user()),
-                        opts.get('user'),
-                        '',
+                        opts.get(b'user'),
+                        b'',
                     ),
                     (
-                        'date',
-                        '%s',
+                        b'date',
+                        b'%s',
                         fm.formatdate(ctx.date(), datefmt),
-                        opts.get('date'),
-                        '',
+                        opts.get(b'date'),
+                        b'',
                     ),
                 ]
             )
             for name, fmt, data, cond, extra_label in cols:
                 if cond:
-                    fm.plain(sep, label='grep.sep')
+                    fm.plain(sep, label=b'grep.sep')
                 field = fieldnamemap.get(name, name)
-                label = extra_label + ('grep.%s' % name)
+                label = extra_label + (b'grep.%s' % name)
                 fm.condwrite(cond, field, fmt, data, label=label)
-            if not opts.get('files_with_matches'):
-                fm.plain(sep, label='grep.sep')
-                if not opts.get('text') and binary():
-                    fm.plain(_(" Binary file matches"))
+            if not opts.get(b'files_with_matches'):
+                fm.plain(sep, label=b'grep.sep')
+                if not opts.get(b'text') and binary():
+                    fm.plain(_(b" Binary file matches"))
                 else:
-                    displaymatches(fm.nested('texts', tmpl='{text}'), l)
+                    displaymatches(fm.nested(b'texts', tmpl=b'{text}'), l)
             fm.plain(eol)
             found = True
-            if opts.get('files_with_matches'):
+            if opts.get(b'files_with_matches'):
                 break
         return found
 
@@ -3476,15 +3553,15 @@
         for s, e in l.findpos():
             if p < s:
                 fm.startitem()
-                fm.write('text', '%s', l.line[p:s])
+                fm.write(b'text', b'%s', l.line[p:s])
                 fm.data(matched=False)
             fm.startitem()
-            fm.write('text', '%s', l.line[s:e], label='grep.match')
+            fm.write(b'text', b'%s', l.line[s:e], label=b'grep.match')
             fm.data(matched=True)
             p = e
         if p < len(l.line):
             fm.startitem()
-            fm.write('text', '%s', l.line[p:])
+            fm.write(b'text', b'%s', l.line[p:])
             fm.data(matched=False)
         fm.end()
 
@@ -3492,7 +3569,7 @@
     revfiles = {}
     match = scmutil.match(repo[None], pats, opts)
     found = False
-    follow = opts.get('follow')
+    follow = opts.get(b'follow')
 
     getrenamed = scmutil.getrenamedfn(repo)
 
@@ -3536,8 +3613,8 @@
                 except error.LookupError:
                     pass
 
-    ui.pager('grep')
-    fm = ui.formatter('grep', opts)
+    ui.pager(b'grep')
+    fm = ui.formatter(b'grep', opts)
     for ctx in cmdutil.walkchangerevs(repo, match, opts, prep):
         rev = ctx.rev()
         parent = ctx.p1().rev()
@@ -3567,21 +3644,26 @@
 
 
 @command(
-    'heads',
+    b'heads',
     [
         (
-            'r',
-            'rev',
-            '',
-            _('show only heads which are descendants of STARTREV'),
-            _('STARTREV'),
+            b'r',
+            b'rev',
+            b'',
+            _(b'show only heads which are descendants of STARTREV'),
+            _(b'STARTREV'),
         ),
-        ('t', 'topo', False, _('show topological heads only')),
-        ('a', 'active', False, _('show active branchheads only (DEPRECATED)')),
-        ('c', 'closed', False, _('show normal and closed branch heads')),
+        (b't', b'topo', False, _(b'show topological heads only')),
+        (
+            b'a',
+            b'active',
+            False,
+            _(b'show active branchheads only (DEPRECATED)'),
+        ),
+        (b'c', b'closed', False, _(b'show normal and closed branch heads')),
     ]
     + templateopts,
-    _('[-ct] [-r STARTREV] [REV]...'),
+    _(b'[-ct] [-r STARTREV] [REV]...'),
     helpcategory=command.CATEGORY_CHANGE_NAVIGATION,
     intents={INTENT_READONLY},
 )
@@ -3612,17 +3694,17 @@
 
     opts = pycompat.byteskwargs(opts)
     start = None
-    rev = opts.get('rev')
+    rev = opts.get(b'rev')
     if rev:
-        repo = scmutil.unhidehashlikerevs(repo, [rev], 'nowarn')
+        repo = scmutil.unhidehashlikerevs(repo, [rev], b'nowarn')
         start = scmutil.revsingle(repo, rev, None).node()
 
-    if opts.get('topo'):
+    if opts.get(b'topo'):
         heads = [repo[h] for h in repo.heads(start)]
     else:
         heads = []
         for branch in repo.branchmap():
-            heads += repo.branchheads(branch, start, opts.get('closed'))
+            heads += repo.branchheads(branch, start, opts.get(b'closed'))
         heads = [repo[h] for h in heads]
 
     if branchrevs:
@@ -3631,23 +3713,23 @@
         )
         heads = [h for h in heads if h.branch() in branches]
 
-    if opts.get('active') and branchrevs:
+    if opts.get(b'active') and branchrevs:
         dagheads = repo.heads(start)
         heads = [h for h in heads if h.node() in dagheads]
 
     if branchrevs:
         haveheads = set(h.branch() for h in heads)
         if branches - haveheads:
-            headless = ', '.join(b for b in branches - haveheads)
-            msg = _('no open branch heads found on branches %s')
-            if opts.get('rev'):
-                msg += _(' (started at %s)') % opts['rev']
-            ui.warn((msg + '\n') % headless)
+            headless = b', '.join(b for b in branches - haveheads)
+            msg = _(b'no open branch heads found on branches %s')
+            if opts.get(b'rev'):
+                msg += _(b' (started at %s)') % opts[b'rev']
+            ui.warn((msg + b'\n') % headless)
 
     if not heads:
         return 1
 
-    ui.pager('heads')
+    ui.pager(b'heads')
     heads = sorted(heads, key=lambda x: -(x.rev()))
     displayer = logcmdutil.changesetdisplayer(ui, repo, opts)
     for ctx in heads:
@@ -3656,20 +3738,20 @@
 
 
 @command(
-    'help',
+    b'help',
     [
-        ('e', 'extension', None, _('show only help for extensions')),
-        ('c', 'command', None, _('show only help for commands')),
-        ('k', 'keyword', None, _('show topics matching keyword')),
+        (b'e', b'extension', None, _(b'show only help for extensions')),
+        (b'c', b'command', None, _(b'show only help for commands')),
+        (b'k', b'keyword', None, _(b'show topics matching keyword')),
         (
-            's',
-            'system',
+            b's',
+            b'system',
             [],
-            _('show help for specific platform(s)'),
-            _('PLATFORM'),
+            _(b'show help for specific platform(s)'),
+            _(b'PLATFORM'),
         ),
     ],
-    _('[-eck] [-s PLATFORM] [TOPIC]'),
+    _(b'[-eck] [-s PLATFORM] [TOPIC]'),
     helpcategory=command.CATEGORY_HELP,
     norepo=True,
     intents={INTENT_READONLY},
@@ -3687,37 +3769,37 @@
 
     keep = opts.get(r'system') or []
     if len(keep) == 0:
-        if pycompat.sysplatform.startswith('win'):
-            keep.append('windows')
-        elif pycompat.sysplatform == 'OpenVMS':
-            keep.append('vms')
-        elif pycompat.sysplatform == 'plan9':
-            keep.append('plan9')
+        if pycompat.sysplatform.startswith(b'win'):
+            keep.append(b'windows')
+        elif pycompat.sysplatform == b'OpenVMS':
+            keep.append(b'vms')
+        elif pycompat.sysplatform == b'plan9':
+            keep.append(b'plan9')
         else:
-            keep.append('unix')
+            keep.append(b'unix')
             keep.append(pycompat.sysplatform.lower())
     if ui.verbose:
-        keep.append('verbose')
+        keep.append(b'verbose')
 
     commands = sys.modules[__name__]
     formatted = help.formattedhelp(ui, commands, name, keep=keep, **opts)
-    ui.pager('help')
+    ui.pager(b'help')
     ui.write(formatted)
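
The keep list above selects platform-specific help sections from pycompat.sysplatform (a bytes value). A hypothetical mirror of that branching using sys.platform, just to show the fallback shape::

    import sys

    def helpplatforms(platform=None):
        # Hypothetical helper; Mercurial does this inline with bytes.
        platform = platform or sys.platform
        if platform.startswith('win'):
            return ['windows']
        if platform == 'OpenVMS':
            return ['vms']
        if platform == 'plan9':
            return ['plan9']
        return ['unix', platform.lower()]

    assert helpplatforms('win32') == ['windows']
    assert helpplatforms('linux') == ['unix', 'linux']
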
 
 
 @command(
-    'identify|id',
+    b'identify|id',
     [
-        ('r', 'rev', '', _('identify the specified revision'), _('REV')),
-        ('n', 'num', None, _('show local revision number')),
-        ('i', 'id', None, _('show global revision id')),
-        ('b', 'branch', None, _('show branch')),
-        ('t', 'tags', None, _('show tags')),
-        ('B', 'bookmarks', None, _('show bookmarks')),
+        (b'r', b'rev', b'', _(b'identify the specified revision'), _(b'REV')),
+        (b'n', b'num', None, _(b'show local revision number')),
+        (b'i', b'id', None, _(b'show global revision id')),
+        (b'b', b'branch', None, _(b'show branch')),
+        (b't', b'tags', None, _(b'show tags')),
+        (b'B', b'bookmarks', None, _(b'show bookmarks')),
     ]
     + remoteopts
     + formatteropts,
-    _('[-nibtB] [-r REV] [SOURCE]'),
+    _(b'[-nibtB] [-r REV] [SOURCE]'),
     helpcategory=command.CATEGORY_CHANGE_NAVIGATION,
     optionalrepo=True,
     intents={INTENT_READONLY},
@@ -3783,7 +3865,7 @@
     opts = pycompat.byteskwargs(opts)
     if not repo and not source:
         raise error.Abort(
-            _("there is no Mercurial repository here " "(.hg not found)")
+            _(b"there is no Mercurial repository here " b"(.hg not found)")
         )
 
     default = not (num or id or branch or tags or bookmarks)
@@ -3796,18 +3878,18 @@
         repo = peer.local()
         revs, checkout = hg.addbranchrevs(repo, peer, branches, None)
 
-    fm = ui.formatter('identify', opts)
+    fm = ui.formatter(b'identify', opts)
     fm.startitem()
 
     if not repo:
         if num or branch or tags:
             raise error.Abort(
-                _("can't query remote revision number, branch, or tags")
+                _(b"can't query remote revision number, branch, or tags")
             )
         if not rev and revs:
             rev = revs[0]
         if not rev:
-            rev = "tip"
+            rev = b"tip"
 
         remoterev = peer.lookup(rev)
         hexrev = fm.hexfunc(remoterev)
@@ -3819,11 +3901,11 @@
         def getbms():
             bms = []
 
-            if 'bookmarks' in peer.listkeys('namespaces'):
+            if b'bookmarks' in peer.listkeys(b'namespaces'):
                 hexremoterev = hex(remoterev)
                 bms = [
                     bm
-                    for bm, bmr in peer.listkeys('bookmarks').iteritems()
+                    for bm, bmr in peer.listkeys(b'bookmarks').iteritems()
                     if bmr == hexremoterev
                 ]
 
@@ -3834,16 +3916,16 @@
                 output.extend(getbms())
             elif default and not ui.quiet:
                 # multiple bookmarks for a single parent separated by '/'
-                bm = '/'.join(getbms())
+                bm = b'/'.join(getbms())
                 if bm:
                     output.append(bm)
         else:
             fm.data(node=hex(remoterev))
-            if bookmarks or 'bookmarks' in fm.datahint():
-                fm.data(bookmarks=fm.formatlist(getbms(), name='bookmark'))
+            if bookmarks or b'bookmarks' in fm.datahint():
+                fm.data(bookmarks=fm.formatlist(getbms(), name=b'bookmark'))
     else:
         if rev:
-            repo = scmutil.unhidehashlikerevs(repo, [rev], 'nowarn')
+            repo = scmutil.unhidehashlikerevs(repo, [rev], b'nowarn')
         ctx = scmutil.revsingle(repo, rev, None)
 
         if ctx.rev() is None:
@@ -3853,23 +3935,23 @@
             for p in parents:
                 taglist.extend(p.tags())
 
-            dirty = ""
+            dirty = b""
             if ctx.dirty(missing=True, merge=False, branch=False):
-                dirty = '+'
+                dirty = b'+'
             fm.data(dirty=dirty)
 
             hexoutput = [fm.hexfunc(p.node()) for p in parents]
             if default or id:
-                output = ["%s%s" % ('+'.join(hexoutput), dirty)]
-            fm.data(id="%s%s" % ('+'.join(hexoutput), dirty))
+                output = [b"%s%s" % (b'+'.join(hexoutput), dirty)]
+            fm.data(id=b"%s%s" % (b'+'.join(hexoutput), dirty))
 
             if num:
-                numoutput = ["%d" % p.rev() for p in parents]
-                output.append("%s%s" % ('+'.join(numoutput), dirty))
+                numoutput = [b"%d" % p.rev() for p in parents]
+                output.append(b"%s%s" % (b'+'.join(numoutput), dirty))
 
             fm.data(
                 parents=fm.formatlist(
-                    [fm.hexfunc(p.node()) for p in parents], name='node'
+                    [fm.hexfunc(p.node()) for p in parents], name=b'node'
                 )
             )
         else:
@@ -3884,16 +3966,16 @@
 
         if default and not ui.quiet:
             b = ctx.branch()
-            if b != 'default':
-                output.append("(%s)" % b)
+            if b != b'default':
+                output.append(b"(%s)" % b)
 
             # multiple tags for a single parent separated by '/'
-            t = '/'.join(taglist)
+            t = b'/'.join(taglist)
             if t:
                 output.append(t)
 
             # multiple bookmarks for a single parent separated by '/'
-            bm = '/'.join(ctx.bookmarks())
+            bm = b'/'.join(ctx.bookmarks())
             if bm:
                 output.append(bm)
         else:
@@ -3908,61 +3990,61 @@
 
         fm.data(node=ctx.hex())
         fm.data(branch=ctx.branch())
-        fm.data(tags=fm.formatlist(taglist, name='tag', sep=':'))
-        fm.data(bookmarks=fm.formatlist(ctx.bookmarks(), name='bookmark'))
+        fm.data(tags=fm.formatlist(taglist, name=b'tag', sep=b':'))
+        fm.data(bookmarks=fm.formatlist(ctx.bookmarks(), name=b'bookmark'))
         fm.context(ctx=ctx)
 
-    fm.plain("%s\n" % ' '.join(output))
+    fm.plain(b"%s\n" % b' '.join(output))
     fm.end()
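
For a working-directory revision, identify joins the parent hashes with '+' and appends a further '+' when the tree is dirty, which is where the familiar "id+" output comes from. The formatting with made-up hashes::

    parents = [b'deadbeefcafe', b'0123456789ab']
    dirty = b'+'  # empty bytes when the working directory is clean
    ident = b'%s%s' % (b'+'.join(parents), dirty)
    assert ident == b'deadbeefcafe+0123456789ab+'
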
 
 
 @command(
-    'import|patch',
+    b'import|patch',
     [
         (
-            'p',
-            'strip',
+            b'p',
+            b'strip',
             1,
             _(
-                'directory strip option for patch. This has the same '
-                'meaning as the corresponding patch option'
+                b'directory strip option for patch. This has the same '
+                b'meaning as the corresponding patch option'
             ),
-            _('NUM'),
+            _(b'NUM'),
         ),
-        ('b', 'base', '', _('base path (DEPRECATED)'), _('PATH')),
-        ('e', 'edit', False, _('invoke editor on commit messages')),
+        (b'b', b'base', b'', _(b'base path (DEPRECATED)'), _(b'PATH')),
+        (b'e', b'edit', False, _(b'invoke editor on commit messages')),
         (
-            'f',
-            'force',
+            b'f',
+            b'force',
             None,
-            _('skip check for outstanding uncommitted changes (DEPRECATED)'),
+            _(b'skip check for outstanding uncommitted changes (DEPRECATED)'),
         ),
         (
-            '',
-            'no-commit',
+            b'',
+            b'no-commit',
             None,
-            _("don't commit, just update the working directory"),
+            _(b"don't commit, just update the working directory"),
         ),
         (
-            '',
-            'bypass',
+            b'',
+            b'bypass',
             None,
-            _("apply patch without touching the working directory"),
+            _(b"apply patch without touching the working directory"),
         ),
-        ('', 'partial', None, _('commit even if some hunks fail')),
-        ('', 'exact', None, _('abort if patch would apply lossily')),
-        ('', 'prefix', '', _('apply patch to subdirectory'), _('DIR')),
+        (b'', b'partial', None, _(b'commit even if some hunks fail')),
+        (b'', b'exact', None, _(b'abort if patch would apply lossily')),
+        (b'', b'prefix', b'', _(b'apply patch to subdirectory'), _(b'DIR')),
         (
-            '',
-            'import-branch',
+            b'',
+            b'import-branch',
             None,
-            _('use any branch information in patch (implied by --exact)'),
+            _(b'use any branch information in patch (implied by --exact)'),
         ),
     ]
     + commitopts
     + commitopts2
     + similarityopts,
-    _('[OPTION]... PATCH...'),
+    _(b'[OPTION]... PATCH...'),
     helpcategory=command.CATEGORY_IMPORT_EXPORT,
 )
 def import_(ui, repo, patch1=None, *patches, **opts):
@@ -4066,60 +4148,60 @@
 
     opts = pycompat.byteskwargs(opts)
     if not patch1:
-        raise error.Abort(_('need at least one patch to import'))
+        raise error.Abort(_(b'need at least one patch to import'))
 
     patches = (patch1,) + patches
 
-    date = opts.get('date')
+    date = opts.get(b'date')
     if date:
-        opts['date'] = dateutil.parsedate(date)
-
-    exact = opts.get('exact')
-    update = not opts.get('bypass')
-    if not update and opts.get('no_commit'):
-        raise error.Abort(_('cannot use --no-commit with --bypass'))
+        opts[b'date'] = dateutil.parsedate(date)
+
+    exact = opts.get(b'exact')
+    update = not opts.get(b'bypass')
+    if not update and opts.get(b'no_commit'):
+        raise error.Abort(_(b'cannot use --no-commit with --bypass'))
     try:
-        sim = float(opts.get('similarity') or 0)
+        sim = float(opts.get(b'similarity') or 0)
     except ValueError:
-        raise error.Abort(_('similarity must be a number'))
+        raise error.Abort(_(b'similarity must be a number'))
     if sim < 0 or sim > 100:
-        raise error.Abort(_('similarity must be between 0 and 100'))
+        raise error.Abort(_(b'similarity must be between 0 and 100'))
     if sim and not update:
-        raise error.Abort(_('cannot use --similarity with --bypass'))
+        raise error.Abort(_(b'cannot use --similarity with --bypass'))
     if exact:
-        if opts.get('edit'):
-            raise error.Abort(_('cannot use --exact with --edit'))
-        if opts.get('prefix'):
-            raise error.Abort(_('cannot use --exact with --prefix'))
-
-    base = opts["base"]
+        if opts.get(b'edit'):
+            raise error.Abort(_(b'cannot use --exact with --edit'))
+        if opts.get(b'prefix'):
+            raise error.Abort(_(b'cannot use --exact with --prefix'))
+
+    base = opts[b"base"]
     msgs = []
     ret = 0
 
     with repo.wlock():
         if update:
             cmdutil.checkunfinished(repo)
-            if exact or not opts.get('force'):
+            if exact or not opts.get(b'force'):
                 cmdutil.bailifchanged(repo)
 
-        if not opts.get('no_commit'):
+        if not opts.get(b'no_commit'):
             lock = repo.lock
-            tr = lambda: repo.transaction('import')
+            tr = lambda: repo.transaction(b'import')
             dsguard = util.nullcontextmanager
         else:
             lock = util.nullcontextmanager
             tr = util.nullcontextmanager
-            dsguard = lambda: dirstateguard.dirstateguard(repo, 'import')
+            dsguard = lambda: dirstateguard.dirstateguard(repo, b'import')
         with lock(), tr(), dsguard():
             parents = repo[None].parents()
             for patchurl in patches:
-                if patchurl == '-':
-                    ui.status(_('applying patch from stdin\n'))
+                if patchurl == b'-':
+                    ui.status(_(b'applying patch from stdin\n'))
                     patchfile = ui.fin
-                    patchurl = 'stdin'  # for error message
+                    patchurl = b'stdin'  # for error message
                 else:
                     patchurl = os.path.join(base, patchurl)
-                    ui.status(_('applying %s\n') % patchurl)
+                    ui.status(_(b'applying %s\n') % patchurl)
                     patchfile = hg.openpath(ui, patchurl, sendaccept=False)
 
                 haspatch = False
@@ -4130,59 +4212,64 @@
                         )
                     if msg:
                         haspatch = True
-                        ui.note(msg + '\n')
+                        ui.note(msg + b'\n')
                     if update or exact:
                         parents = repo[None].parents()
                     else:
                         parents = [repo[node]]
                     if rej:
-                        ui.write_err(_("patch applied partially\n"))
+                        ui.write_err(_(b"patch applied partially\n"))
                         ui.write_err(
                             _(
-                                "(fix the .rej files and run "
-                                "`hg commit --amend`)\n"
+                                b"(fix the .rej files and run "
+                                b"`hg commit --amend`)\n"
                             )
                         )
                         ret = 1
                         break
 
                 if not haspatch:
-                    raise error.Abort(_('%s: no diffs found') % patchurl)
+                    raise error.Abort(_(b'%s: no diffs found') % patchurl)
 
             if msgs:
-                repo.savecommitmessage('\n* * *\n'.join(msgs))
+                repo.savecommitmessage(b'\n* * *\n'.join(msgs))
         return ret
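
import_ picks its context managers up front -- a real lock and transaction when committing, no-ops otherwise, with a dirstateguard only in the --no-commit case -- and enters whichever it got in a single with statement. A simplified sketch of that shape, using contextlib.nullcontext in place of util.nullcontextmanager::

    import contextlib

    @contextlib.contextmanager
    def reallock():
        # Hypothetical stand-in for repo.lock / repo.transaction.
        try:
            yield
        finally:
            pass  # release here

    def apply_patches(no_commit=False):
        lock = contextlib.nullcontext if no_commit else reallock
        tr = contextlib.nullcontext if no_commit else reallock
        with lock(), tr():
            pass  # apply each patch here

    apply_patches(no_commit=True)   # enters only no-op managers
    apply_patches(no_commit=False)  # takes the "lock" and "transaction"
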
 
 
 @command(
-    'incoming|in',
+    b'incoming|in',
     [
-        ('f', 'force', None, _('run even if remote repository is unrelated')),
-        ('n', 'newest-first', None, _('show newest record first')),
-        ('', 'bundle', '', _('file to store the bundles into'), _('FILE')),
         (
-            'r',
-            'rev',
+            b'f',
+            b'force',
+            None,
+            _(b'run even if remote repository is unrelated'),
+        ),
+        (b'n', b'newest-first', None, _(b'show newest record first')),
+        (b'', b'bundle', b'', _(b'file to store the bundles into'), _(b'FILE')),
+        (
+            b'r',
+            b'rev',
             [],
-            _('a remote changeset intended to be added'),
-            _('REV'),
+            _(b'a remote changeset intended to be added'),
+            _(b'REV'),
         ),
-        ('B', 'bookmarks', False, _("compare bookmarks")),
+        (b'B', b'bookmarks', False, _(b"compare bookmarks")),
         (
-            'b',
-            'branch',
+            b'b',
+            b'branch',
             [],
-            _('a specific branch you would like to pull'),
-            _('BRANCH'),
+            _(b'a specific branch you would like to pull'),
+            _(b'BRANCH'),
         ),
     ]
     + logopts
     + remoteopts
     + subrepoopts,
-    _('[-p] [-n] [-M] [-f] [-r REV]... [--bundle FILENAME] [SOURCE]'),
+    _(b'[-p] [-n] [-M] [-f] [-r REV]... [--bundle FILENAME] [SOURCE]'),
     helpcategory=command.CATEGORY_REMOTE_REPO_MANAGEMENT,
 )
-def incoming(ui, repo, source="default", **opts):
+def incoming(ui, repo, source=b"default", **opts):
     """show new changesets found in source
 
     Show new changesets found in the specified path/URL or the default
@@ -4237,7 +4324,7 @@
     Returns 0 if there are incoming changes, 1 otherwise.
     """
     opts = pycompat.byteskwargs(opts)
-    if opts.get('graph'):
+    if opts.get(b'graph'):
         logcmdutil.checkunsupportedgraphflags([], opts)
 
         def display(other, chlist, displayer):
@@ -4249,19 +4336,19 @@
         hg._incoming(display, lambda: 1, ui, repo, source, opts, buffered=True)
         return 0
 
-    if opts.get('bundle') and opts.get('subrepos'):
-        raise error.Abort(_('cannot combine --bundle and --subrepos'))
-
-    if opts.get('bookmarks'):
+    if opts.get(b'bundle') and opts.get(b'subrepos'):
+        raise error.Abort(_(b'cannot combine --bundle and --subrepos'))
+
+    if opts.get(b'bookmarks'):
         source, branches = hg.parseurl(
-            ui.expandpath(source), opts.get('branch')
+            ui.expandpath(source), opts.get(b'branch')
         )
         other = hg.peer(repo, opts, source)
-        if 'bookmarks' not in other.listkeys('namespaces'):
-            ui.warn(_("remote doesn't support bookmarks\n"))
+        if b'bookmarks' not in other.listkeys(b'namespaces'):
+            ui.warn(_(b"remote doesn't support bookmarks\n"))
             return 0
-        ui.pager('incoming')
-        ui.status(_('comparing with %s\n') % util.hidepassword(source))
+        ui.pager(b'incoming')
+        ui.status(_(b'comparing with %s\n') % util.hidepassword(source))
         return bookmarks.incoming(ui, repo, other)
 
     repo._subtoppath = ui.expandpath(source)
@@ -4272,14 +4359,14 @@
 
 
 @command(
-    'init',
+    b'init',
     remoteopts,
-    _('[-e CMD] [--remotecmd CMD] [DEST]'),
+    _(b'[-e CMD] [--remotecmd CMD] [DEST]'),
     helpcategory=command.CATEGORY_REPO_CREATION,
     helpbasic=True,
     norepo=True,
 )
-def init(ui, dest=".", **opts):
+def init(ui, dest=b".", **opts):
     """create a new repository in the given directory
 
     Initialize a new repository in the given directory. If the given
@@ -4297,19 +4384,30 @@
 
 
 @command(
-    'locate',
+    b'locate',
     [
-        ('r', 'rev', '', _('search the repository as it is in REV'), _('REV')),
-        ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
+        (
+            b'r',
+            b'rev',
+            b'',
+            _(b'search the repository as it is in REV'),
+            _(b'REV'),
+        ),
         (
-            'f',
-            'fullpath',
+            b'0',
+            b'print0',
             None,
-            _('print complete paths from the filesystem root'),
+            _(b'end filenames with NUL, for use with xargs'),
+        ),
+        (
+            b'f',
+            b'fullpath',
+            None,
+            _(b'print complete paths from the filesystem root'),
         ),
     ]
     + walkopts,
-    _('[OPTION]... [PATTERN]...'),
+    _(b'[OPTION]... [PATTERN]...'),
     helpcategory=command.CATEGORY_WORKING_DIRECTORY,
 )
 def locate(ui, repo, *pats, **opts):
@@ -4335,18 +4433,18 @@
     Returns 0 if a match is found, 1 otherwise.
     """
     opts = pycompat.byteskwargs(opts)
-    if opts.get('print0'):
-        end = '\0'
+    if opts.get(b'print0'):
+        end = b'\0'
     else:
-        end = '\n'
-    ctx = scmutil.revsingle(repo, opts.get('rev'), None)
+        end = b'\n'
+    ctx = scmutil.revsingle(repo, opts.get(b'rev'), None)
 
     ret = 1
     m = scmutil.match(
-        ctx, pats, opts, default='relglob', badfn=lambda x, y: False
+        ctx, pats, opts, default=b'relglob', badfn=lambda x, y: False
     )
 
-    ui.pager('locate')
+    ui.pager(b'locate')
     if ctx.rev() is None:
         # When run on the working copy, "locate" includes removed files, so
         # we get the list of files from the dirstate.
@@ -4355,7 +4453,7 @@
         filesgen = ctx.matches(m)
     uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=bool(pats))
     for abs in filesgen:
-        if opts.get('fullpath'):
+        if opts.get(b'fullpath'):
             ui.write(repo.wjoin(abs), end)
         else:
             ui.write(uipathfn(abs), end)
@@ -4365,74 +4463,91 @@
 
 
 @command(
-    'log|history',
+    b'log|history',
     [
         (
-            'f',
-            'follow',
+            b'f',
+            b'follow',
             None,
             _(
-                'follow changeset history, or file history across copies and renames'
+                b'follow changeset history, or file history across copies and renames'
             ),
         ),
         (
-            '',
-            'follow-first',
+            b'',
+            b'follow-first',
             None,
-            _('only follow the first parent of merge changesets (DEPRECATED)'),
+            _(b'only follow the first parent of merge changesets (DEPRECATED)'),
         ),
-        ('d', 'date', '', _('show revisions matching date spec'), _('DATE')),
-        ('C', 'copies', None, _('show copied files')),
         (
-            'k',
-            'keyword',
+            b'd',
+            b'date',
+            b'',
+            _(b'show revisions matching date spec'),
+            _(b'DATE'),
+        ),
+        (b'C', b'copies', None, _(b'show copied files')),
+        (
+            b'k',
+            b'keyword',
             [],
-            _('do case-insensitive search for a given text'),
-            _('TEXT'),
+            _(b'do case-insensitive search for a given text'),
+            _(b'TEXT'),
         ),
-        ('r', 'rev', [], _('show the specified revision or revset'), _('REV')),
         (
-            'L',
-            'line-range',
+            b'r',
+            b'rev',
             [],
-            _('follow line range of specified file (EXPERIMENTAL)'),
-            _('FILE,RANGE'),
+            _(b'show the specified revision or revset'),
+            _(b'REV'),
         ),
-        ('', 'removed', None, _('include revisions where files were removed')),
+        (
+            b'L',
+            b'line-range',
+            [],
+            _(b'follow line range of specified file (EXPERIMENTAL)'),
+            _(b'FILE,RANGE'),
+        ),
         (
-            'm',
-            'only-merges',
+            b'',
+            b'removed',
             None,
-            _('show only merges (DEPRECATED) (use -r "merge()" instead)'),
+            _(b'include revisions where files were removed'),
         ),
-        ('u', 'user', [], _('revisions committed by user'), _('USER')),
         (
-            '',
-            'only-branch',
+            b'm',
+            b'only-merges',
+            None,
+            _(b'show only merges (DEPRECATED) (use -r "merge()" instead)'),
+        ),
+        (b'u', b'user', [], _(b'revisions committed by user'), _(b'USER')),
+        (
+            b'',
+            b'only-branch',
             [],
             _(
-                'show only changesets within the given named branch (DEPRECATED)'
+                b'show only changesets within the given named branch (DEPRECATED)'
             ),
-            _('BRANCH'),
+            _(b'BRANCH'),
         ),
         (
-            'b',
-            'branch',
+            b'b',
+            b'branch',
             [],
-            _('show changesets within the given named branch'),
-            _('BRANCH'),
+            _(b'show changesets within the given named branch'),
+            _(b'BRANCH'),
         ),
         (
-            'P',
-            'prune',
+            b'P',
+            b'prune',
             [],
-            _('do not display revision or any of its ancestors'),
-            _('REV'),
+            _(b'do not display revision or any of its ancestors'),
+            _(b'REV'),
         ),
     ]
     + logopts
     + walkopts,
-    _('[OPTION]... [FILE]'),
+    _(b'[OPTION]... [FILE]'),
     helpcategory=command.CATEGORY_CHANGE_NAVIGATION,
     helpbasic=True,
     inferrepo=True,
@@ -4567,18 +4682,18 @@
 
     """
     opts = pycompat.byteskwargs(opts)
-    linerange = opts.get('line_range')
-
-    if linerange and not opts.get('follow'):
-        raise error.Abort(_('--line-range requires --follow'))
+    linerange = opts.get(b'line_range')
+
+    if linerange and not opts.get(b'follow'):
+        raise error.Abort(_(b'--line-range requires --follow'))
 
     if linerange and pats:
         # TODO: take pats as patterns with no line-range filter
         raise error.Abort(
-            _('FILE arguments are not compatible with --line-range option')
+            _(b'FILE arguments are not compatible with --line-range option')
         )
 
-    repo = scmutil.unhidehashlikerevs(repo, opts.get('rev'), 'nowarn')
+    repo = scmutil.unhidehashlikerevs(repo, opts.get(b'rev'), b'nowarn')
     revs, differ = logcmdutil.getrevs(repo, pats, opts)
     if linerange:
         # TODO: should follow file history from logcmdutil._initialrevs(),
@@ -4586,17 +4701,17 @@
         revs, differ = logcmdutil.getlinerangerevs(repo, revs, opts)
 
     getcopies = None
-    if opts.get('copies'):
+    if opts.get(b'copies'):
         endrev = None
         if revs:
             endrev = revs.max() + 1
         getcopies = scmutil.getcopiesfn(repo, endrev=endrev)
 
-    ui.pager('log')
+    ui.pager(b'log')
     displayer = logcmdutil.changesetdisplayer(
         ui, repo, opts, differ, buffered=True
     )
-    if opts.get('graph'):
+    if opts.get(b'graph'):
         displayfn = logcmdutil.displaygraphrevs
     else:
         displayfn = logcmdutil.displayrevs
@@ -4604,13 +4719,13 @@
 
 
 @command(
-    'manifest',
+    b'manifest',
     [
-        ('r', 'rev', '', _('revision to display'), _('REV')),
-        ('', 'all', False, _("list files from all revisions")),
+        (b'r', b'rev', b'', _(b'revision to display'), _(b'REV')),
+        (b'', b'all', False, _(b"list files from all revisions")),
     ]
     + formatteropts,
-    _('[-r REV]'),
+    _(b'[-r REV]'),
     helpcategory=command.CATEGORY_MAINTENANCE,
     intents={INTENT_READONLY},
 )
@@ -4630,67 +4745,67 @@
     Returns 0 on success.
     """
     opts = pycompat.byteskwargs(opts)
-    fm = ui.formatter('manifest', opts)
-
-    if opts.get('all'):
+    fm = ui.formatter(b'manifest', opts)
+
+    if opts.get(b'all'):
         if rev or node:
-            raise error.Abort(_("can't specify a revision with --all"))
+            raise error.Abort(_(b"can't specify a revision with --all"))
 
         res = set()
         for rev in repo:
             ctx = repo[rev]
             res |= set(ctx.files())
 
-        ui.pager('manifest')
+        ui.pager(b'manifest')
         for f in sorted(res):
             fm.startitem()
-            fm.write("path", '%s\n', f)
+            fm.write(b"path", b'%s\n', f)
         fm.end()
         return
 
     if rev and node:
-        raise error.Abort(_("please specify just one revision"))
+        raise error.Abort(_(b"please specify just one revision"))
 
     if not node:
         node = rev
 
-    char = {'l': '@', 'x': '*', '': '', 't': 'd'}
-    mode = {'l': '644', 'x': '755', '': '644', 't': '755'}
+    char = {b'l': b'@', b'x': b'*', b'': b'', b't': b'd'}
+    mode = {b'l': b'644', b'x': b'755', b'': b'644', b't': b'755'}
     if node:
-        repo = scmutil.unhidehashlikerevs(repo, [node], 'nowarn')
+        repo = scmutil.unhidehashlikerevs(repo, [node], b'nowarn')
     ctx = scmutil.revsingle(repo, node)
     mf = ctx.manifest()
-    ui.pager('manifest')
+    ui.pager(b'manifest')
     for f in ctx:
         fm.startitem()
         fm.context(ctx=ctx)
         fl = ctx[f].flags()
-        fm.condwrite(ui.debugflag, 'hash', '%s ', hex(mf[f]))
-        fm.condwrite(ui.verbose, 'mode type', '%s %1s ', mode[fl], char[fl])
-        fm.write('path', '%s\n', f)
+        fm.condwrite(ui.debugflag, b'hash', b'%s ', hex(mf[f]))
+        fm.condwrite(ui.verbose, b'mode type', b'%s %1s ', mode[fl], char[fl])
+        fm.write(b'path', b'%s\n', f)
     fm.end()
 
 
 @command(
-    'merge',
+    b'merge',
     [
         (
-            'f',
-            'force',
+            b'f',
+            b'force',
             None,
-            _('force a merge including outstanding changes (DEPRECATED)'),
+            _(b'force a merge including outstanding changes (DEPRECATED)'),
         ),
-        ('r', 'rev', '', _('revision to merge'), _('REV')),
+        (b'r', b'rev', b'', _(b'revision to merge'), _(b'REV')),
         (
-            'P',
-            'preview',
+            b'P',
+            b'preview',
             None,
-            _('review revisions to merge (no merge is performed)'),
+            _(b'review revisions to merge (no merge is performed)'),
         ),
-        ('', 'abort', None, _('abort the ongoing merge')),
+        (b'', b'abort', None, _(b'abort the ongoing merge')),
     ]
     + mergetoolopts,
-    _('[-P] [[-r] REV]'),
+    _(b'[-P] [[-r] REV]'),
     helpcategory=command.CATEGORY_CHANGE_MANAGEMENT,
     helpbasic=True,
 )
@@ -4724,26 +4839,26 @@
     """
 
     opts = pycompat.byteskwargs(opts)
-    abort = opts.get('abort')
+    abort = opts.get(b'abort')
     if abort and repo.dirstate.p2() == nullid:
-        cmdutil.wrongtooltocontinue(repo, _('merge'))
+        cmdutil.wrongtooltocontinue(repo, _(b'merge'))
     if abort:
         state = cmdutil.getunfinishedstate(repo)
-        if state and state._opname != 'merge':
+        if state and state._opname != b'merge':
             raise error.Abort(
-                _('cannot abort merge with %s in progress') % (state._opname),
+                _(b'cannot abort merge with %s in progress') % (state._opname),
                 hint=state.hint(),
             )
         if node:
-            raise error.Abort(_("cannot specify a node with --abort"))
-        if opts.get('rev'):
-            raise error.Abort(_("cannot specify both --rev and --abort"))
-        if opts.get('preview'):
-            raise error.Abort(_("cannot specify --preview with --abort"))
-    if opts.get('rev') and node:
-        raise error.Abort(_("please specify just one revision"))
+            raise error.Abort(_(b"cannot specify a node with --abort"))
+        if opts.get(b'rev'):
+            raise error.Abort(_(b"cannot specify both --rev and --abort"))
+        if opts.get(b'preview'):
+            raise error.Abort(_(b"cannot specify --preview with --abort"))
+    if opts.get(b'rev') and node:
+        raise error.Abort(_(b"please specify just one revision"))
     if not node:
-        node = opts.get('rev')
+        node = opts.get(b'rev')
 
     if node:
         node = scmutil.revsingle(repo, node).node()
@@ -4751,9 +4866,9 @@
     if not node and not abort:
         node = repo[destutil.destmerge(repo)].node()
 
-    if opts.get('preview'):
+    if opts.get(b'preview'):
         # find nodes that are ancestors of p2 but not of p1
-        p1 = repo.lookup('.')
+        p1 = repo.lookup(b'.')
         p2 = node
         nodes = repo.changelog.findmissing(common=[p1], heads=[p2])
 
@@ -4764,10 +4879,10 @@
         return 0
 
     # ui.forcemerge is an internal variable, do not document
-    overrides = {('ui', 'forcemerge'): opts.get('tool', '')}
-    with ui.configoverride(overrides, 'merge'):
-        force = opts.get('force')
-        labels = ['working copy', 'merge rev']
+    overrides = {(b'ui', b'forcemerge'): opts.get(b'tool', b'')}
+    with ui.configoverride(overrides, b'merge'):
+        force = opts.get(b'force')
+        labels = [b'working copy', b'merge rev']
         return hg.merge(
             repo,
             node,
@@ -4779,44 +4894,49 @@
 
 
 statemod.addunfinished(
-    'merge',
+    b'merge',
     fname=None,
     clearable=True,
     allowcommit=True,
-    cmdmsg=_('outstanding uncommitted merge'),
+    cmdmsg=_(b'outstanding uncommitted merge'),
     abortfunc=hg.abortmerge,
     statushint=_(
-        'To continue:    hg commit\n' 'To abort:       hg merge --abort'
+        b'To continue:    hg commit\n' b'To abort:       hg merge --abort'
     ),
-    cmdhint=_("use 'hg commit' or 'hg merge --abort'"),
+    cmdhint=_(b"use 'hg commit' or 'hg merge --abort'"),
 )
 
 
 @command(
-    'outgoing|out',
+    b'outgoing|out',
     [
-        ('f', 'force', None, _('run even when the destination is unrelated')),
         (
-            'r',
-            'rev',
+            b'f',
+            b'force',
+            None,
+            _(b'run even when the destination is unrelated'),
+        ),
+        (
+            b'r',
+            b'rev',
             [],
-            _('a changeset intended to be included in the destination'),
-            _('REV'),
+            _(b'a changeset intended to be included in the destination'),
+            _(b'REV'),
         ),
-        ('n', 'newest-first', None, _('show newest record first')),
-        ('B', 'bookmarks', False, _('compare bookmarks')),
+        (b'n', b'newest-first', None, _(b'show newest record first')),
+        (b'B', b'bookmarks', False, _(b'compare bookmarks')),
         (
-            'b',
-            'branch',
+            b'b',
+            b'branch',
             [],
-            _('a specific branch you would like to push'),
-            _('BRANCH'),
+            _(b'a specific branch you would like to push'),
+            _(b'BRANCH'),
         ),
     ]
     + logopts
     + remoteopts
     + subrepoopts,
-    _('[-M] [-p] [-n] [-f] [-r REV]... [DEST]'),
+    _(b'[-M] [-p] [-n] [-f] [-r REV]... [DEST]'),
     helpcategory=command.CATEGORY_REMOTE_REPO_MANAGEMENT,
 )
 def outgoing(ui, repo, dest=None, **opts):
@@ -4857,15 +4977,15 @@
     """
     # hg._outgoing() needs to re-resolve the path in order to handle #branch
     # style URLs, so don't overwrite dest.
-    path = ui.paths.getpath(dest, default=('default-push', 'default'))
+    path = ui.paths.getpath(dest, default=(b'default-push', b'default'))
     if not path:
         raise error.Abort(
-            _('default repository not configured!'),
-            hint=_("see 'hg help config.paths'"),
+            _(b'default repository not configured!'),
+            hint=_(b"see 'hg help config.paths'"),
         )
 
     opts = pycompat.byteskwargs(opts)
-    if opts.get('graph'):
+    if opts.get(b'graph'):
         logcmdutil.checkunsupportedgraphflags([], opts)
         o, other = hg._outgoing(ui, repo, dest, opts)
         if not o:
@@ -4873,7 +4993,7 @@
             return
 
         revdag = logcmdutil.graphrevs(repo, o, opts)
-        ui.pager('outgoing')
+        ui.pager(b'outgoing')
         displayer = logcmdutil.changesetdisplayer(ui, repo, opts, buffered=True)
         logcmdutil.displaygraph(
             ui, repo, revdag, displayer, graphmod.asciiedges
@@ -4881,14 +5001,14 @@
         cmdutil.outgoinghooks(ui, repo, other, opts, o)
         return 0
 
-    if opts.get('bookmarks'):
+    if opts.get(b'bookmarks'):
         dest = path.pushloc or path.loc
         other = hg.peer(repo, opts, dest)
-        if 'bookmarks' not in other.listkeys('namespaces'):
-            ui.warn(_("remote doesn't support bookmarks\n"))
+        if b'bookmarks' not in other.listkeys(b'namespaces'):
+            ui.warn(_(b"remote doesn't support bookmarks\n"))
             return 0
-        ui.status(_('comparing with %s\n') % util.hidepassword(dest))
-        ui.pager('outgoing')
+        ui.status(_(b'comparing with %s\n') % util.hidepassword(dest))
+        ui.pager(b'outgoing')
         return bookmarks.outgoing(ui, repo, other)
 
     repo._subtoppath = path.pushloc or path.loc
@@ -4899,10 +5019,18 @@
 
 
 @command(
-    'parents',
-    [('r', 'rev', '', _('show parents of the specified revision'), _('REV')),]
+    b'parents',
+    [
+        (
+            b'r',
+            b'rev',
+            b'',
+            _(b'show parents of the specified revision'),
+            _(b'REV'),
+        ),
+    ]
     + templateopts,
-    _('[-r REV] [FILE]'),
+    _(b'[-r REV] [FILE]'),
     helpcategory=command.CATEGORY_CHANGE_NAVIGATION,
     inferrepo=True,
 )
@@ -4928,15 +5056,15 @@
     """
 
     opts = pycompat.byteskwargs(opts)
-    rev = opts.get('rev')
+    rev = opts.get(b'rev')
     if rev:
-        repo = scmutil.unhidehashlikerevs(repo, [rev], 'nowarn')
+        repo = scmutil.unhidehashlikerevs(repo, [rev], b'nowarn')
     ctx = scmutil.revsingle(repo, rev, None)
 
     if file_:
         m = scmutil.match(ctx, (file_,), opts)
         if m.anypats() or len(m.files()) != 1:
-            raise error.Abort(_('can only specify an explicit filename'))
+            raise error.Abort(_(b'can only specify an explicit filename'))
         file_ = m.files()[0]
         filenodes = []
         for cp in ctx.parents():
@@ -4947,7 +5075,7 @@
             except error.LookupError:
                 pass
         if not filenodes:
-            raise error.Abort(_("'%s' not found in manifest!") % file_)
+            raise error.Abort(_(b"'%s' not found in manifest!") % file_)
         p = []
         for fn in filenodes:
             fctx = repo.filectx(file_, fileid=fn)
@@ -4963,9 +5091,9 @@
 
 
 @command(
-    'paths',
+    b'paths',
     formatteropts,
-    _('[NAME]'),
+    _(b'[NAME]'),
     helpcategory=command.CATEGORY_REMOTE_REPO_MANAGEMENT,
     optionalrepo=True,
     intents={INTENT_READONLY},
@@ -5013,7 +5141,7 @@
     """
 
     opts = pycompat.byteskwargs(opts)
-    ui.pager('paths')
+    ui.pager(b'paths')
     if search:
         pathitems = [
             (name, path)
@@ -5023,47 +5151,47 @@
     else:
         pathitems = sorted(ui.paths.iteritems())
 
-    fm = ui.formatter('paths', opts)
+    fm = ui.formatter(b'paths', opts)
     if fm.isplain():
         hidepassword = util.hidepassword
     else:
         hidepassword = bytes
     if ui.quiet:
-        namefmt = '%s\n'
+        namefmt = b'%s\n'
     else:
-        namefmt = '%s = '
+        namefmt = b'%s = '
     showsubopts = not search and not ui.quiet
 
     for name, path in pathitems:
         fm.startitem()
-        fm.condwrite(not search, 'name', namefmt, name)
-        fm.condwrite(not ui.quiet, 'url', '%s\n', hidepassword(path.rawloc))
+        fm.condwrite(not search, b'name', namefmt, name)
+        fm.condwrite(not ui.quiet, b'url', b'%s\n', hidepassword(path.rawloc))
         for subopt, value in sorted(path.suboptions.items()):
-            assert subopt not in ('name', 'url')
+            assert subopt not in (b'name', b'url')
             if showsubopts:
-                fm.plain('%s:%s = ' % (name, subopt))
-            fm.condwrite(showsubopts, subopt, '%s\n', value)
+                fm.plain(b'%s:%s = ' % (name, subopt))
+            fm.condwrite(showsubopts, subopt, b'%s\n', value)
 
     fm.end()
 
     if search and not pathitems:
         if not ui.quiet:
-            ui.warn(_("not found!\n"))
+            ui.warn(_(b"not found!\n"))
         return 1
     else:
         return 0
 
 
 @command(
-    'phase',
+    b'phase',
     [
-        ('p', 'public', False, _('set changeset phase to public')),
-        ('d', 'draft', False, _('set changeset phase to draft')),
-        ('s', 'secret', False, _('set changeset phase to secret')),
-        ('f', 'force', False, _('allow moving the boundary backward')),
-        ('r', 'rev', [], _('target revision'), _('REV')),
+        (b'p', b'public', False, _(b'set changeset phase to public')),
+        (b'd', b'draft', False, _(b'set changeset phase to draft')),
+        (b's', b'secret', False, _(b'set changeset phase to secret')),
+        (b'f', b'force', False, _(b'allow moving the boundary backward')),
+        (b'r', b'rev', [], _(b'target revision'), _(b'REV')),
     ],
-    _('[-p|-d|-s] [-f] [-r] [REV...]'),
+    _(b'[-p|-d|-s] [-f] [-r] [REV...]'),
     helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
 )
 def phase(ui, repo, *revs, **opts):
@@ -5089,12 +5217,12 @@
     for idx, name in enumerate(phases.cmdphasenames):
         if opts[name]:
             if targetphase is not None:
-                raise error.Abort(_('only one phase can be specified'))
+                raise error.Abort(_(b'only one phase can be specified'))
             targetphase = idx
 
     # look for specified revision
     revs = list(revs)
-    revs.extend(opts['rev'])
+    revs.extend(opts[b'rev'])
     if not revs:
         # display both parents as the second parent phase can influence
         # the phase of a merge commit
@@ -5107,12 +5235,12 @@
         # display
         for r in revs:
             ctx = repo[r]
-            ui.write('%i: %s\n' % (ctx.rev(), ctx.phasestr()))
+            ui.write(b'%i: %s\n' % (ctx.rev(), ctx.phasestr()))
     else:
-        with repo.lock(), repo.transaction("phase") as tr:
+        with repo.lock(), repo.transaction(b"phase") as tr:
             # set phase
             if not revs:
-                raise error.Abort(_('empty revision set'))
+                raise error.Abort(_(b'empty revision set'))
             nodes = [repo[r].node() for r in revs]
             # moving revision from public to draft may hide them
             # We have to check result on an unfiltered repository
@@ -5120,7 +5248,7 @@
             getphase = unfi._phasecache.phase
             olddata = [getphase(unfi, r) for r in unfi]
             phases.advanceboundary(repo, tr, targetphase, nodes)
-            if opts['force']:
+            if opts[b'force']:
                 phases.retractboundary(repo, tr, targetphase, nodes)
         getphase = unfi._phasecache.phase
         newdata = [getphase(unfi, r) for r in unfi]
@@ -5130,20 +5258,20 @@
         if rejected:
             ui.warn(
                 _(
-                    'cannot move %i changesets to a higher '
-                    'phase, use --force\n'
+                    b'cannot move %i changesets to a higher '
+                    b'phase, use --force\n'
                 )
                 % len(rejected)
             )
             ret = 1
         if changes:
-            msg = _('phase changed for %i changesets\n') % changes
+            msg = _(b'phase changed for %i changesets\n') % changes
             if ret:
                 ui.status(msg)
             else:
                 ui.note(msg)
         else:
-            ui.warn(_('no phases changed\n'))
+            ui.warn(_(b'no phases changed\n'))
     return ret
 
 
@@ -5163,55 +5291,62 @@
         try:
             return hg.updatetotally(ui, repo, checkout, brev)
         except error.UpdateAbort as inst:
-            msg = _("not updating: %s") % stringutil.forcebytestr(inst)
+            msg = _(b"not updating: %s") % stringutil.forcebytestr(inst)
             hint = inst.hint
             raise error.UpdateAbort(msg, hint=hint)
     if modheads is not None and modheads > 1:
         currentbranchheads = len(repo.branchheads())
         if currentbranchheads == modheads:
-            ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
+            ui.status(
+                _(b"(run 'hg heads' to see heads, 'hg merge' to merge)\n")
+            )
         elif currentbranchheads > 1:
             ui.status(
-                _("(run 'hg heads .' to see heads, 'hg merge' to " "merge)\n")
+                _(b"(run 'hg heads .' to see heads, 'hg merge' to " b"merge)\n")
             )
         else:
-            ui.status(_("(run 'hg heads' to see heads)\n"))
-    elif not ui.configbool('commands', 'update.requiredest'):
-        ui.status(_("(run 'hg update' to get a working copy)\n"))
+            ui.status(_(b"(run 'hg heads' to see heads)\n"))
+    elif not ui.configbool(b'commands', b'update.requiredest'):
+        ui.status(_(b"(run 'hg update' to get a working copy)\n"))
 
 
 @command(
-    'pull',
+    b'pull',
     [
         (
-            'u',
-            'update',
+            b'u',
+            b'update',
             None,
-            _('update to new branch head if new descendants were pulled'),
+            _(b'update to new branch head if new descendants were pulled'),
         ),
-        ('f', 'force', None, _('run even when remote repository is unrelated')),
         (
-            'r',
-            'rev',
+            b'f',
+            b'force',
+            None,
+            _(b'run even when remote repository is unrelated'),
+        ),
+        (
+            b'r',
+            b'rev',
             [],
-            _('a remote changeset intended to be added'),
-            _('REV'),
+            _(b'a remote changeset intended to be added'),
+            _(b'REV'),
         ),
-        ('B', 'bookmark', [], _("bookmark to pull"), _('BOOKMARK')),
+        (b'B', b'bookmark', [], _(b"bookmark to pull"), _(b'BOOKMARK')),
         (
-            'b',
-            'branch',
+            b'b',
+            b'branch',
             [],
-            _('a specific branch you would like to pull'),
-            _('BRANCH'),
+            _(b'a specific branch you would like to pull'),
+            _(b'BRANCH'),
         ),
     ]
     + remoteopts,
-    _('[-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]'),
+    _(b'[-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]'),
     helpcategory=command.CATEGORY_REMOTE_REPO_MANAGEMENT,
     helpbasic=True,
 )
-def pull(ui, repo, source="default", **opts):
+def pull(ui, repo, source=b"default", **opts):
     """pull changes from the specified source
 
     Pull changes from a remote repository to a local one.
@@ -5242,23 +5377,25 @@
     """
 
     opts = pycompat.byteskwargs(opts)
-    if ui.configbool('commands', 'update.requiredest') and opts.get('update'):
-        msg = _('update destination required by configuration')
-        hint = _('use hg pull followed by hg update DEST')
+    if ui.configbool(b'commands', b'update.requiredest') and opts.get(
+        b'update'
+    ):
+        msg = _(b'update destination required by configuration')
+        hint = _(b'use hg pull followed by hg update DEST')
         raise error.Abort(msg, hint=hint)
 
-    source, branches = hg.parseurl(ui.expandpath(source), opts.get('branch'))
-    ui.status(_('pulling from %s\n') % util.hidepassword(source))
+    source, branches = hg.parseurl(ui.expandpath(source), opts.get(b'branch'))
+    ui.status(_(b'pulling from %s\n') % util.hidepassword(source))
     other = hg.peer(repo, opts, source)
     try:
         revs, checkout = hg.addbranchrevs(
-            repo, other, branches, opts.get('rev')
+            repo, other, branches, opts.get(b'rev')
         )
 
         pullopargs = {}
 
         nodes = None
-        if opts.get('bookmark') or revs:
+        if opts.get(b'bookmark') or revs:
             # The list of bookmarks used here is the same one used to actually update
             # the bookmark names, to avoid the race from issue 4689 and we do
             # all lookup and bookmark queries in one go so they see the same
@@ -5266,25 +5403,25 @@
             nodes = []
             fnodes = []
             revs = revs or []
-            if revs and not other.capable('lookup'):
+            if revs and not other.capable(b'lookup'):
                 err = _(
-                    "other repository doesn't support revision lookup, "
-                    "so a rev cannot be specified."
+                    b"other repository doesn't support revision lookup, "
+                    b"so a rev cannot be specified."
                 )
                 raise error.Abort(err)
             with other.commandexecutor() as e:
                 fremotebookmarks = e.callcommand(
-                    'listkeys', {'namespace': 'bookmarks'}
+                    b'listkeys', {b'namespace': b'bookmarks'}
                 )
                 for r in revs:
-                    fnodes.append(e.callcommand('lookup', {'key': r}))
+                    fnodes.append(e.callcommand(b'lookup', {b'key': r}))
             remotebookmarks = fremotebookmarks.result()
             remotebookmarks = bookmarks.unhexlifybookmarks(remotebookmarks)
-            pullopargs['remotebookmarks'] = remotebookmarks
-            for b in opts.get('bookmark', []):
+            pullopargs[b'remotebookmarks'] = remotebookmarks
+            for b in opts.get(b'bookmark', []):
                 b = repo._bookmarks.expandname(b)
                 if b not in remotebookmarks:
-                    raise error.Abort(_('remote bookmark %s not found!') % b)
+                    raise error.Abort(_(b'remote bookmark %s not found!') % b)
                 nodes.append(remotebookmarks[b])
             for i, rev in enumerate(revs):
                 node = fnodes[i].result()
@@ -5293,16 +5430,16 @@
                     checkout = node
 
         wlock = util.nullcontextmanager()
-        if opts.get('update'):
+        if opts.get(b'update'):
             wlock = repo.wlock()
         with wlock:
-            pullopargs.update(opts.get('opargs', {}))
+            pullopargs.update(opts.get(b'opargs', {}))
             modheads = exchange.pull(
                 repo,
                 other,
                 heads=nodes,
-                force=opts.get('force'),
-                bookmarks=opts.get('bookmark', ()),
+                force=opts.get(b'force'),
+                bookmarks=opts.get(b'bookmark', ()),
                 opargs=pullopargs,
             ).cgresult
 
@@ -5317,19 +5454,19 @@
                 # order below depends on implementation of
                 # hg.addbranchrevs(). opts['bookmark'] is ignored,
                 # because 'checkout' is determined without it.
-                if opts.get('rev'):
-                    brev = opts['rev'][0]
-                elif opts.get('branch'):
-                    brev = opts['branch'][0]
+                if opts.get(b'rev'):
+                    brev = opts[b'rev'][0]
+                elif opts.get(b'branch'):
+                    brev = opts[b'branch'][0]
                 else:
                     brev = branches[0]
             repo._subtoppath = source
             try:
                 ret = postincoming(
-                    ui, repo, modheads, opts.get('update'), checkout, brev
+                    ui, repo, modheads, opts.get(b'update'), checkout, brev
                 )
             except error.FilteredRepoLookupError as exc:
-                msg = _('cannot update to target: %s') % exc.args[0]
+                msg = _(b'cannot update to target: %s') % exc.args[0]
                 exc.args = (msg,) + exc.args[1:]
                 raise
             finally:
@@ -5341,40 +5478,40 @@
 
 
 @command(
-    'push',
+    b'push',
     [
-        ('f', 'force', None, _('force push')),
+        (b'f', b'force', None, _(b'force push')),
         (
-            'r',
-            'rev',
+            b'r',
+            b'rev',
             [],
-            _('a changeset intended to be included in the destination'),
-            _('REV'),
+            _(b'a changeset intended to be included in the destination'),
+            _(b'REV'),
         ),
-        ('B', 'bookmark', [], _("bookmark to push"), _('BOOKMARK')),
+        (b'B', b'bookmark', [], _(b"bookmark to push"), _(b'BOOKMARK')),
         (
-            'b',
-            'branch',
+            b'b',
+            b'branch',
             [],
-            _('a specific branch you would like to push'),
-            _('BRANCH'),
+            _(b'a specific branch you would like to push'),
+            _(b'BRANCH'),
         ),
-        ('', 'new-branch', False, _('allow pushing a new branch')),
+        (b'', b'new-branch', False, _(b'allow pushing a new branch')),
         (
-            '',
-            'pushvars',
+            b'',
+            b'pushvars',
             [],
-            _('variables that can be sent to server (ADVANCED)'),
+            _(b'variables that can be sent to server (ADVANCED)'),
         ),
         (
-            '',
-            'publish',
+            b'',
+            b'publish',
             False,
-            _('push the changeset as public (EXPERIMENTAL)'),
+            _(b'push the changeset as public (EXPERIMENTAL)'),
         ),
     ]
     + remoteopts,
-    _('[-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]'),
+    _(b'[-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]'),
     helpcategory=command.CATEGORY_REMOTE_REPO_MANAGEMENT,
     helpbasic=True,
 )
@@ -5436,52 +5573,52 @@
     """
 
     opts = pycompat.byteskwargs(opts)
-    if opts.get('bookmark'):
-        ui.setconfig('bookmarks', 'pushing', opts['bookmark'], 'push')
-        for b in opts['bookmark']:
+    if opts.get(b'bookmark'):
+        ui.setconfig(b'bookmarks', b'pushing', opts[b'bookmark'], b'push')
+        for b in opts[b'bookmark']:
             # translate -B options to -r so changesets get pushed
             b = repo._bookmarks.expandname(b)
             if b in repo._bookmarks:
-                opts.setdefault('rev', []).append(b)
+                opts.setdefault(b'rev', []).append(b)
             else:
                 # if we try to push a deleted bookmark, translate it to null
                 # this lets simultaneous -r, -b options continue working
-                opts.setdefault('rev', []).append("null")
-
-    path = ui.paths.getpath(dest, default=('default-push', 'default'))
+                opts.setdefault(b'rev', []).append(b"null")
+
+    path = ui.paths.getpath(dest, default=(b'default-push', b'default'))
     if not path:
         raise error.Abort(
-            _('default repository not configured!'),
-            hint=_("see 'hg help config.paths'"),
+            _(b'default repository not configured!'),
+            hint=_(b"see 'hg help config.paths'"),
         )
     dest = path.pushloc or path.loc
-    branches = (path.branch, opts.get('branch') or [])
-    ui.status(_('pushing to %s\n') % util.hidepassword(dest))
-    revs, checkout = hg.addbranchrevs(repo, repo, branches, opts.get('rev'))
+    branches = (path.branch, opts.get(b'branch') or [])
+    ui.status(_(b'pushing to %s\n') % util.hidepassword(dest))
+    revs, checkout = hg.addbranchrevs(repo, repo, branches, opts.get(b'rev'))
     other = hg.peer(repo, opts, dest)
 
     if revs:
         revs = [repo[r].node() for r in scmutil.revrange(repo, revs)]
         if not revs:
             raise error.Abort(
-                _("specified revisions evaluate to an empty set"),
-                hint=_("use different revision arguments"),
+                _(b"specified revisions evaluate to an empty set"),
+                hint=_(b"use different revision arguments"),
             )
     elif path.pushrev:
         # It doesn't make any sense to specify ancestor revisions. So limit
         # to DAG heads to make discovery simpler.
-        expr = revsetlang.formatspec('heads(%r)', path.pushrev)
+        expr = revsetlang.formatspec(b'heads(%r)', path.pushrev)
         revs = scmutil.revrange(repo, [expr])
         revs = [repo[rev].node() for rev in revs]
         if not revs:
             raise error.Abort(
-                _('default push revset for path evaluates to an ' 'empty set')
+                _(b'default push revset for path evaluates to an ' b'empty set')
             )
 
     repo._subtoppath = dest
     try:
         # push subrepos depth-first for coherent ordering
-        c = repo['.']
+        c = repo[b'.']
         subs = c.substate  # only repos that are committed
         for s in sorted(subs):
             result = c.sub(s).push(opts)
@@ -5490,17 +5627,17 @@
     finally:
         del repo._subtoppath
 
-    opargs = dict(opts.get('opargs', {}))  # copy opargs since we may mutate it
-    opargs.setdefault('pushvars', []).extend(opts.get('pushvars', []))
+    opargs = dict(opts.get(b'opargs', {}))  # copy opargs since we may mutate it
+    opargs.setdefault(b'pushvars', []).extend(opts.get(b'pushvars', []))
 
     pushop = exchange.push(
         repo,
         other,
-        opts.get('force'),
+        opts.get(b'force'),
         revs=revs,
-        newbranch=opts.get('new_branch'),
-        bookmarks=opts.get('bookmark', ()),
-        publish=opts.get('publish'),
+        newbranch=opts.get(b'new_branch'),
+        bookmarks=opts.get(b'bookmark', ()),
+        publish=opts.get(b'publish'),
         opargs=opargs,
     )
 
@@ -5516,8 +5653,8 @@
 
 
 @command(
-    'recover',
-    [('', 'verify', True, "run `hg verify` after successful recover"),],
+    b'recover',
+    [(b'', b'verify', True, b"run `hg verify` after successful recover"),],
     helpcategory=command.CATEGORY_MAINTENANCE,
 )
 def recover(ui, repo, **opts):
@@ -5537,8 +5674,8 @@
             return hg.verify(repo)
         else:
             msg = _(
-                "(verify step skipped, run  `hg verify` to check your "
-                "repository content)\n"
+                b"(verify step skipped, run  `hg verify` to check your "
+                b"repository content)\n"
             )
             ui.warn(msg)
             return 0
@@ -5546,15 +5683,15 @@
 
 
 @command(
-    'remove|rm',
+    b'remove|rm',
     [
-        ('A', 'after', None, _('record delete for missing files')),
-        ('f', 'force', None, _('forget added files, delete modified files')),
+        (b'A', b'after', None, _(b'record delete for missing files')),
+        (b'f', b'force', None, _(b'forget added files, delete modified files')),
     ]
     + subrepoopts
     + walkopts
     + dryrunopts,
-    _('[OPTION]... FILE...'),
+    _(b'[OPTION]... FILE...'),
     helpcategory=command.CATEGORY_WORKING_DIRECTORY,
     helpbasic=True,
     inferrepo=True,
@@ -5599,28 +5736,33 @@
     """
 
     opts = pycompat.byteskwargs(opts)
-    after, force = opts.get('after'), opts.get('force')
-    dryrun = opts.get('dry_run')
+    after, force = opts.get(b'after'), opts.get(b'force')
+    dryrun = opts.get(b'dry_run')
     if not pats and not after:
-        raise error.Abort(_('no files specified'))
+        raise error.Abort(_(b'no files specified'))
 
     m = scmutil.match(repo[None], pats, opts)
-    subrepos = opts.get('subrepos')
+    subrepos = opts.get(b'subrepos')
     uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
     return cmdutil.remove(
-        ui, repo, m, "", uipathfn, after, force, subrepos, dryrun=dryrun
+        ui, repo, m, b"", uipathfn, after, force, subrepos, dryrun=dryrun
     )
 
 
 @command(
-    'rename|move|mv',
+    b'rename|move|mv',
     [
-        ('A', 'after', None, _('record a rename that has already occurred')),
-        ('f', 'force', None, _('forcibly move over an existing managed file')),
+        (b'A', b'after', None, _(b'record a rename that has already occurred')),
+        (
+            b'f',
+            b'force',
+            None,
+            _(b'forcibly move over an existing managed file'),
+        ),
     ]
     + walkopts
     + dryrunopts,
-    _('[OPTION]... SOURCE... DEST'),
+    _(b'[OPTION]... SOURCE... DEST'),
     helpcategory=command.CATEGORY_WORKING_DIRECTORY,
 )
 def rename(ui, repo, *pats, **opts):
@@ -5645,19 +5787,19 @@
 
 
 @command(
-    'resolve',
+    b'resolve',
     [
-        ('a', 'all', None, _('select all unresolved files')),
-        ('l', 'list', None, _('list state of files needing merge')),
-        ('m', 'mark', None, _('mark files as resolved')),
-        ('u', 'unmark', None, _('mark files as unresolved')),
-        ('n', 'no-status', None, _('hide status prefix')),
-        ('', 're-merge', None, _('re-merge files')),
+        (b'a', b'all', None, _(b'select all unresolved files')),
+        (b'l', b'list', None, _(b'list state of files needing merge')),
+        (b'm', b'mark', None, _(b'mark files as resolved')),
+        (b'u', b'unmark', None, _(b'mark files as unresolved')),
+        (b'n', b'no-status', None, _(b'hide status prefix')),
+        (b'', b're-merge', None, _(b're-merge files')),
     ]
     + mergetoolopts
     + walkopts
     + formatteropts,
-    _('[OPTION]... [FILE]...'),
+    _(b'[OPTION]... [FILE]...'),
     helpcategory=command.CATEGORY_WORKING_DIRECTORY,
     inferrepo=True,
 )
@@ -5714,24 +5856,24 @@
     """
 
     opts = pycompat.byteskwargs(opts)
-    confirm = ui.configbool('commands', 'resolve.confirm')
-    flaglist = 'all mark unmark list no_status re_merge'.split()
+    confirm = ui.configbool(b'commands', b'resolve.confirm')
+    flaglist = b'all mark unmark list no_status re_merge'.split()
     all, mark, unmark, show, nostatus, remerge = [opts.get(o) for o in flaglist]
 
     actioncount = len(list(filter(None, [show, mark, unmark, remerge])))
     if actioncount > 1:
-        raise error.Abort(_("too many actions specified"))
+        raise error.Abort(_(b"too many actions specified"))
     elif actioncount == 0 and ui.configbool(
-        'commands', 'resolve.explicit-re-merge'
+        b'commands', b'resolve.explicit-re-merge'
     ):
-        hint = _('use --mark, --unmark, --list or --re-merge')
-        raise error.Abort(_('no action specified'), hint=hint)
+        hint = _(b'use --mark, --unmark, --list or --re-merge')
+        raise error.Abort(_(b'no action specified'), hint=hint)
     if pats and all:
-        raise error.Abort(_("can't specify --all and patterns"))
+        raise error.Abort(_(b"can't specify --all and patterns"))
     if not (all or pats or show or mark or unmark):
         raise error.Abort(
-            _('no files or directories specified'),
-            hint='use --all to re-merge all unresolved files',
+            _(b'no files or directories specified'),
+            hint=b'use --all to re-merge all unresolved files',
         )
 
     if confirm:
@@ -5739,7 +5881,7 @@
             if ui.promptchoice(
                 _(b're-merge all unresolved files (yn)?' b'$$ &Yes $$ &No')
             ):
-                raise error.Abort(_('user quit'))
+                raise error.Abort(_(b'user quit'))
         if mark and not pats:
             if ui.promptchoice(
                 _(
@@ -5747,7 +5889,7 @@
                     b'$$ &Yes $$ &No'
                 )
             ):
-                raise error.Abort(_('user quit'))
+                raise error.Abort(_(b'user quit'))
         if unmark and not pats:
             if ui.promptchoice(
                 _(
@@ -5755,13 +5897,13 @@
                     b'$$ &Yes $$ &No'
                 )
             ):
-                raise error.Abort(_('user quit'))
+                raise error.Abort(_(b'user quit'))
 
     uipathfn = scmutil.getuipathfn(repo)
 
     if show:
-        ui.pager('resolve')
-        fm = ui.formatter('resolve', opts)
+        ui.pager(b'resolve')
+        fm = ui.formatter(b'resolve', opts)
         ms = mergemod.mergestate.read(repo)
         wctx = repo[None]
         m = scmutil.match(wctx, pats, opts)
@@ -5770,13 +5912,16 @@
         # as 'P'.  Resolved path conflicts show as 'R', the same as normal
         # resolved conflicts.
         mergestateinfo = {
-            mergemod.MERGE_RECORD_UNRESOLVED: ('resolve.unresolved', 'U'),
-            mergemod.MERGE_RECORD_RESOLVED: ('resolve.resolved', 'R'),
-            mergemod.MERGE_RECORD_UNRESOLVED_PATH: ('resolve.unresolved', 'P'),
-            mergemod.MERGE_RECORD_RESOLVED_PATH: ('resolve.resolved', 'R'),
+            mergemod.MERGE_RECORD_UNRESOLVED: (b'resolve.unresolved', b'U'),
+            mergemod.MERGE_RECORD_RESOLVED: (b'resolve.resolved', b'R'),
+            mergemod.MERGE_RECORD_UNRESOLVED_PATH: (
+                b'resolve.unresolved',
+                b'P',
+            ),
+            mergemod.MERGE_RECORD_RESOLVED_PATH: (b'resolve.resolved', b'R'),
             mergemod.MERGE_RECORD_DRIVER_RESOLVED: (
-                'resolve.driverresolved',
-                'D',
+                b'resolve.driverresolved',
+                b'D',
             ),
         }
 
@@ -5787,9 +5932,9 @@
             label, key = mergestateinfo[ms[f]]
             fm.startitem()
             fm.context(ctx=wctx)
-            fm.condwrite(not nostatus, 'mergestatus', '%s ', key, label=label)
+            fm.condwrite(not nostatus, b'mergestatus', b'%s ', key, label=label)
             fm.data(path=f)
-            fm.plain('%s\n' % uipathfn(f), label=label)
+            fm.plain(b'%s\n' % uipathfn(f), label=label)
         fm.end()
         return 0
 
@@ -5798,7 +5943,7 @@
 
         if not (ms.active() or repo.dirstate.p2() != nullid):
             raise error.Abort(
-                _('resolve command not applicable when not merging')
+                _(b'resolve command not applicable when not merging')
             )
 
         wctx = repo[None]
@@ -5821,8 +5966,8 @@
         tocomplete = []
         hasconflictmarkers = []
         if mark:
-            markcheck = ui.config('commands', 'resolve.mark-check')
-            if markcheck not in ['warn', 'abort']:
+            markcheck = ui.config(b'commands', b'resolve.mark-check')
+            if markcheck not in [b'warn', b'abort']:
                 # Treat all invalid / unrecognized values as 'none'.
                 markcheck = False
         for f in ms:
@@ -5838,13 +5983,13 @@
                 if mark:
                     if exact:
                         ui.warn(
-                            _('not marking %s as it is driver-resolved\n')
+                            _(b'not marking %s as it is driver-resolved\n')
                             % uipathfn(f)
                         )
                 elif unmark:
                     if exact:
                         ui.warn(
-                            _('not unmarking %s as it is driver-resolved\n')
+                            _(b'not unmarking %s as it is driver-resolved\n')
                             % uipathfn(f)
                         )
                 else:
@@ -5862,7 +6007,7 @@
                     ms.mark(f, mergemod.MERGE_RECORD_UNRESOLVED_PATH)
                 elif ms[f] == mergemod.MERGE_RECORD_UNRESOLVED_PATH:
                     ui.warn(
-                        _('%s: path conflict must be resolved manually\n')
+                        _(b'%s: path conflict must be resolved manually\n')
                         % uipathfn(f)
                     )
                 continue
@@ -5882,15 +6027,15 @@
                 # backup pre-resolve (merge uses .orig for its own purposes)
                 a = repo.wjoin(f)
                 try:
-                    util.copyfile(a, a + ".resolve")
+                    util.copyfile(a, a + b".resolve")
                 except (IOError, OSError) as inst:
                     if inst.errno != errno.ENOENT:
                         raise
 
                 try:
                     # preresolve file
-                    overrides = {('ui', 'forcemerge'): opts.get('tool', '')}
-                    with ui.configoverride(overrides, 'resolve'):
+                    overrides = {(b'ui', b'forcemerge'): opts.get(b'tool', b'')}
+                    with ui.configoverride(overrides, b'resolve'):
                         complete, r = ms.preresolve(f, wctx)
                     if not complete:
                         tocomplete.append(f)
@@ -5904,7 +6049,7 @@
                 if complete:
                     try:
                         util.rename(
-                            a + ".resolve", scmutil.backuppath(ui, repo, f)
+                            a + b".resolve", scmutil.backuppath(ui, repo, f)
                         )
                     except OSError as inst:
                         if inst.errno != errno.ENOENT:
@@ -5913,22 +6058,24 @@
         if hasconflictmarkers:
             ui.warn(
                 _(
-                    'warning: the following files still have conflict '
-                    'markers:\n'
+                    b'warning: the following files still have conflict '
+                    b'markers:\n'
                 )
-                + ''.join('  ' + uipathfn(f) + '\n' for f in hasconflictmarkers)
+                + b''.join(
+                    b'  ' + uipathfn(f) + b'\n' for f in hasconflictmarkers
+                )
             )
-            if markcheck == 'abort' and not all and not pats:
+            if markcheck == b'abort' and not all and not pats:
                 raise error.Abort(
-                    _('conflict markers detected'),
-                    hint=_('use --all to mark anyway'),
+                    _(b'conflict markers detected'),
+                    hint=_(b'use --all to mark anyway'),
                 )
 
         for f in tocomplete:
             try:
                 # resolve file
-                overrides = {('ui', 'forcemerge'): opts.get('tool', '')}
-                with ui.configoverride(overrides, 'resolve'):
+                overrides = {(b'ui', b'forcemerge'): opts.get(b'tool', b'')}
+                with ui.configoverride(overrides, b'resolve'):
                     r = ms.resolve(f, wctx)
                 if r:
                     ret = 1
@@ -5938,7 +6085,7 @@
             # replace filemerge's .orig file with our resolve file
             a = repo.wjoin(f)
             try:
-                util.rename(a + ".resolve", scmutil.backuppath(ui, repo, f))
+                util.rename(a + b".resolve", scmutil.backuppath(ui, repo, f))
             except OSError as inst:
                 if inst.errno != errno.ENOENT:
                     raise
@@ -5948,28 +6095,28 @@
 
         if not didwork and pats:
             hint = None
-            if not any([p for p in pats if p.find(':') >= 0]):
-                pats = ['path:%s' % p for p in pats]
+            if not any([p for p in pats if p.find(b':') >= 0]):
+                pats = [b'path:%s' % p for p in pats]
                 m = scmutil.match(wctx, pats, opts)
                 for f in ms:
                     if not m(f):
                         continue
 
                     def flag(o):
-                        if o == 're_merge':
-                            return '--re-merge '
-                        return '-%s ' % o[0:1]
-
-                    flags = ''.join([flag(o) for o in flaglist if opts.get(o)])
-                    hint = _("(try: hg resolve %s%s)\n") % (
+                        if o == b're_merge':
+                            return b'--re-merge '
+                        return b'-%s ' % o[0:1]
+
+                    flags = b''.join([flag(o) for o in flaglist if opts.get(o)])
+                    hint = _(b"(try: hg resolve %s%s)\n") % (
                         flags,
-                        ' '.join(pats),
+                        b' '.join(pats),
                     )
                     break
-            ui.warn(_("arguments do not match paths that need resolving\n"))
+            ui.warn(_(b"arguments do not match paths that need resolving\n"))
             if hint:
                 ui.warn(hint)
-        elif ms.mergedriver and ms.mdstate() != 's':
+        elif ms.mergedriver and ms.mdstate() != b's':
             # run conclude step when either a driver-resolved file is requested
             # or there are no driver-resolved files
             # we can't use 'ret' to determine whether any files are unresolved
@@ -5986,13 +6133,13 @@
     unresolvedf = list(ms.unresolved())
     driverresolvedf = list(ms.driverresolved())
     if not unresolvedf and not driverresolvedf:
-        ui.status(_('(no more unresolved files)\n'))
+        ui.status(_(b'(no more unresolved files)\n'))
         cmdutil.checkafterresolved(repo)
     elif not unresolvedf:
         ui.status(
             _(
-                '(no more unresolved files -- '
-                'run "hg resolve --all" to conclude)\n'
+                b'(no more unresolved files -- '
+                b'run "hg resolve --all" to conclude)\n'
             )
         )
 
@@ -6000,17 +6147,17 @@
 
 
 @command(
-    'revert',
+    b'revert',
     [
-        ('a', 'all', None, _('revert all changes when no arguments given')),
-        ('d', 'date', '', _('tipmost revision matching date'), _('DATE')),
-        ('r', 'rev', '', _('revert to the specified revision'), _('REV')),
-        ('C', 'no-backup', None, _('do not save backup copies of files')),
-        ('i', 'interactive', None, _('interactively select the changes')),
+        (b'a', b'all', None, _(b'revert all changes when no arguments given')),
+        (b'd', b'date', b'', _(b'tipmost revision matching date'), _(b'DATE')),
+        (b'r', b'rev', b'', _(b'revert to the specified revision'), _(b'REV')),
+        (b'C', b'no-backup', None, _(b'do not save backup copies of files')),
+        (b'i', b'interactive', None, _(b'interactively select the changes')),
     ]
     + walkopts
     + dryrunopts,
-    _('[OPTION]... [-r REV] [NAME]...'),
+    _(b'[OPTION]... [-r REV] [NAME]...'),
     helpcategory=command.CATEGORY_WORKING_DIRECTORY,
 )
 def revert(ui, repo, *pats, **opts):
@@ -6051,36 +6198,36 @@
     """
 
     opts = pycompat.byteskwargs(opts)
-    if opts.get("date"):
-        if opts.get("rev"):
-            raise error.Abort(_("you can't specify a revision and a date"))
-        opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])
+    if opts.get(b"date"):
+        if opts.get(b"rev"):
+            raise error.Abort(_(b"you can't specify a revision and a date"))
+        opts[b"rev"] = cmdutil.finddate(ui, repo, opts[b"date"])
 
     parent, p2 = repo.dirstate.parents()
-    if not opts.get('rev') and p2 != nullid:
+    if not opts.get(b'rev') and p2 != nullid:
         # revert after merge is a trap for new users (issue2915)
         raise error.Abort(
-            _('uncommitted merge with no revision specified'),
-            hint=_("use 'hg update' or see 'hg help revert'"),
+            _(b'uncommitted merge with no revision specified'),
+            hint=_(b"use 'hg update' or see 'hg help revert'"),
         )
 
-    rev = opts.get('rev')
+    rev = opts.get(b'rev')
     if rev:
-        repo = scmutil.unhidehashlikerevs(repo, [rev], 'nowarn')
+        repo = scmutil.unhidehashlikerevs(repo, [rev], b'nowarn')
     ctx = scmutil.revsingle(repo, rev)
 
     if not (
         pats
-        or opts.get('include')
-        or opts.get('exclude')
-        or opts.get('all')
-        or opts.get('interactive')
+        or opts.get(b'include')
+        or opts.get(b'exclude')
+        or opts.get(b'all')
+        or opts.get(b'interactive')
     ):
-        msg = _("no files or directories specified")
+        msg = _(b"no files or directories specified")
         if p2 != nullid:
             hint = _(
-                "uncommitted merge, use --all to discard all changes,"
-                " or 'hg update -C .' to abort the merge"
+                b"uncommitted merge, use --all to discard all changes,"
+                b" or 'hg update -C .' to abort the merge"
             )
             raise error.Abort(msg, hint=hint)
         dirty = any(repo.status())
@@ -6089,23 +6236,23 @@
             if dirty:
                 hint = (
                     _(
-                        "uncommitted changes, use --all to discard all"
-                        " changes, or 'hg update %d' to update"
+                        b"uncommitted changes, use --all to discard all"
+                        b" changes, or 'hg update %d' to update"
                     )
                     % ctx.rev()
                 )
             else:
                 hint = (
                     _(
-                        "use --all to revert all files,"
-                        " or 'hg update %d' to update"
+                        b"use --all to revert all files,"
+                        b" or 'hg update %d' to update"
                     )
                     % ctx.rev()
                 )
         elif dirty:
-            hint = _("uncommitted changes, use --all to discard all changes")
+            hint = _(b"uncommitted changes, use --all to discard all changes")
         else:
-            hint = _("use --all to revert all files")
+            hint = _(b"use --all to revert all files")
         raise error.Abort(msg, hint=hint)
 
     return cmdutil.revert(
@@ -6114,8 +6261,8 @@
 
 
 @command(
-    'rollback',
-    dryrunopts + [('f', 'force', False, _('ignore safety measures'))],
+    b'rollback',
+    dryrunopts + [(b'f', b'force', False, _(b'ignore safety measures'))],
     helpcategory=command.CATEGORY_MAINTENANCE,
 )
 def rollback(ui, repo, **opts):
@@ -6163,16 +6310,16 @@
 
     Returns 0 on success, 1 if no rollback data is available.
     """
-    if not ui.configbool('ui', 'rollback'):
+    if not ui.configbool(b'ui', b'rollback'):
         raise error.Abort(
-            _('rollback is disabled because it is unsafe'),
-            hint='see `hg help -v rollback` for information',
+            _(b'rollback is disabled because it is unsafe'),
+            hint=b'see `hg help -v rollback` for information',
         )
     return repo.rollback(dryrun=opts.get(r'dry_run'), force=opts.get(r'force'))
 
 
 @command(
-    'root',
+    b'root',
     [] + formatteropts,
     intents={INTENT_READONLY},
     helpcategory=command.CATEGORY_WORKING_DIRECTORY,
@@ -6195,85 +6342,97 @@
     Returns 0 on success.
     """
     opts = pycompat.byteskwargs(opts)
-    with ui.formatter('root', opts) as fm:
+    with ui.formatter(b'root', opts) as fm:
         fm.startitem()
-        fm.write('reporoot', '%s\n', repo.root)
+        fm.write(b'reporoot', b'%s\n', repo.root)
         fm.data(hgpath=repo.path, storepath=repo.spath)
 
 
 @command(
-    'serve',
+    b'serve',
     [
         (
-            'A',
-            'accesslog',
-            '',
-            _('name of access log file to write to'),
-            _('FILE'),
+            b'A',
+            b'accesslog',
+            b'',
+            _(b'name of access log file to write to'),
+            _(b'FILE'),
         ),
-        ('d', 'daemon', None, _('run server in background')),
-        ('', 'daemon-postexec', [], _('used internally by daemon mode')),
+        (b'd', b'daemon', None, _(b'run server in background')),
+        (b'', b'daemon-postexec', [], _(b'used internally by daemon mode')),
         (
-            'E',
-            'errorlog',
-            '',
-            _('name of error log file to write to'),
-            _('FILE'),
+            b'E',
+            b'errorlog',
+            b'',
+            _(b'name of error log file to write to'),
+            _(b'FILE'),
         ),
         # use string type, then we can check if something was passed
-        ('p', 'port', '', _('port to listen on (default: 8000)'), _('PORT')),
         (
-            'a',
-            'address',
-            '',
-            _('address to listen on (default: all interfaces)'),
-            _('ADDR'),
+            b'p',
+            b'port',
+            b'',
+            _(b'port to listen on (default: 8000)'),
+            _(b'PORT'),
         ),
         (
-            '',
-            'prefix',
-            '',
-            _('prefix path to serve from (default: server root)'),
-            _('PREFIX'),
+            b'a',
+            b'address',
+            b'',
+            _(b'address to listen on (default: all interfaces)'),
+            _(b'ADDR'),
+        ),
+        (
+            b'',
+            b'prefix',
+            b'',
+            _(b'prefix path to serve from (default: server root)'),
+            _(b'PREFIX'),
         ),
         (
-            'n',
-            'name',
-            '',
-            _('name to show in web pages (default: working directory)'),
-            _('NAME'),
+            b'n',
+            b'name',
+            b'',
+            _(b'name to show in web pages (default: working directory)'),
+            _(b'NAME'),
         ),
         (
-            '',
-            'web-conf',
-            '',
-            _("name of the hgweb config file (see 'hg help hgweb')"),
-            _('FILE'),
+            b'',
+            b'web-conf',
+            b'',
+            _(b"name of the hgweb config file (see 'hg help hgweb')"),
+            _(b'FILE'),
+        ),
+        (
+            b'',
+            b'webdir-conf',
+            b'',
+            _(b'name of the hgweb config file (DEPRECATED)'),
+            _(b'FILE'),
         ),
         (
-            '',
-            'webdir-conf',
-            '',
-            _('name of the hgweb config file (DEPRECATED)'),
-            _('FILE'),
+            b'',
+            b'pid-file',
+            b'',
+            _(b'name of file to write process ID to'),
+            _(b'FILE'),
         ),
+        (b'', b'stdio', None, _(b'for remote clients (ADVANCED)')),
         (
-            '',
-            'pid-file',
-            '',
-            _('name of file to write process ID to'),
-            _('FILE'),
+            b'',
+            b'cmdserver',
+            b'',
+            _(b'for remote clients (ADVANCED)'),
+            _(b'MODE'),
         ),
-        ('', 'stdio', None, _('for remote clients (ADVANCED)')),
-        ('', 'cmdserver', '', _('for remote clients (ADVANCED)'), _('MODE')),
-        ('t', 'templates', '', _('web templates to use'), _('TEMPLATE')),
-        ('', 'style', '', _('template style to use'), _('STYLE')),
-        ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
-        ('', 'certificate', '', _('SSL certificate file'), _('FILE')),
-        ('', 'print-url', None, _('start and print only the URL')),
+        (b't', b'templates', b'', _(b'web templates to use'), _(b'TEMPLATE')),
+        (b'', b'style', b'', _(b'template style to use'), _(b'STYLE')),
+        (b'6', b'ipv6', None, _(b'use IPv6 in addition to IPv4')),
+        (b'', b'certificate', b'', _(b'SSL certificate file'), _(b'FILE')),
+        (b'', b'print-url', None, _(b'start and print only the URL')),
     ]
     + subrepoopts,
-    _('[OPTION]...'),
+    _(b'[OPTION]...'),
     helpcategory=command.CATEGORY_REMOTE_REPO_MANAGEMENT,
     helpbasic=True,
     optionalrepo=True,
@@ -6304,15 +6463,15 @@
     """
 
     opts = pycompat.byteskwargs(opts)
-    if opts["stdio"] and opts["cmdserver"]:
-        raise error.Abort(_("cannot use --stdio with --cmdserver"))
-    if opts["print_url"] and ui.verbose:
-        raise error.Abort(_("cannot use --print-url with --verbose"))
-
-    if opts["stdio"]:
+    if opts[b"stdio"] and opts[b"cmdserver"]:
+        raise error.Abort(_(b"cannot use --stdio with --cmdserver"))
+    if opts[b"print_url"] and ui.verbose:
+        raise error.Abort(_(b"cannot use --print-url with --verbose"))
+
+    if opts[b"stdio"]:
         if repo is None:
             raise error.RepoError(
-                _("there is no Mercurial repository here" " (.hg not found)")
+                _(b"there is no Mercurial repository here" b" (.hg not found)")
             )
         s = wireprotoserver.sshserver(ui, repo)
         s.serve_forever()
@@ -6322,56 +6481,62 @@
 
 
 @command(
-    'shelve',
+    b'shelve',
     [
         (
-            'A',
-            'addremove',
+            b'A',
+            b'addremove',
             None,
-            _('mark new/missing files as added/removed before shelving'),
+            _(b'mark new/missing files as added/removed before shelving'),
         ),
-        ('u', 'unknown', None, _('store unknown files in the shelve')),
-        ('', 'cleanup', None, _('delete all shelved changes')),
-        ('', 'date', '', _('shelve with the specified commit date'), _('DATE')),
-        ('d', 'delete', None, _('delete the named shelved change(s)')),
-        ('e', 'edit', False, _('invoke editor on commit messages')),
+        (b'u', b'unknown', None, _(b'store unknown files in the shelve')),
+        (b'', b'cleanup', None, _(b'delete all shelved changes')),
         (
-            'k',
-            'keep',
-            False,
-            _('shelve, but keep changes in the working directory'),
+            b'',
+            b'date',
+            b'',
+            _(b'shelve with the specified commit date'),
+            _(b'DATE'),
         ),
-        ('l', 'list', None, _('list current shelves')),
-        ('m', 'message', '', _('use text as shelve message'), _('TEXT')),
+        (b'd', b'delete', None, _(b'delete the named shelved change(s)')),
+        (b'e', b'edit', False, _(b'invoke editor on commit messages')),
         (
-            'n',
-            'name',
-            '',
-            _('use the given name for the shelved commit'),
-            _('NAME'),
+            b'k',
+            b'keep',
+            False,
+            _(b'shelve, but keep changes in the working directory'),
+        ),
+        (b'l', b'list', None, _(b'list current shelves')),
+        (b'm', b'message', b'', _(b'use text as shelve message'), _(b'TEXT')),
+        (
+            b'n',
+            b'name',
+            b'',
+            _(b'use the given name for the shelved commit'),
+            _(b'NAME'),
         ),
         (
-            'p',
-            'patch',
+            b'p',
+            b'patch',
             None,
             _(
-                'output patches for changes (provide the names of the shelved '
-                'changes as positional arguments)'
+                b'output patches for changes (provide the names of the shelved '
+                b'changes as positional arguments)'
             ),
         ),
-        ('i', 'interactive', None, _('interactive mode')),
+        (b'i', b'interactive', None, _(b'interactive mode')),
         (
-            '',
-            'stat',
+            b'',
+            b'stat',
             None,
             _(
-                'output diffstat-style summary of changes (provide the names of '
-                'the shelved changes as positional arguments)'
+                b'output diffstat-style summary of changes (provide the names of '
+                b'the shelved changes as positional arguments)'
             ),
         ),
     ]
     + cmdutil.walkopts,
-    _('hg shelve [OPTION]... [FILE]...'),
+    _(b'hg shelve [OPTION]... [FILE]...'),
     helpcategory=command.CATEGORY_WORKING_DIRECTORY,
 )
 def shelve(ui, repo, *pats, **opts):
@@ -6409,18 +6574,18 @@
     '''
     opts = pycompat.byteskwargs(opts)
     allowables = [
-        ('addremove', {'create'}),  # 'create' is pseudo action
-        ('unknown', {'create'}),
-        ('cleanup', {'cleanup'}),
+        (b'addremove', {b'create'}),  # 'create' is pseudo action
+        (b'unknown', {b'create'}),
+        (b'cleanup', {b'cleanup'}),
         #       ('date', {'create'}), # ignored for passing '--date "0 0"' in tests
-        ('delete', {'delete'}),
-        ('edit', {'create'}),
-        ('keep', {'create'}),
-        ('list', {'list'}),
-        ('message', {'create'}),
-        ('name', {'create'}),
-        ('patch', {'patch', 'list'}),
-        ('stat', {'stat', 'list'}),
+        (b'delete', {b'delete'}),
+        (b'edit', {b'create'}),
+        (b'keep', {b'create'}),
+        (b'list', {b'list'}),
+        (b'message', {b'create'}),
+        (b'name', {b'create'}),
+        (b'patch', {b'patch', b'list'}),
+        (b'stat', {b'stat', b'list'}),
     ]
 
     def checkopt(opt):
@@ -6429,52 +6594,63 @@
                 if opts[i] and opt not in allowable:
                     raise error.Abort(
                         _(
-                            "options '--%s' and '--%s' may not be "
-                            "used together"
+                            b"options '--%s' and '--%s' may not be "
+                            b"used together"
                         )
                         % (opt, i)
                     )
             return True
 
-    if checkopt('cleanup'):
+    if checkopt(b'cleanup'):
         if pats:
-            raise error.Abort(_("cannot specify names when using '--cleanup'"))
+            raise error.Abort(_(b"cannot specify names when using '--cleanup'"))
         return shelvemod.cleanupcmd(ui, repo)
-    elif checkopt('delete'):
+    elif checkopt(b'delete'):
         return shelvemod.deletecmd(ui, repo, pats)
-    elif checkopt('list'):
+    elif checkopt(b'list'):
         return shelvemod.listcmd(ui, repo, pats, opts)
-    elif checkopt('patch') or checkopt('stat'):
+    elif checkopt(b'patch') or checkopt(b'stat'):
         return shelvemod.patchcmds(ui, repo, pats, opts)
     else:
         return shelvemod.createcmd(ui, repo, pats, opts)
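
The allowables/checkopt pairing above enforces flag exclusivity by action set: each shelve flag lists the actions it may accompany, and the first set flag whose action set excludes the requested one aborts. Part of checkopt's body is elided by this hunk, so the following is a reconstructed sketch of the idea rather than the exact code; makechecker and the ValueError are illustrative stand-ins.

def makechecker(opts, allowables):
    def checkopt(opt):
        if opts.get(opt):
            for i, allowable in allowables:
                if opts.get(i) and opt not in allowable:
                    raise ValueError(
                        b"options '--%s' and '--%s' may not be used together"
                        % (opt, i)
                    )
            return True
    return checkopt

allowables = [(b'delete', {b'delete'}), (b'list', {b'list'})]
check = makechecker({b'list': True, b'delete': True}, allowables)
try:
    check(b'delete')  # --list is also set, and b'delete' is not in its set
except ValueError as e:
    assert b'--delete' in e.args[0] and b'--list' in e.args[0]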
 
 
-_NOTTERSE = 'nothing'
+_NOTTERSE = b'nothing'
 
 
 @command(
-    'status|st',
+    b'status|st',
     [
-        ('A', 'all', None, _('show status of all files')),
-        ('m', 'modified', None, _('show only modified files')),
-        ('a', 'added', None, _('show only added files')),
-        ('r', 'removed', None, _('show only removed files')),
-        ('d', 'deleted', None, _('show only deleted (but tracked) files')),
-        ('c', 'clean', None, _('show only files without changes')),
-        ('u', 'unknown', None, _('show only unknown (not tracked) files')),
-        ('i', 'ignored', None, _('show only ignored files')),
-        ('n', 'no-status', None, _('hide status prefix')),
-        ('t', 'terse', _NOTTERSE, _('show the terse output (EXPERIMENTAL)')),
-        ('C', 'copies', None, _('show source of copied files')),
-        ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
-        ('', 'rev', [], _('show difference from revision'), _('REV')),
-        ('', 'change', '', _('list the changed files of a revision'), _('REV')),
+        (b'A', b'all', None, _(b'show status of all files')),
+        (b'm', b'modified', None, _(b'show only modified files')),
+        (b'a', b'added', None, _(b'show only added files')),
+        (b'r', b'removed', None, _(b'show only removed files')),
+        (b'd', b'deleted', None, _(b'show only deleted (but tracked) files')),
+        (b'c', b'clean', None, _(b'show only files without changes')),
+        (b'u', b'unknown', None, _(b'show only unknown (not tracked) files')),
+        (b'i', b'ignored', None, _(b'show only ignored files')),
+        (b'n', b'no-status', None, _(b'hide status prefix')),
+        (b't', b'terse', _NOTTERSE, _(b'show the terse output (EXPERIMENTAL)')),
+        (b'C', b'copies', None, _(b'show source of copied files')),
+        (
+            b'0',
+            b'print0',
+            None,
+            _(b'end filenames with NUL, for use with xargs'),
+        ),
+        (b'', b'rev', [], _(b'show difference from revision'), _(b'REV')),
+        (
+            b'',
+            b'change',
+            b'',
+            _(b'list the changed files of a revision'),
+            _(b'REV'),
+        ),
     ]
     + walkopts
     + subrepoopts
     + formatteropts,
-    _('[OPTION]... [FILE]...'),
+    _(b'[OPTION]... [FILE]...'),
     helpcategory=command.CATEGORY_WORKING_DIRECTORY,
     helpbasic=True,
     inferrepo=True,
@@ -6576,47 +6752,47 @@
     """
 
     opts = pycompat.byteskwargs(opts)
-    revs = opts.get('rev')
-    change = opts.get('change')
-    terse = opts.get('terse')
+    revs = opts.get(b'rev')
+    change = opts.get(b'change')
+    terse = opts.get(b'terse')
     if terse is _NOTTERSE:
         if revs:
-            terse = ''
+            terse = b''
         else:
-            terse = ui.config('commands', 'status.terse')
+            terse = ui.config(b'commands', b'status.terse')
 
     if revs and change:
-        msg = _('cannot specify --rev and --change at the same time')
+        msg = _(b'cannot specify --rev and --change at the same time')
         raise error.Abort(msg)
     elif revs and terse:
-        msg = _('cannot use --terse with --rev')
+        msg = _(b'cannot use --terse with --rev')
         raise error.Abort(msg)
     elif change:
-        repo = scmutil.unhidehashlikerevs(repo, [change], 'nowarn')
+        repo = scmutil.unhidehashlikerevs(repo, [change], b'nowarn')
         ctx2 = scmutil.revsingle(repo, change, None)
         ctx1 = ctx2.p1()
     else:
-        repo = scmutil.unhidehashlikerevs(repo, revs, 'nowarn')
+        repo = scmutil.unhidehashlikerevs(repo, revs, b'nowarn')
         ctx1, ctx2 = scmutil.revpair(repo, revs)
 
     forcerelativevalue = None
-    if ui.hasconfig('commands', 'status.relative'):
-        forcerelativevalue = ui.configbool('commands', 'status.relative')
+    if ui.hasconfig(b'commands', b'status.relative'):
+        forcerelativevalue = ui.configbool(b'commands', b'status.relative')
     uipathfn = scmutil.getuipathfn(
         repo,
         legacyrelativevalue=bool(pats),
         forcerelativevalue=forcerelativevalue,
     )
 
-    if opts.get('print0'):
-        end = '\0'
+    if opts.get(b'print0'):
+        end = b'\0'
     else:
-        end = '\n'
+        end = b'\n'
     copy = {}
-    states = 'modified added removed deleted unknown ignored clean'.split()
+    states = b'modified added removed deleted unknown ignored clean'.split()
     show = [k for k in states if opts.get(k)]
-    if opts.get('all'):
-        show += ui.quiet and (states[:4] + ['clean']) or states
+    if opts.get(b'all'):
+        show += ui.quiet and (states[:4] + [b'clean']) or states
 
     if not show:
         if ui.quiet:
@@ -6631,10 +6807,10 @@
             ctx1.node(),
             ctx2.node(),
             m,
-            'ignored' in show or 'i' in terse,
+            b'ignored' in show or b'i' in terse,
             clean=True,
             unknown=True,
-            listsubrepos=opts.get('subrepos'),
+            listsubrepos=opts.get(b'subrepos'),
         )
 
         stat = cmdutil.tersedir(stat, terse)
@@ -6643,53 +6819,53 @@
             ctx1.node(),
             ctx2.node(),
             m,
-            'ignored' in show,
-            'clean' in show,
-            'unknown' in show,
-            opts.get('subrepos'),
+            b'ignored' in show,
+            b'clean' in show,
+            b'unknown' in show,
+            opts.get(b'subrepos'),
         )
 
-    changestates = zip(states, pycompat.iterbytestr('MAR!?IC'), stat)
+    changestates = zip(states, pycompat.iterbytestr(b'MAR!?IC'), stat)
 
     if (
-        opts.get('all')
-        or opts.get('copies')
-        or ui.configbool('ui', 'statuscopies')
-    ) and not opts.get('no_status'):
+        opts.get(b'all')
+        or opts.get(b'copies')
+        or ui.configbool(b'ui', b'statuscopies')
+    ) and not opts.get(b'no_status'):
         copy = copies.pathcopies(ctx1, ctx2, m)
 
-    ui.pager('status')
-    fm = ui.formatter('status', opts)
-    fmt = '%s' + end
-    showchar = not opts.get('no_status')
+    ui.pager(b'status')
+    fm = ui.formatter(b'status', opts)
+    fmt = b'%s' + end
+    showchar = not opts.get(b'no_status')
 
     for state, char, files in changestates:
         if state in show:
-            label = 'status.' + state
+            label = b'status.' + state
             for f in files:
                 fm.startitem()
                 fm.context(ctx=ctx2)
                 fm.data(path=f)
-                fm.condwrite(showchar, 'status', '%s ', char, label=label)
+                fm.condwrite(showchar, b'status', b'%s ', char, label=label)
                 fm.plain(fmt % uipathfn(f), label=label)
                 if f in copy:
                     fm.data(source=copy[f])
                     fm.plain(
-                        ('  %s' + end) % uipathfn(copy[f]),
-                        label='status.copied',
+                        (b'  %s' + end) % uipathfn(copy[f]),
+                        label=b'status.copied',
                     )
 
     if (
-        ui.verbose or ui.configbool('commands', 'status.verbose')
+        ui.verbose or ui.configbool(b'commands', b'status.verbose')
     ) and not ui.plain():
         cmdutil.morestatus(repo, fm)
     fm.end()
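
For readers unfamiliar with the formatter API driving the loop above (startitem/data/condwrite/plain/end), here is a minimal hypothetical stand-in that accepts the same calls with bytes arguments. It is not Mercurial's formatter, which also handles templates and structured (JSON/pickle) output; this only shows the calling convention.

import sys

class plainformatter(object):
    # toy plain-output formatter: renders items to a stream and drops
    # the structured fields that only matter for templated output
    def __init__(self, out):
        self._out = out

    def startitem(self):
        pass  # a real formatter separates items here

    def context(self, **kwargs):
        pass  # repo/ctx context is only needed for template keywords

    def data(self, **kwargs):
        pass  # structured data fields are ignored in plain mode

    def condwrite(self, cond, fields, fmt, *values, **opts):
        # write the formatted values only when cond holds, mirroring
        # fm.condwrite(showchar, b'status', b'%s ', char, label=label)
        if cond:
            self._out.write(fmt % values)

    def plain(self, text, **opts):
        self._out.write(text)

    def end(self):
        pass

fm = plainformatter(sys.stdout.buffer)
for char, path in [(b'M', b'a.txt'), (b'?', b'b.txt')]:
    fm.startitem()
    fm.data(path=path)
    fm.condwrite(True, b'status', b'%s ', char, label=b'status.modified')
    fm.plain(b'%s\n' % path, label=b'status.modified')
fm.end()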
 
 
 @command(
-    'summary|sum',
-    [('', 'remote', None, _('check for push and pull'))],
-    '[--remote]',
+    b'summary|sum',
+    [(b'', b'remote', None, _(b'check for push and pull'))],
+    b'[--remote]',
     helpcategory=command.CATEGORY_WORKING_DIRECTORY,
     helpbasic=True,
     intents={INTENT_READONLY},
@@ -6707,7 +6883,7 @@
     """
 
     opts = pycompat.byteskwargs(opts)
-    ui.pager('summary')
+    ui.pager(b'summary')
     ctx = repo[None]
     parents = ctx.parents()
     pnode = parents[0].node()
@@ -6716,9 +6892,9 @@
     try:
         ms = mergemod.mergestate.read(repo)
     except error.UnsupportedMergeRecords as e:
-        s = ' '.join(e.recordtypes)
+        s = b' '.join(e.recordtypes)
         ui.warn(
-            _('warning: merge state has unsupported record types: %s\n') % s
+            _(b'warning: merge state has unsupported record types: %s\n') % s
         )
         unresolved = []
     else:
@@ -6729,54 +6905,54 @@
         # shows a working directory parent *changeset*:
         # i18n: column positioning for "hg summary"
         ui.write(
-            _('parent: %d:%s ') % (p.rev(), p),
+            _(b'parent: %d:%s ') % (p.rev(), p),
             label=logcmdutil.changesetlabels(p),
         )
-        ui.write(' '.join(p.tags()), label='log.tag')
+        ui.write(b' '.join(p.tags()), label=b'log.tag')
         if p.bookmarks():
             marks.extend(p.bookmarks())
         if p.rev() == -1:
             if not len(repo):
-                ui.write(_(' (empty repository)'))
+                ui.write(_(b' (empty repository)'))
             else:
-                ui.write(_(' (no revision checked out)'))
+                ui.write(_(b' (no revision checked out)'))
         if p.obsolete():
-            ui.write(_(' (obsolete)'))
+            ui.write(_(b' (obsolete)'))
         if p.isunstable():
             instabilities = (
-                ui.label(instability, 'trouble.%s' % instability)
+                ui.label(instability, b'trouble.%s' % instability)
                 for instability in p.instabilities()
             )
-            ui.write(' (' + ', '.join(instabilities) + ')')
-        ui.write('\n')
+            ui.write(b' (' + b', '.join(instabilities) + b')')
+        ui.write(b'\n')
         if p.description():
             ui.status(
-                ' ' + p.description().splitlines()[0].strip() + '\n',
-                label='log.summary',
+                b' ' + p.description().splitlines()[0].strip() + b'\n',
+                label=b'log.summary',
             )
 
     branch = ctx.branch()
     bheads = repo.branchheads(branch)
     # i18n: column positioning for "hg summary"
-    m = _('branch: %s\n') % branch
-    if branch != 'default':
-        ui.write(m, label='log.branch')
+    m = _(b'branch: %s\n') % branch
+    if branch != b'default':
+        ui.write(m, label=b'log.branch')
     else:
-        ui.status(m, label='log.branch')
+        ui.status(m, label=b'log.branch')
 
     if marks:
         active = repo._activebookmark
         # i18n: column positioning for "hg summary"
-        ui.write(_('bookmarks:'), label='log.bookmark')
+        ui.write(_(b'bookmarks:'), label=b'log.bookmark')
         if active is not None:
             if active in marks:
-                ui.write(' *' + active, label=bookmarks.activebookmarklabel)
+                ui.write(b' *' + active, label=bookmarks.activebookmarklabel)
                 marks.remove(active)
             else:
-                ui.write(' [%s]' % active, label=bookmarks.activebookmarklabel)
+                ui.write(b' [%s]' % active, label=bookmarks.activebookmarklabel)
         for m in marks:
-            ui.write(' ' + m, label='log.bookmark')
-        ui.write('\n', label='log.bookmark')
+            ui.write(b' ' + m, label=b'log.bookmark')
+        ui.write(b'\n', label=b'log.bookmark')
 
     status = repo.status(unknown=True)
 
@@ -6794,36 +6970,36 @@
     subs = [s for s in ctx.substate if ctx.sub(s).dirty()]
 
     labels = [
-        (ui.label(_('%d modified'), 'status.modified'), status.modified),
-        (ui.label(_('%d added'), 'status.added'), status.added),
-        (ui.label(_('%d removed'), 'status.removed'), status.removed),
-        (ui.label(_('%d renamed'), 'status.copied'), renamed),
-        (ui.label(_('%d copied'), 'status.copied'), copied),
-        (ui.label(_('%d deleted'), 'status.deleted'), status.deleted),
-        (ui.label(_('%d unknown'), 'status.unknown'), status.unknown),
-        (ui.label(_('%d unresolved'), 'resolve.unresolved'), unresolved),
-        (ui.label(_('%d subrepos'), 'status.modified'), subs),
+        (ui.label(_(b'%d modified'), b'status.modified'), status.modified),
+        (ui.label(_(b'%d added'), b'status.added'), status.added),
+        (ui.label(_(b'%d removed'), b'status.removed'), status.removed),
+        (ui.label(_(b'%d renamed'), b'status.copied'), renamed),
+        (ui.label(_(b'%d copied'), b'status.copied'), copied),
+        (ui.label(_(b'%d deleted'), b'status.deleted'), status.deleted),
+        (ui.label(_(b'%d unknown'), b'status.unknown'), status.unknown),
+        (ui.label(_(b'%d unresolved'), b'resolve.unresolved'), unresolved),
+        (ui.label(_(b'%d subrepos'), b'status.modified'), subs),
     ]
     t = []
     for l, s in labels:
         if s:
             t.append(l % len(s))
 
-    t = ', '.join(t)
+    t = b', '.join(t)
     cleanworkdir = False
 
-    if repo.vfs.exists('graftstate'):
-        t += _(' (graft in progress)')
-    if repo.vfs.exists('updatestate'):
-        t += _(' (interrupted update)')
+    if repo.vfs.exists(b'graftstate'):
+        t += _(b' (graft in progress)')
+    if repo.vfs.exists(b'updatestate'):
+        t += _(b' (interrupted update)')
     elif len(parents) > 1:
-        t += _(' (merge)')
+        t += _(b' (merge)')
     elif branch != parents[0].branch():
-        t += _(' (new branch)')
+        t += _(b' (new branch)')
     elif parents[0].closesbranch() and pnode in repo.branchheads(
         branch, closed=True
     ):
-        t += _(' (head closed)')
+        t += _(b' (head closed)')
     elif not (
         status.modified
         or status.added
@@ -6832,10 +7008,10 @@
         or copied
         or subs
     ):
-        t += _(' (clean)')
+        t += _(b' (clean)')
         cleanworkdir = True
     elif pnode not in bheads:
-        t += _(' (new branch head)')
+        t += _(b' (new branch head)')
 
     if parents:
         pendingphase = max(p.phase() for p in parents)
@@ -6843,14 +7019,14 @@
         pendingphase = phases.public
 
     if pendingphase > phases.newcommitphase(ui):
-        t += ' (%s)' % phases.phasenames[pendingphase]
+        t += b' (%s)' % phases.phasenames[pendingphase]
 
     if cleanworkdir:
         # i18n: column positioning for "hg summary"
-        ui.status(_('commit: %s\n') % t.strip())
+        ui.status(_(b'commit: %s\n') % t.strip())
     else:
         # i18n: column positioning for "hg summary"
-        ui.write(_('commit: %s\n') % t.strip())
+        ui.write(_(b'commit: %s\n') % t.strip())
 
     # all ancestors of branch heads - all ancestors of parent = new csets
     new = len(
@@ -6859,43 +7035,43 @@
 
     if new == 0:
         # i18n: column positioning for "hg summary"
-        ui.status(_('update: (current)\n'))
+        ui.status(_(b'update: (current)\n'))
     elif pnode not in bheads:
         # i18n: column positioning for "hg summary"
-        ui.write(_('update: %d new changesets (update)\n') % new)
+        ui.write(_(b'update: %d new changesets (update)\n') % new)
     else:
         # i18n: column positioning for "hg summary"
         ui.write(
-            _('update: %d new changesets, %d branch heads (merge)\n')
+            _(b'update: %d new changesets, %d branch heads (merge)\n')
             % (new, len(bheads))
         )
 
     t = []
-    draft = len(repo.revs('draft()'))
+    draft = len(repo.revs(b'draft()'))
     if draft:
-        t.append(_('%d draft') % draft)
-    secret = len(repo.revs('secret()'))
+        t.append(_(b'%d draft') % draft)
+    secret = len(repo.revs(b'secret()'))
     if secret:
-        t.append(_('%d secret') % secret)
+        t.append(_(b'%d secret') % secret)
 
     if draft or secret:
-        ui.status(_('phases: %s\n') % ', '.join(t))
+        ui.status(_(b'phases: %s\n') % b', '.join(t))
 
     if obsolete.isenabled(repo, obsolete.createmarkersopt):
-        for trouble in ("orphan", "contentdivergent", "phasedivergent"):
-            numtrouble = len(repo.revs(trouble + "()"))
+        for trouble in (b"orphan", b"contentdivergent", b"phasedivergent"):
+            numtrouble = len(repo.revs(trouble + b"()"))
             # We write all the possibilities to ease translation
             troublemsg = {
-                "orphan": _("orphan: %d changesets"),
-                "contentdivergent": _("content-divergent: %d changesets"),
-                "phasedivergent": _("phase-divergent: %d changesets"),
+                b"orphan": _(b"orphan: %d changesets"),
+                b"contentdivergent": _(b"content-divergent: %d changesets"),
+                b"phasedivergent": _(b"phase-divergent: %d changesets"),
             }
             if numtrouble > 0:
-                ui.status(troublemsg[trouble] % numtrouble + "\n")
+                ui.status(troublemsg[trouble] % numtrouble + b"\n")
 
     cmdutil.summaryhooks(ui, repo)
 
-    if opts.get('remote'):
+    if opts.get(b'remote'):
         needsincoming, needsoutgoing = True, True
     else:
         needsincoming, needsoutgoing = False, False
@@ -6908,18 +7084,18 @@
             return
 
     def getincoming():
-        source, branches = hg.parseurl(ui.expandpath('default'))
+        source, branches = hg.parseurl(ui.expandpath(b'default'))
         sbranch = branches[0]
         try:
             other = hg.peer(repo, {}, source)
         except error.RepoError:
-            if opts.get('remote'):
+            if opts.get(b'remote'):
                 raise
             return source, sbranch, None, None, None
         revs, checkout = hg.addbranchrevs(repo, other, branches, None)
         if revs:
             revs = [other.lookup(rev) for rev in revs]
-        ui.debug('comparing with %s\n' % util.hidepassword(source))
+        ui.debug(b'comparing with %s\n' % util.hidepassword(source))
         repo.ui.pushbuffer()
         commoninc = discovery.findcommonincoming(repo, other, heads=revs)
         repo.ui.popbuffer()
@@ -6931,17 +7107,17 @@
         source = sbranch = sother = commoninc = incoming = None
 
     def getoutgoing():
-        dest, branches = hg.parseurl(ui.expandpath('default-push', 'default'))
+        dest, branches = hg.parseurl(ui.expandpath(b'default-push', b'default'))
         dbranch = branches[0]
         revs, checkout = hg.addbranchrevs(repo, repo, branches, None)
         if source != dest:
             try:
                 dother = hg.peer(repo, {}, dest)
             except error.RepoError:
-                if opts.get('remote'):
+                if opts.get(b'remote'):
                     raise
                 return dest, dbranch, None, None
-            ui.debug('comparing with %s\n' % util.hidepassword(dest))
+            ui.debug(b'comparing with %s\n' % util.hidepassword(dest))
         elif sother is None:
             # there is no explicit destination peer, but source one is invalid
             return dest, dbranch, None, None
@@ -6965,27 +7141,27 @@
     else:
         dest = dbranch = dother = outgoing = None
 
-    if opts.get('remote'):
+    if opts.get(b'remote'):
         t = []
         if incoming:
-            t.append(_('1 or more incoming'))
+            t.append(_(b'1 or more incoming'))
         o = outgoing.missing
         if o:
-            t.append(_('%d outgoing') % len(o))
+            t.append(_(b'%d outgoing') % len(o))
         other = dother or sother
-        if 'bookmarks' in other.listkeys('namespaces'):
+        if b'bookmarks' in other.listkeys(b'namespaces'):
             counts = bookmarks.summary(repo, other)
             if counts[0] > 0:
-                t.append(_('%d incoming bookmarks') % counts[0])
+                t.append(_(b'%d incoming bookmarks') % counts[0])
             if counts[1] > 0:
-                t.append(_('%d outgoing bookmarks') % counts[1])
+                t.append(_(b'%d outgoing bookmarks') % counts[1])
 
         if t:
             # i18n: column positioning for "hg summary"
-            ui.write(_('remote: %s\n') % (', '.join(t)))
+            ui.write(_(b'remote: %s\n') % (b', '.join(t)))
         else:
             # i18n: column positioning for "hg summary"
-            ui.status(_('remote: (synced)\n'))
+            ui.status(_(b'remote: (synced)\n'))
 
     cmdutil.summaryremotehooks(
         ui,
@@ -6999,18 +7175,18 @@
 
 
 @command(
-    'tag',
+    b'tag',
     [
-        ('f', 'force', None, _('force tag')),
-        ('l', 'local', None, _('make the tag local')),
-        ('r', 'rev', '', _('revision to tag'), _('REV')),
-        ('', 'remove', None, _('remove a tag')),
+        (b'f', b'force', None, _(b'force tag')),
+        (b'l', b'local', None, _(b'make the tag local')),
+        (b'r', b'rev', b'', _(b'revision to tag'), _(b'REV')),
+        (b'', b'remove', None, _(b'remove a tag')),
         # -l/--local is already there, commitopts cannot be used
-        ('e', 'edit', None, _('invoke editor on commit messages')),
-        ('m', 'message', '', _('use text as commit message'), _('TEXT')),
+        (b'e', b'edit', None, _(b'invoke editor on commit messages')),
+        (b'm', b'message', b'', _(b'use text as commit message'), _(b'TEXT')),
     ]
     + commitopts2,
-    _('[-f] [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...'),
+    _(b'[-f] [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...'),
     helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
 )
 def tag(ui, repo, name1, *names, **opts):
@@ -7047,105 +7223,107 @@
     """
     opts = pycompat.byteskwargs(opts)
     with repo.wlock(), repo.lock():
-        rev_ = "."
+        rev_ = b"."
         names = [t.strip() for t in (name1,) + names]
         if len(names) != len(set(names)):
-            raise error.Abort(_('tag names must be unique'))
+            raise error.Abort(_(b'tag names must be unique'))
         for n in names:
-            scmutil.checknewlabel(repo, n, 'tag')
+            scmutil.checknewlabel(repo, n, b'tag')
             if not n:
                 raise error.Abort(
-                    _('tag names cannot consist entirely of ' 'whitespace')
+                    _(b'tag names cannot consist entirely of ' b'whitespace')
                 )
-        if opts.get('rev') and opts.get('remove'):
-            raise error.Abort(_("--rev and --remove are incompatible"))
-        if opts.get('rev'):
-            rev_ = opts['rev']
-        message = opts.get('message')
-        if opts.get('remove'):
-            if opts.get('local'):
-                expectedtype = 'local'
+        if opts.get(b'rev') and opts.get(b'remove'):
+            raise error.Abort(_(b"--rev and --remove are incompatible"))
+        if opts.get(b'rev'):
+            rev_ = opts[b'rev']
+        message = opts.get(b'message')
+        if opts.get(b'remove'):
+            if opts.get(b'local'):
+                expectedtype = b'local'
             else:
-                expectedtype = 'global'
+                expectedtype = b'global'
 
             for n in names:
-                if repo.tagtype(n) == 'global':
+                if repo.tagtype(n) == b'global':
                     alltags = tagsmod.findglobaltags(ui, repo)
                     if alltags[n][0] == nullid:
-                        raise error.Abort(_("tag '%s' is already removed") % n)
+                        raise error.Abort(_(b"tag '%s' is already removed") % n)
                 if not repo.tagtype(n):
-                    raise error.Abort(_("tag '%s' does not exist") % n)
+                    raise error.Abort(_(b"tag '%s' does not exist") % n)
                 if repo.tagtype(n) != expectedtype:
-                    if expectedtype == 'global':
-                        raise error.Abort(_("tag '%s' is not a global tag") % n)
+                    if expectedtype == b'global':
+                        raise error.Abort(
+                            _(b"tag '%s' is not a global tag") % n
+                        )
                     else:
-                        raise error.Abort(_("tag '%s' is not a local tag") % n)
-            rev_ = 'null'
+                        raise error.Abort(_(b"tag '%s' is not a local tag") % n)
+            rev_ = b'null'
             if not message:
                 # we don't translate commit messages
-                message = 'Removed tag %s' % ', '.join(names)
-        elif not opts.get('force'):
+                message = b'Removed tag %s' % b', '.join(names)
+        elif not opts.get(b'force'):
             for n in names:
                 if n in repo.tags():
                     raise error.Abort(
-                        _("tag '%s' already exists " "(use -f to force)") % n
+                        _(b"tag '%s' already exists " b"(use -f to force)") % n
                     )
-        if not opts.get('local'):
+        if not opts.get(b'local'):
             p1, p2 = repo.dirstate.parents()
             if p2 != nullid:
-                raise error.Abort(_('uncommitted merge'))
+                raise error.Abort(_(b'uncommitted merge'))
             bheads = repo.branchheads()
-            if not opts.get('force') and bheads and p1 not in bheads:
+            if not opts.get(b'force') and bheads and p1 not in bheads:
                 raise error.Abort(
                     _(
-                        'working directory is not at a branch head '
-                        '(use -f to force)'
+                        b'working directory is not at a branch head '
+                        b'(use -f to force)'
                     )
                 )
         node = scmutil.revsingle(repo, rev_).node()
 
         if not message:
             # we don't translate commit messages
-            message = 'Added tag %s for changeset %s' % (
-                ', '.join(names),
+            message = b'Added tag %s for changeset %s' % (
+                b', '.join(names),
                 short(node),
             )
 
-        date = opts.get('date')
+        date = opts.get(b'date')
         if date:
             date = dateutil.parsedate(date)
 
-        if opts.get('remove'):
-            editform = 'tag.remove'
+        if opts.get(b'remove'):
+            editform = b'tag.remove'
         else:
-            editform = 'tag.add'
+            editform = b'tag.add'
         editor = cmdutil.getcommiteditor(
             editform=editform, **pycompat.strkwargs(opts)
         )
 
         # don't allow tagging the null rev
         if (
-            not opts.get('remove')
+            not opts.get(b'remove')
             and scmutil.revsingle(repo, rev_).rev() == nullrev
         ):
-            raise error.Abort(_("cannot tag null revision"))
+            raise error.Abort(_(b"cannot tag null revision"))
 
         tagsmod.tag(
             repo,
             names,
             node,
             message,
-            opts.get('local'),
-            opts.get('user'),
+            opts.get(b'local'),
+            opts.get(b'user'),
             date,
             editor=editor,
         )
 
 
 @command(
-    'tags',
+    b'tags',
     formatteropts,
-    '',
+    b'',
     helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
     intents={INTENT_READONLY},
 )
@@ -7170,45 +7348,45 @@
     """
 
     opts = pycompat.byteskwargs(opts)
-    ui.pager('tags')
-    fm = ui.formatter('tags', opts)
+    ui.pager(b'tags')
+    fm = ui.formatter(b'tags', opts)
     hexfunc = fm.hexfunc
 
     for t, n in reversed(repo.tagslist()):
         hn = hexfunc(n)
-        label = 'tags.normal'
-        tagtype = ''
-        if repo.tagtype(t) == 'local':
-            label = 'tags.local'
-            tagtype = 'local'
+        label = b'tags.normal'
+        tagtype = b''
+        if repo.tagtype(t) == b'local':
+            label = b'tags.local'
+            tagtype = b'local'
 
         fm.startitem()
         fm.context(repo=repo)
-        fm.write('tag', '%s', t, label=label)
-        fmt = " " * (30 - encoding.colwidth(t)) + ' %5d:%s'
+        fm.write(b'tag', b'%s', t, label=label)
+        fmt = b" " * (30 - encoding.colwidth(t)) + b' %5d:%s'
         fm.condwrite(
             not ui.quiet,
-            'rev node',
+            b'rev node',
             fmt,
             repo.changelog.rev(n),
             hn,
             label=label,
         )
         fm.condwrite(
-            ui.verbose and tagtype, 'type', ' %s', tagtype, label=label
+            ui.verbose and tagtype, b'type', b' %s', tagtype, label=label
         )
-        fm.plain('\n')
+        fm.plain(b'\n')
     fm.end()
 
 
 @command(
-    'tip',
+    b'tip',
     [
-        ('p', 'patch', None, _('show patch')),
-        ('g', 'git', None, _('use git extended diff format')),
+        (b'p', b'patch', None, _(b'show patch')),
+        (b'g', b'git', None, _(b'use git extended diff format')),
     ]
     + templateopts,
-    _('[-p] [-g]'),
+    _(b'[-p] [-g]'),
     helpcategory=command.CATEGORY_CHANGE_NAVIGATION,
 )
 def tip(ui, repo, **opts):
@@ -7229,21 +7407,21 @@
     """
     opts = pycompat.byteskwargs(opts)
     displayer = logcmdutil.changesetdisplayer(ui, repo, opts)
-    displayer.show(repo['tip'])
+    displayer.show(repo[b'tip'])
     displayer.close()
 
 
 @command(
-    'unbundle',
+    b'unbundle',
     [
         (
-            'u',
-            'update',
+            b'u',
+            b'update',
             None,
-            _('update to new branch head if changesets were unbundled'),
+            _(b'update to new branch head if changesets were unbundled'),
         )
     ],
-    _('[-u] FILE...'),
+    _(b'[-u] FILE...'),
     helpcategory=command.CATEGORY_IMPORT_EXPORT,
 )
 def unbundle(ui, repo, fname1, *fnames, **opts):
@@ -7261,25 +7439,28 @@
             gen = exchange.readbundle(ui, f, fname)
             if isinstance(gen, streamclone.streamcloneapplier):
                 raise error.Abort(
-                    _('packed bundles cannot be applied with ' '"hg unbundle"'),
-                    hint=_('use "hg debugapplystreamclonebundle"'),
+                    _(
+                        b'packed bundles cannot be applied with '
+                        b'"hg unbundle"'
+                    ),
+                    hint=_(b'use "hg debugapplystreamclonebundle"'),
                 )
-            url = 'bundle:' + fname
+            url = b'bundle:' + fname
             try:
-                txnname = 'unbundle'
+                txnname = b'unbundle'
                 if not isinstance(gen, bundle2.unbundle20):
-                    txnname = 'unbundle\n%s' % util.hidepassword(url)
+                    txnname = b'unbundle\n%s' % util.hidepassword(url)
                 with repo.transaction(txnname) as tr:
                     op = bundle2.applybundle(
-                        repo, gen, tr, source='unbundle', url=url
+                        repo, gen, tr, source=b'unbundle', url=url
                     )
             except error.BundleUnknownFeatureError as exc:
                 raise error.Abort(
-                    _('%s: unknown bundle feature, %s') % (fname, exc),
+                    _(b'%s: unknown bundle feature, %s') % (fname, exc),
                     hint=_(
-                        "see https://mercurial-scm.org/"
-                        "wiki/BundleFeature for more "
-                        "information"
+                        b"see https://mercurial-scm.org/"
+                        b"wiki/BundleFeature for more "
+                        b"information"
                     ),
                 )
             modheads = bundle2.combinechangegroupresults(op)
@@ -7288,29 +7469,34 @@
 
 
 @command(
-    'unshelve',
+    b'unshelve',
     [
-        ('a', 'abort', None, _('abort an incomplete unshelve operation')),
-        ('c', 'continue', None, _('continue an incomplete unshelve operation')),
-        ('i', 'interactive', None, _('use interactive mode (EXPERIMENTAL)')),
-        ('k', 'keep', None, _('keep shelve after unshelving')),
+        (b'a', b'abort', None, _(b'abort an incomplete unshelve operation')),
+        (
+            b'c',
+            b'continue',
+            None,
+            _(b'continue an incomplete unshelve operation'),
+        ),
+        (b'i', b'interactive', None, _(b'use interactive mode (EXPERIMENTAL)')),
+        (b'k', b'keep', None, _(b'keep shelve after unshelving')),
         (
-            'n',
-            'name',
-            '',
-            _('restore shelved change with given name'),
-            _('NAME'),
+            b'n',
+            b'name',
+            b'',
+            _(b'restore shelved change with given name'),
+            _(b'NAME'),
         ),
-        ('t', 'tool', '', _('specify merge tool')),
+        (b't', b'tool', b'', _(b'specify merge tool')),
         (
-            '',
-            'date',
-            '',
-            _('set date for temporary commits (DEPRECATED)'),
-            _('DATE'),
+            b'',
+            b'date',
+            b'',
+            _(b'set date for temporary commits (DEPRECATED)'),
+            _(b'DATE'),
         ),
     ],
-    _('hg unshelve [OPTION]... [FILE]... [-n SHELVED]'),
+    _(b'hg unshelve [OPTION]... [FILE]... [-n SHELVED]'),
     helpcategory=command.CATEGORY_WORKING_DIRECTORY,
 )
 def unshelve(ui, repo, *shelved, **opts):
@@ -7362,26 +7548,26 @@
 
 
 statemod.addunfinished(
-    'unshelve',
-    fname='shelvedstate',
+    b'unshelve',
+    fname=b'shelvedstate',
     continueflag=True,
     abortfunc=shelvemod.hgabortunshelve,
     continuefunc=shelvemod.hgcontinueunshelve,
-    cmdmsg=_('unshelve already in progress'),
+    cmdmsg=_(b'unshelve already in progress'),
 )
 
 
 @command(
-    'update|up|checkout|co',
+    b'update|up|checkout|co',
     [
-        ('C', 'clean', None, _('discard uncommitted changes (no backup)')),
-        ('c', 'check', None, _('require clean working directory')),
-        ('m', 'merge', None, _('merge uncommitted changes')),
-        ('d', 'date', '', _('tipmost revision matching date'), _('DATE')),
-        ('r', 'rev', '', _('revision'), _('REV')),
+        (b'C', b'clean', None, _(b'discard uncommitted changes (no backup)')),
+        (b'c', b'check', None, _(b'require clean working directory')),
+        (b'm', b'merge', None, _(b'merge uncommitted changes')),
+        (b'd', b'date', b'', _(b'tipmost revision matching date'), _(b'DATE')),
+        (b'r', b'rev', b'', _(b'revision'), _(b'REV')),
     ]
     + mergetoolopts,
-    _('[-C|-c|-m] [-d DATE] [[-r] REV]'),
+    _(b'[-C|-c|-m] [-d DATE] [[-r] REV]'),
     helpcategory=command.CATEGORY_WORKING_DIRECTORY,
     helpbasic=True,
 )
@@ -7446,34 +7632,34 @@
     check = opts.get(r'check')
     merge = opts.get(r'merge')
     if rev and node:
-        raise error.Abort(_("please specify just one revision"))
-
-    if ui.configbool('commands', 'update.requiredest'):
+        raise error.Abort(_(b"please specify just one revision"))
+
+    if ui.configbool(b'commands', b'update.requiredest'):
         if not node and not rev and not date:
             raise error.Abort(
-                _('you must specify a destination'),
-                hint=_('for example: hg update ".::"'),
+                _(b'you must specify a destination'),
+                hint=_(b'for example: hg update ".::"'),
             )
 
-    if rev is None or rev == '':
+    if rev is None or rev == b'':
         rev = node
 
     if date and rev is not None:
-        raise error.Abort(_("you can't specify a revision and a date"))
+        raise error.Abort(_(b"you can't specify a revision and a date"))
 
     if len([x for x in (clean, check, merge) if x]) > 1:
         raise error.Abort(
             _(
-                "can only specify one of -C/--clean, -c/--check, "
-                "or -m/--merge"
+                b"can only specify one of -C/--clean, -c/--check, "
+                b"or -m/--merge"
             )
         )
 
     updatecheck = None
     if check:
-        updatecheck = 'abort'
+        updatecheck = b'abort'
     elif merge:
-        updatecheck = 'none'
+        updatecheck = b'none'
 
     with repo.wlock():
         cmdutil.clearunfinished(repo)
@@ -7483,28 +7669,28 @@
         # if we defined a bookmark, we have to remember the original name
         brev = rev
         if rev:
-            repo = scmutil.unhidehashlikerevs(repo, [rev], 'nowarn')
+            repo = scmutil.unhidehashlikerevs(repo, [rev], b'nowarn')
         ctx = scmutil.revsingle(repo, rev, default=None)
         rev = ctx.rev()
         hidden = ctx.hidden()
-        overrides = {('ui', 'forcemerge'): opts.get(r'tool', '')}
-        with ui.configoverride(overrides, 'update'):
+        overrides = {(b'ui', b'forcemerge'): opts.get(r'tool', b'')}
+        with ui.configoverride(overrides, b'update'):
             ret = hg.updatetotally(
                 ui, repo, rev, brev, clean=clean, updatecheck=updatecheck
             )
         if hidden:
             ctxstr = ctx.hex()[:12]
-            ui.warn(_("updated to hidden changeset %s\n") % ctxstr)
+            ui.warn(_(b"updated to hidden changeset %s\n") % ctxstr)
 
             if ctx.obsolete():
                 obsfatemsg = obsutil._getfilteredreason(repo, ctxstr, ctx)
-                ui.warn("(%s)\n" % obsfatemsg)
+                ui.warn(b"(%s)\n" % obsfatemsg)
         return ret
 
 
 @command(
-    'verify',
-    [('', 'full', False, 'perform more checks (EXPERIMENTAL)')],
+    b'verify',
+    [(b'', b'full', False, b'perform more checks (EXPERIMENTAL)')],
     helpcategory=command.CATEGORY_MAINTENANCE,
 )
 def verify(ui, repo, **opts):
@@ -7526,13 +7712,13 @@
     opts = pycompat.byteskwargs(opts)
 
     level = None
-    if opts['full']:
+    if opts[b'full']:
         level = verifymod.VERIFY_FULL
     return hg.verify(repo, level)
 
 
 @command(
-    'version',
+    b'version',
     [] + formatteropts,
     helpcategory=command.CATEGORY_HELP,
     norepo=True,
@@ -7558,24 +7744,24 @@
     """
     opts = pycompat.byteskwargs(opts)
     if ui.verbose:
-        ui.pager('version')
-    fm = ui.formatter("version", opts)
+        ui.pager(b'version')
+    fm = ui.formatter(b"version", opts)
     fm.startitem()
     fm.write(
-        "ver", _("Mercurial Distributed SCM (version %s)\n"), util.version()
+        b"ver", _(b"Mercurial Distributed SCM (version %s)\n"), util.version()
     )
     license = _(
-        "(see https://mercurial-scm.org for more information)\n"
-        "\nCopyright (C) 2005-2019 Matt Mackall and others\n"
-        "This is free software; see the source for copying conditions. "
-        "There is NO\nwarranty; "
-        "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
+        b"(see https://mercurial-scm.org for more information)\n"
+        b"\nCopyright (C) 2005-2019 Matt Mackall and others\n"
+        b"This is free software; see the source for copying conditions. "
+        b"There is NO\nwarranty; "
+        b"not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
     )
     if not ui.quiet:
         fm.plain(license)
 
     if ui.verbose:
-        fm.plain(_("\nEnabled extensions:\n\n"))
+        fm.plain(_(b"\nEnabled extensions:\n\n"))
     # format names and versions into columns
     names = []
     vers = []
@@ -7584,19 +7770,19 @@
         names.append(name)
         vers.append(extensions.moduleversion(module) or None)
         isinternals.append(extensions.ismoduleinternal(module))
-    fn = fm.nested("extensions", tmpl='{name}\n')
+    fn = fm.nested(b"extensions", tmpl=b'{name}\n')
     if names:
-        namefmt = "  %%-%ds  " % max(len(n) for n in names)
-        places = [_("external"), _("internal")]
+        namefmt = b"  %%-%ds  " % max(len(n) for n in names)
+        places = [_(b"external"), _(b"internal")]
         for n, v, p in zip(names, vers, isinternals):
             fn.startitem()
-            fn.condwrite(ui.verbose, "name", namefmt, n)
+            fn.condwrite(ui.verbose, b"name", namefmt, n)
             if ui.verbose:
-                fn.plain("%s  " % places[p])
+                fn.plain(b"%s  " % places[p])
             fn.data(bundled=p)
-            fn.condwrite(ui.verbose and v, "ver", "%s", v)
+            fn.condwrite(ui.verbose and v, b"ver", b"%s", v)
             if ui.verbose:
-                fn.plain("\n")
+                fn.plain(b"\n")
     fn.end()
     fm.end()
 
@@ -7607,7 +7793,7 @@
     overrides = [cmd for cmd in cmdtable if cmd in table]
     if overrides:
         ui.warn(
-            _("extension '%s' overrides commands: %s\n")
-            % (name, " ".join(overrides))
+            _(b"extension '%s' overrides commands: %s\n")
+            % (name, b" ".join(overrides))
         )
     table.update(cmdtable)
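
A pattern worth calling out before the next file: nearly every command body above begins with opts = pycompat.byteskwargs(opts) and then looks options up with bytes keys. A rough, hypothetical reduction of that round trip follows; the real helpers live in mercurial/pycompat.py, and the latin-1 choice here is an assumption of this sketch.

def byteskwargs(dic):
    # simplified stand-in for pycompat.byteskwargs: **kwargs keys arrive
    # as native str on Python 3, but the byteified bodies want bytes keys
    return {k.encode('latin-1'): v for k, v in dic.items()}

def strkwargs(dic):
    # simplified stand-in for pycompat.strkwargs: back to str keys so the
    # dict can be **-expanded again
    return {k.decode('latin-1'): v for k, v in dic.items()}

def tagcmd(**opts):
    opts = byteskwargs(opts)  # same first line as the commands above
    assert opts.get(b'rev') == b'.'  # lookups use bytes keys from here on
    return strkwargs(opts)

assert tagcmd(rev=b'.') == {'rev': b'.'}
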
--- a/mercurial/commandserver.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/commandserver.py	Sun Oct 06 09:48:39 2019 -0400
@@ -53,13 +53,13 @@
 
     @property
     def name(self):
-        return '<%c-channel>' % self.channel
+        return b'<%c-channel>' % self.channel
 
     def write(self, data):
         if not data:
             return
         # single write() to guarantee the same atomicity as the underlying file
-        self.out.write(struct.pack('>cI', self.channel, len(data)) + data)
+        self.out.write(struct.pack(b'>cI', self.channel, len(data)) + data)
         self.out.flush()
 
     def __getattr__(self, attr):
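
The channel protocol written above is compact enough to state as data: one byte naming the channel, a big-endian uint32 payload length, then the payload, emitted as a single write(). A self-contained sketch of both directions (frame/unframe are illustrative names, not Mercurial APIs):

import struct

def frame(channel, data):
    # what channeledoutput.write sends, as one atomic chunk
    return struct.pack(b'>cI', channel, len(data)) + data

def unframe(blob):
    # what a client peels off the stream: 5 header bytes, then payload
    channel, length = struct.unpack(b'>cI', blob[:5])
    return channel, blob[5:5 + length]

msg = frame(b'o', b'hello\n')
assert msg == b'o\x00\x00\x00\x06hello\n'
assert unframe(msg) == (b'o', b'hello\n')
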
@@ -119,7 +119,7 @@
 
     @property
     def name(self):
-        return '<%c-channel>' % self.channel
+        return b'<%c-channel>' % self.channel
 
     def read(self, size=-1):
         if size < 0:
@@ -138,34 +138,34 @@
 
     def _read(self, size, channel):
         if not size:
-            return ''
+            return b''
         assert size > 0
 
         # tell the client we need at most size bytes
-        self.out.write(struct.pack('>cI', channel, size))
+        self.out.write(struct.pack(b'>cI', channel, size))
         self.out.flush()
 
         length = self.in_.read(4)
-        length = struct.unpack('>I', length)[0]
+        length = struct.unpack(b'>I', length)[0]
         if not length:
-            return ''
+            return b''
         else:
             return self.in_.read(length)
 
     def readline(self, size=-1):
         if size < 0:
             size = self.maxchunksize
-            s = self._read(size, 'L')
+            s = self._read(size, b'L')
             buf = s
             # keep asking for more until there's either no more or
             # we got a full line
-            while s and s[-1] != '\n':
-                s = self._read(size, 'L')
+            while s and s[-1:] != b'\n':
+                s = self._read(size, b'L')
                 buf += s
 
             return buf
         else:
-            return self._read(size, 'L')
+            return self._read(size, b'L')
 
     def __iter__(self):
         return self
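
One semantic trap in this hunk that a mechanical byteification cannot flag: on Python 3, indexing bytes yields an int, so comparing s[-1] against a one-byte literal is always unequal and readline's loop would never see the newline; a one-element slice must be used instead (hence s[-1:] above). In miniature:

s = b'line\n'
assert s[-1] == 0x0a    # indexing bytes gives an int on Python 3
assert s[-1] != b'\n'   # int vs bytes: never equal
assert s[-1:] == b'\n'  # a one-element slice keeps the bytes type
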
@@ -221,11 +221,11 @@
             self.repo = self.repoui = None
         self._prereposetups = prereposetups
 
-        self.cdebug = channeledoutput(fout, 'd')
-        self.cerr = channeledoutput(fout, 'e')
-        self.cout = channeledoutput(fout, 'o')
-        self.cin = channeledinput(fin, fout, 'I')
-        self.cresult = channeledoutput(fout, 'r')
+        self.cdebug = channeledoutput(fout, b'd')
+        self.cerr = channeledoutput(fout, b'e')
+        self.cout = channeledoutput(fout, b'o')
+        self.cin = channeledinput(fin, fout, b'I')
+        self.cresult = channeledoutput(fout, b'r')
 
         if self.ui.config(b'cmdserver', b'log') == b'-':
             # switch log stream of server's ui to the 'd' (debug) channel
@@ -248,7 +248,7 @@
 
     def _read(self, size):
         if not size:
-            return ''
+            return b''
 
         data = self.client.read(size)
 
@@ -264,16 +264,16 @@
         format:
         data length (uint32), data
         """
-        length = struct.unpack('>I', self._read(4))[0]
+        length = struct.unpack(b'>I', self._read(4))[0]
         if not length:
-            return ''
+            return b''
         return self._read(length)
 
     def _readlist(self):
         """read a list of NULL separated strings from the channel"""
         s = self._readstr()
         if s:
-            return s.split('\0')
+            return s.split(b'\0')
         else:
             return []
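
The _readstr/_readlist pair defines the argument wire format: a uint32 length prefix, then a payload whose list elements are NUL-joined. The matching encoder is not in this hunk, so the one below is a plausible sketch rather than the client's actual code, followed by a decode round trip:

import struct

def encodestr(s):
    # length-prefixed string, the inverse of _readstr
    return struct.pack(b'>I', len(s)) + s

def encodelist(items):
    # NUL-joined list, the inverse of _readlist
    return encodestr(b'\0'.join(items))

blob = encodelist([b'log', b'-r', b'.'])
length = struct.unpack(b'>I', blob[:4])[0]
assert blob[4:4 + length].split(b'\0') == [b'log', b'-r', b'.']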
 
@@ -302,8 +302,8 @@
             # any kind of interaction must use server channels, but chg may
             # replace channels by fully functional tty files. so nontty is
             # enforced only if cin is a channel.
-            if not util.safehasattr(self.cin, 'fileno'):
-                ui.setconfig('ui', 'nontty', 'true', 'commandserver')
+            if not util.safehasattr(self.cin, b'fileno'):
+                ui.setconfig(b'ui', b'nontty', b'true', b'commandserver')
 
         req = dispatch.request(
             args[:],
@@ -318,10 +318,10 @@
 
         try:
             ret = dispatch.dispatch(req) & 255
-            self.cresult.write(struct.pack('>i', int(ret)))
+            self.cresult.write(struct.pack(b'>i', int(ret)))
         finally:
             # restore old cwd
-            if '--cwd' in args:
+            if b'--cwd' in args:
                 os.chdir(self.cwd)
 
     def getencoding(self):
@@ -337,23 +337,23 @@
             else:
                 # clients are expected to check what commands are supported by
                 # looking at the servers capabilities
-                raise error.Abort(_('unknown command %s') % cmd)
+                raise error.Abort(_(b'unknown command %s') % cmd)
 
-        return cmd != ''
+        return cmd != b''
 
-    capabilities = {'runcommand': runcommand, 'getencoding': getencoding}
+    capabilities = {b'runcommand': runcommand, b'getencoding': getencoding}
 
     def serve(self):
-        hellomsg = 'capabilities: ' + ' '.join(sorted(self.capabilities))
-        hellomsg += '\n'
-        hellomsg += 'encoding: ' + encoding.encoding
-        hellomsg += '\n'
+        hellomsg = b'capabilities: ' + b' '.join(sorted(self.capabilities))
+        hellomsg += b'\n'
+        hellomsg += b'encoding: ' + encoding.encoding
+        hellomsg += b'\n'
         if self.cmsg:
-            hellomsg += 'message-encoding: %s\n' % self.cmsg.encoding
-        hellomsg += 'pid: %d' % procutil.getpid()
-        if util.safehasattr(os, 'getpgid'):
-            hellomsg += '\n'
-            hellomsg += 'pgid: %d' % os.getpgid(0)
+            hellomsg += b'message-encoding: %s\n' % self.cmsg.encoding
+        hellomsg += b'pid: %d' % procutil.getpid()
+        if util.safehasattr(os, b'getpgid'):
+            hellomsg += b'\n'
+            hellomsg += b'pgid: %d' % os.getpgid(0)
 
         # write the hello msg in -one- chunk
         self.cout.write(hellomsg)
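
The hello banner assembled above is the first thing a client reads on connect. Parsing it is a one-liner; the field values below are illustrative, not what any particular server will send:

hello = (b'capabilities: runcommand getencoding\n'
         b'encoding: UTF-8\n'
         b'pid: 4242')

fields = dict(line.split(b': ', 1) for line in hello.splitlines())
assert b'runcommand' in fields[b'capabilities'].split()
assert fields[b'encoding'] == b'UTF-8'
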
@@ -459,7 +459,7 @@
         # handle exceptions that may be raised by command server. most of
         # known exceptions are caught by dispatch.
         except error.Abort as inst:
-            ui.error(_('abort: %s\n') % inst)
+            ui.error(_(b'abort: %s\n') % inst)
         except IOError as inst:
             if inst.errno != errno.EPIPE:
                 raise
@@ -473,7 +473,7 @@
         if sv:
             cerr = sv.cerr
         else:
-            cerr = channeledoutput(fout, 'e')
+            cerr = channeledoutput(fout, b'e')
         cerr.write(encoding.strtolocal(traceback.format_exc()))
         raise
     finally:
@@ -500,7 +500,7 @@
     def bindsocket(self, sock, address):
         util.bindunixsocket(sock, address)
         sock.listen(socket.SOMAXCONN)
-        self.ui.status(_('listening at %s\n') % address)
+        self.ui.status(_(b'listening at %s\n') % address)
         self.ui.flush()  # avoid buffering of status message
 
     def unlinksocket(self, address):
@@ -527,11 +527,11 @@
     def __init__(self, ui, repo, opts, handler=None):
         self.ui = ui
         self.repo = repo
-        self.address = opts['address']
-        if not util.safehasattr(socket, 'AF_UNIX'):
-            raise error.Abort(_('unsupported platform'))
+        self.address = opts[b'address']
+        if not util.safehasattr(socket, b'AF_UNIX'):
+            raise error.Abort(_(b'unsupported platform'))
         if not self.address:
-            raise error.Abort(_('no socket path specified with --address'))
+            raise error.Abort(_(b'no socket path specified with --address'))
         self._servicehandler = handler or unixservicehandler(ui)
         self._sock = None
         self._mainipc = None
@@ -542,7 +542,7 @@
         # experimental config: cmdserver.max-repo-cache
         maxlen = ui.configint(b'cmdserver', b'max-repo-cache')
         if maxlen < 0:
-            raise error.Abort(_('negative max-repo-cache size not allowed'))
+            raise error.Abort(_(b'negative max-repo-cache size not allowed'))
         self._repoloader = repocache.repoloader(ui, maxlen)
 
     def init(self):
@@ -553,7 +553,7 @@
         o = socket.socketpair(socket.AF_UNIX, socket.SOCK_DGRAM)
         self._mainipc, self._workeripc = o
         self._servicehandler.bindsocket(self._sock, self.address)
-        if util.safehasattr(procutil, 'unblocksignal'):
+        if util.safehasattr(procutil, b'unblocksignal'):
             procutil.unblocksignal(signal.SIGCHLD)
         o = signal.signal(signal.SIGCHLD, self._sigchldhandler)
         self._oldsigchldhandler = o
--- a/mercurial/config.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/config.py	Sun Oct 06 09:48:39 2019 -0400
@@ -79,7 +79,7 @@
             return (section, item)
 
     def source(self, section, item):
-        return self._source.get((section, item), "")
+        return self._source.get((section, item), b"")
 
     def sections(self):
         return sorted(self._data.keys())
@@ -87,17 +87,17 @@
     def items(self, section):
         return list(self._data.get(section, {}).iteritems())
 
-    def set(self, section, item, value, source=""):
+    def set(self, section, item, value, source=b""):
         if pycompat.ispy3:
             assert not isinstance(
                 section, str
-            ), 'config section may not be unicode strings on Python 3'
+            ), b'config section may not be unicode strings on Python 3'
             assert not isinstance(
                 item, str
-            ), 'config item may not be unicode strings on Python 3'
+            ), b'config item may not be unicode strings on Python 3'
             assert not isinstance(
                 value, str
-            ), 'config values may not be unicode strings on Python 3'
+            ), b'config values may not be unicode strings on Python 3'
         if section not in self:
             self._data[section] = util.cowsortdict()
         else:
@@ -131,7 +131,7 @@
         commentre = util.re.compile(br'(;|#)')
         unsetre = util.re.compile(br'%unset\s+(\S+)')
         includere = util.re.compile(br'%include\s+(\S|\S.*\S)\s*$')
-        section = ""
+        section = b""
         item = None
         line = 0
         cont = False
@@ -141,7 +141,7 @@
 
         for l in data.splitlines(True):
             line += 1
-            if line == 1 and l.startswith('\xef\xbb\xbf'):
+            if line == 1 and l.startswith(b'\xef\xbb\xbf'):
                 # Someone set us up the BOM
                 l = l[3:]
             if cont:
@@ -151,8 +151,8 @@
                 if m:
                     if sections and section not in sections:
                         continue
-                    v = self.get(section, item) + "\n" + m.group(1)
-                    self.set(section, item, v, "%s:%d" % (src, line))
+                    v = self.get(section, item) + b"\n" + m.group(1)
+                    self.set(section, item, v, b"%s:%d" % (src, line))
                     continue
                 item = None
                 cont = False
@@ -171,9 +171,9 @@
                     except IOError as inst:
                         if inst.errno != errno.ENOENT:
                             raise error.ParseError(
-                                _("cannot include %s (%s)")
+                                _(b"cannot include %s (%s)")
                                 % (inc, inst.strerror),
-                                "%s:%d" % (src, line),
+                                b"%s:%d" % (src, line),
                             )
                 continue
             if emptyre.match(l):
@@ -192,7 +192,7 @@
                 cont = True
                 if sections and section not in sections:
                     continue
-                self.set(section, item, m.group(2), "%s:%d" % (src, line))
+                self.set(section, item, m.group(2), b"%s:%d" % (src, line))
                 continue
             m = unsetre.match(l)
             if m:
@@ -205,14 +205,14 @@
                 self._unset.append((section, name))
                 continue
 
-            raise error.ParseError(l.rstrip(), ("%s:%d" % (src, line)))
+            raise error.ParseError(l.rstrip(), (b"%s:%d" % (src, line)))
 
     def read(self, path, fp=None, sections=None, remap=None):
         if not fp:
-            fp = util.posixfile(path, 'rb')
+            fp = util.posixfile(path, b'rb')
         assert (
             getattr(fp, 'mode', r'rb') == r'rb'
-        ), 'config files must be opened in binary mode, got fp=%r mode=%r' % (
+        ), b'config files must be opened in binary mode, got fp=%r mode=%r' % (
             fp,
             fp.mode,
         )
@@ -231,41 +231,41 @@
     def _parse_plain(parts, s, offset):
         whitespace = False
         while offset < len(s) and (
-            s[offset : offset + 1].isspace() or s[offset : offset + 1] == ','
+            s[offset : offset + 1].isspace() or s[offset : offset + 1] == b','
         ):
             whitespace = True
             offset += 1
         if offset >= len(s):
             return None, parts, offset
         if whitespace:
-            parts.append('')
-        if s[offset : offset + 1] == '"' and not parts[-1]:
+            parts.append(b'')
+        if s[offset : offset + 1] == b'"' and not parts[-1]:
             return _parse_quote, parts, offset + 1
-        elif s[offset : offset + 1] == '"' and parts[-1][-1:] == '\\':
+        elif s[offset : offset + 1] == b'"' and parts[-1][-1:] == b'\\':
             parts[-1] = parts[-1][:-1] + s[offset : offset + 1]
             return _parse_plain, parts, offset + 1
         parts[-1] += s[offset : offset + 1]
         return _parse_plain, parts, offset + 1
 
     def _parse_quote(parts, s, offset):
-        if offset < len(s) and s[offset : offset + 1] == '"':  # ""
-            parts.append('')
+        if offset < len(s) and s[offset : offset + 1] == b'"':  # ""
+            parts.append(b'')
             offset += 1
             while offset < len(s) and (
                 s[offset : offset + 1].isspace()
-                or s[offset : offset + 1] == ','
+                or s[offset : offset + 1] == b','
             ):
                 offset += 1
             return _parse_plain, parts, offset
 
-        while offset < len(s) and s[offset : offset + 1] != '"':
+        while offset < len(s) and s[offset : offset + 1] != b'"':
             if (
-                s[offset : offset + 1] == '\\'
+                s[offset : offset + 1] == b'\\'
                 and offset + 1 < len(s)
-                and s[offset + 1 : offset + 2] == '"'
+                and s[offset + 1 : offset + 2] == b'"'
             ):
                 offset += 1
-                parts[-1] += '"'
+                parts[-1] += b'"'
             else:
                 parts[-1] += s[offset : offset + 1]
             offset += 1
@@ -273,39 +273,39 @@
         if offset >= len(s):
             real_parts = _configlist(parts[-1])
             if not real_parts:
-                parts[-1] = '"'
+                parts[-1] = b'"'
             else:
-                real_parts[0] = '"' + real_parts[0]
+                real_parts[0] = b'"' + real_parts[0]
                 parts = parts[:-1]
                 parts.extend(real_parts)
             return None, parts, offset
 
         offset += 1
-        while offset < len(s) and s[offset : offset + 1] in [' ', ',']:
+        while offset < len(s) and s[offset : offset + 1] in [b' ', b',']:
             offset += 1
 
         if offset < len(s):
-            if offset + 1 == len(s) and s[offset : offset + 1] == '"':
-                parts[-1] += '"'
+            if offset + 1 == len(s) and s[offset : offset + 1] == b'"':
+                parts[-1] += b'"'
                 offset += 1
             else:
-                parts.append('')
+                parts.append(b'')
         else:
             return None, parts, offset
 
         return _parse_plain, parts, offset
 
     def _configlist(s):
-        s = s.rstrip(' ,')
+        s = s.rstrip(b' ,')
         if not s:
             return []
-        parser, parts, offset = _parse_plain, [''], 0
+        parser, parts, offset = _parse_plain, [b''], 0
         while parser:
             parser, parts, offset = parser(parts, s, offset)
         return parts
 
     if value is not None and isinstance(value, bytes):
-        result = _configlist(value.lstrip(' ,\n'))
+        result = _configlist(value.lstrip(b' ,\n'))
     else:
         result = value
     return result or []
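
The `_parse_plain`/`_parse_quote` state machine above is why this file had to be byteified so carefully: it compares one-byte slices (`s[offset : offset + 1]`) instead of indexed elements, because on Python 3 indexing a bytes object yields an int while slicing yields bytes. A minimal sketch of the same slicing idiom, using a hypothetical helper that is not Mercurial's API (quote handling omitted):

    # On Python 3, b'abc'[0] is 97 (an int) but b'abc'[0:1] is b'a' (bytes),
    # so comparing an indexed byte against b',' would always be False.
    def split_plain(s):
        """Split a bytes config value on commas/whitespace (no quoting)."""
        parts = [b'']
        for i in range(len(s)):
            ch = s[i : i + 1]  # one-byte bytes object, e.g. b','
            if ch.isspace() or ch == b',':
                if parts[-1]:
                    parts.append(b'')
            else:
                parts[-1] += ch
        return [p for p in parts if p]

    assert split_plain(b'zlib, zstd none') == [b'zlib', b'zstd', b'none']
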
--- a/mercurial/configitems.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/configitems.py	Sun Oct 06 09:48:39 2019 -0400
@@ -23,9 +23,9 @@
         knownkeys = set(knownitems)
         newkeys = set(items)
         for key in sorted(knownkeys & newkeys):
-            msg = "extension '%s' overwrite config item '%s.%s'"
+            msg = b"extension '%s' overwrite config item '%s.%s'"
             msg %= (extname, section, key)
-            ui.develwarn(msg, config='warn-config')
+            ui.develwarn(msg, config=b'warn-config')
 
         knownitems.update(items)
 
@@ -110,7 +110,7 @@
     item = configitem(*args, **kwargs)
     section = configtable.setdefault(item.section, itemregister())
     if item.name in section:
-        msg = "duplicated config item registration for '%s.%s'"
+        msg = b"duplicated config item registration for '%s.%s'"
         raise error.ProgrammingError(msg % (item.section, item.name))
     section[item.name] = item
 
@@ -131,1401 +131,1404 @@
 coreconfigitem = getitemregister(coreitems)
 
 
-def _registerdiffopts(section, configprefix=''):
+def _registerdiffopts(section, configprefix=b''):
     coreconfigitem(
-        section, configprefix + 'nodates', default=False,
+        section, configprefix + b'nodates', default=False,
     )
     coreconfigitem(
-        section, configprefix + 'showfunc', default=False,
+        section, configprefix + b'showfunc', default=False,
     )
     coreconfigitem(
-        section, configprefix + 'unified', default=None,
+        section, configprefix + b'unified', default=None,
     )
     coreconfigitem(
-        section, configprefix + 'git', default=False,
+        section, configprefix + b'git', default=False,
     )
     coreconfigitem(
-        section, configprefix + 'ignorews', default=False,
+        section, configprefix + b'ignorews', default=False,
     )
     coreconfigitem(
-        section, configprefix + 'ignorewsamount', default=False,
+        section, configprefix + b'ignorewsamount', default=False,
     )
     coreconfigitem(
-        section, configprefix + 'ignoreblanklines', default=False,
+        section, configprefix + b'ignoreblanklines', default=False,
     )
     coreconfigitem(
-        section, configprefix + 'ignorewseol', default=False,
+        section, configprefix + b'ignorewseol', default=False,
     )
     coreconfigitem(
-        section, configprefix + 'nobinary', default=False,
+        section, configprefix + b'nobinary', default=False,
     )
     coreconfigitem(
-        section, configprefix + 'noprefix', default=False,
+        section, configprefix + b'noprefix', default=False,
     )
     coreconfigitem(
-        section, configprefix + 'word-diff', default=False,
+        section, configprefix + b'word-diff', default=False,
     )
 
 
 coreconfigitem(
-    'alias', '.*', default=dynamicdefault, generic=True,
+    b'alias', b'.*', default=dynamicdefault, generic=True,
 )
 coreconfigitem(
-    'auth', 'cookiefile', default=None,
+    b'auth', b'cookiefile', default=None,
 )
-_registerdiffopts(section='annotate')
+_registerdiffopts(section=b'annotate')
 # bookmarks.pushing: internal hack for discovery
 coreconfigitem(
-    'bookmarks', 'pushing', default=list,
+    b'bookmarks', b'pushing', default=list,
 )
 # bundle.mainreporoot: internal hack for bundlerepo
 coreconfigitem(
-    'bundle', 'mainreporoot', default='',
+    b'bundle', b'mainreporoot', default=b'',
 )
 coreconfigitem(
-    'censor', 'policy', default='abort', experimental=True,
+    b'censor', b'policy', default=b'abort', experimental=True,
 )
 coreconfigitem(
-    'chgserver', 'idletimeout', default=3600,
+    b'chgserver', b'idletimeout', default=3600,
 )
 coreconfigitem(
-    'chgserver', 'skiphash', default=False,
+    b'chgserver', b'skiphash', default=False,
 )
 coreconfigitem(
-    'cmdserver', 'log', default=None,
+    b'cmdserver', b'log', default=None,
 )
 coreconfigitem(
-    'cmdserver', 'max-log-files', default=7,
+    b'cmdserver', b'max-log-files', default=7,
 )
 coreconfigitem(
-    'cmdserver', 'max-log-size', default='1 MB',
+    b'cmdserver', b'max-log-size', default=b'1 MB',
 )
 coreconfigitem(
-    'cmdserver', 'max-repo-cache', default=0, experimental=True,
+    b'cmdserver', b'max-repo-cache', default=0, experimental=True,
 )
 coreconfigitem(
-    'cmdserver', 'message-encodings', default=list, experimental=True,
+    b'cmdserver', b'message-encodings', default=list, experimental=True,
 )
 coreconfigitem(
-    'cmdserver',
-    'track-log',
-    default=lambda: ['chgserver', 'cmdserver', 'repocache'],
+    b'cmdserver',
+    b'track-log',
+    default=lambda: [b'chgserver', b'cmdserver', b'repocache'],
 )
 coreconfigitem(
-    'color', '.*', default=None, generic=True,
+    b'color', b'.*', default=None, generic=True,
 )
 coreconfigitem(
-    'color', 'mode', default='auto',
+    b'color', b'mode', default=b'auto',
 )
 coreconfigitem(
-    'color', 'pagermode', default=dynamicdefault,
+    b'color', b'pagermode', default=dynamicdefault,
 )
-_registerdiffopts(section='commands', configprefix='commit.interactive.')
+_registerdiffopts(section=b'commands', configprefix=b'commit.interactive.')
 coreconfigitem(
-    'commands', 'commit.post-status', default=False,
+    b'commands', b'commit.post-status', default=False,
 )
 coreconfigitem(
-    'commands', 'grep.all-files', default=False, experimental=True,
+    b'commands', b'grep.all-files', default=False, experimental=True,
 )
 coreconfigitem(
-    'commands', 'resolve.confirm', default=False,
+    b'commands', b'resolve.confirm', default=False,
 )
 coreconfigitem(
-    'commands', 'resolve.explicit-re-merge', default=False,
+    b'commands', b'resolve.explicit-re-merge', default=False,
 )
 coreconfigitem(
-    'commands', 'resolve.mark-check', default='none',
+    b'commands', b'resolve.mark-check', default=b'none',
 )
-_registerdiffopts(section='commands', configprefix='revert.interactive.')
+_registerdiffopts(section=b'commands', configprefix=b'revert.interactive.')
 coreconfigitem(
-    'commands', 'show.aliasprefix', default=list,
+    b'commands', b'show.aliasprefix', default=list,
 )
 coreconfigitem(
-    'commands', 'status.relative', default=False,
+    b'commands', b'status.relative', default=False,
 )
 coreconfigitem(
-    'commands', 'status.skipstates', default=[], experimental=True,
+    b'commands', b'status.skipstates', default=[], experimental=True,
 )
 coreconfigitem(
-    'commands', 'status.terse', default='',
+    b'commands', b'status.terse', default=b'',
 )
 coreconfigitem(
-    'commands', 'status.verbose', default=False,
+    b'commands', b'status.verbose', default=False,
 )
 coreconfigitem(
-    'commands', 'update.check', default=None,
+    b'commands', b'update.check', default=None,
 )
 coreconfigitem(
-    'commands', 'update.requiredest', default=False,
+    b'commands', b'update.requiredest', default=False,
 )
 coreconfigitem(
-    'committemplate', '.*', default=None, generic=True,
+    b'committemplate', b'.*', default=None, generic=True,
 )
 coreconfigitem(
-    'convert', 'bzr.saverev', default=True,
+    b'convert', b'bzr.saverev', default=True,
 )
 coreconfigitem(
-    'convert', 'cvsps.cache', default=True,
+    b'convert', b'cvsps.cache', default=True,
 )
 coreconfigitem(
-    'convert', 'cvsps.fuzz', default=60,
+    b'convert', b'cvsps.fuzz', default=60,
 )
 coreconfigitem(
-    'convert', 'cvsps.logencoding', default=None,
+    b'convert', b'cvsps.logencoding', default=None,
 )
 coreconfigitem(
-    'convert', 'cvsps.mergefrom', default=None,
+    b'convert', b'cvsps.mergefrom', default=None,
 )
 coreconfigitem(
-    'convert', 'cvsps.mergeto', default=None,
+    b'convert', b'cvsps.mergeto', default=None,
 )
 coreconfigitem(
-    'convert', 'git.committeractions', default=lambda: ['messagedifferent'],
+    b'convert', b'git.committeractions', default=lambda: [b'messagedifferent'],
 )
 coreconfigitem(
-    'convert', 'git.extrakeys', default=list,
+    b'convert', b'git.extrakeys', default=list,
 )
 coreconfigitem(
-    'convert', 'git.findcopiesharder', default=False,
+    b'convert', b'git.findcopiesharder', default=False,
 )
 coreconfigitem(
-    'convert', 'git.remoteprefix', default='remote',
+    b'convert', b'git.remoteprefix', default=b'remote',
 )
 coreconfigitem(
-    'convert', 'git.renamelimit', default=400,
+    b'convert', b'git.renamelimit', default=400,
 )
 coreconfigitem(
-    'convert', 'git.saverev', default=True,
+    b'convert', b'git.saverev', default=True,
 )
 coreconfigitem(
-    'convert', 'git.similarity', default=50,
+    b'convert', b'git.similarity', default=50,
 )
 coreconfigitem(
-    'convert', 'git.skipsubmodules', default=False,
+    b'convert', b'git.skipsubmodules', default=False,
 )
 coreconfigitem(
-    'convert', 'hg.clonebranches', default=False,
+    b'convert', b'hg.clonebranches', default=False,
 )
 coreconfigitem(
-    'convert', 'hg.ignoreerrors', default=False,
+    b'convert', b'hg.ignoreerrors', default=False,
 )
 coreconfigitem(
-    'convert', 'hg.preserve-hash', default=False,
+    b'convert', b'hg.preserve-hash', default=False,
 )
 coreconfigitem(
-    'convert', 'hg.revs', default=None,
+    b'convert', b'hg.revs', default=None,
 )
 coreconfigitem(
-    'convert', 'hg.saverev', default=False,
+    b'convert', b'hg.saverev', default=False,
 )
 coreconfigitem(
-    'convert', 'hg.sourcename', default=None,
+    b'convert', b'hg.sourcename', default=None,
 )
 coreconfigitem(
-    'convert', 'hg.startrev', default=None,
+    b'convert', b'hg.startrev', default=None,
 )
 coreconfigitem(
-    'convert', 'hg.tagsbranch', default='default',
+    b'convert', b'hg.tagsbranch', default=b'default',
 )
 coreconfigitem(
-    'convert', 'hg.usebranchnames', default=True,
+    b'convert', b'hg.usebranchnames', default=True,
 )
 coreconfigitem(
-    'convert', 'ignoreancestorcheck', default=False, experimental=True,
+    b'convert', b'ignoreancestorcheck', default=False, experimental=True,
 )
 coreconfigitem(
-    'convert', 'localtimezone', default=False,
+    b'convert', b'localtimezone', default=False,
 )
 coreconfigitem(
-    'convert', 'p4.encoding', default=dynamicdefault,
+    b'convert', b'p4.encoding', default=dynamicdefault,
 )
 coreconfigitem(
-    'convert', 'p4.startrev', default=0,
+    b'convert', b'p4.startrev', default=0,
 )
 coreconfigitem(
-    'convert', 'skiptags', default=False,
+    b'convert', b'skiptags', default=False,
 )
 coreconfigitem(
-    'convert', 'svn.debugsvnlog', default=True,
+    b'convert', b'svn.debugsvnlog', default=True,
 )
 coreconfigitem(
-    'convert', 'svn.trunk', default=None,
+    b'convert', b'svn.trunk', default=None,
 )
 coreconfigitem(
-    'convert', 'svn.tags', default=None,
+    b'convert', b'svn.tags', default=None,
 )
 coreconfigitem(
-    'convert', 'svn.branches', default=None,
+    b'convert', b'svn.branches', default=None,
 )
 coreconfigitem(
-    'convert', 'svn.startrev', default=0,
+    b'convert', b'svn.startrev', default=0,
 )
 coreconfigitem(
-    'debug', 'dirstate.delaywrite', default=0,
+    b'debug', b'dirstate.delaywrite', default=0,
 )
 coreconfigitem(
-    'defaults', '.*', default=None, generic=True,
+    b'defaults', b'.*', default=None, generic=True,
 )
 coreconfigitem(
-    'devel', 'all-warnings', default=False,
+    b'devel', b'all-warnings', default=False,
 )
 coreconfigitem(
-    'devel', 'bundle2.debug', default=False,
+    b'devel', b'bundle2.debug', default=False,
 )
 coreconfigitem(
-    'devel', 'bundle.delta', default='',
+    b'devel', b'bundle.delta', default=b'',
 )
 coreconfigitem(
-    'devel', 'cache-vfs', default=None,
+    b'devel', b'cache-vfs', default=None,
 )
 coreconfigitem(
-    'devel', 'check-locks', default=False,
+    b'devel', b'check-locks', default=False,
 )
 coreconfigitem(
-    'devel', 'check-relroot', default=False,
+    b'devel', b'check-relroot', default=False,
 )
 coreconfigitem(
-    'devel', 'default-date', default=None,
+    b'devel', b'default-date', default=None,
 )
 coreconfigitem(
-    'devel', 'deprec-warn', default=False,
+    b'devel', b'deprec-warn', default=False,
 )
 coreconfigitem(
-    'devel', 'disableloaddefaultcerts', default=False,
+    b'devel', b'disableloaddefaultcerts', default=False,
 )
 coreconfigitem(
-    'devel', 'warn-empty-changegroup', default=False,
+    b'devel', b'warn-empty-changegroup', default=False,
 )
 coreconfigitem(
-    'devel', 'legacy.exchange', default=list,
+    b'devel', b'legacy.exchange', default=list,
 )
 coreconfigitem(
-    'devel', 'servercafile', default='',
+    b'devel', b'servercafile', default=b'',
 )
 coreconfigitem(
-    'devel', 'serverexactprotocol', default='',
+    b'devel', b'serverexactprotocol', default=b'',
 )
 coreconfigitem(
-    'devel', 'serverrequirecert', default=False,
+    b'devel', b'serverrequirecert', default=False,
 )
 coreconfigitem(
-    'devel', 'strip-obsmarkers', default=True,
+    b'devel', b'strip-obsmarkers', default=True,
 )
 coreconfigitem(
-    'devel', 'warn-config', default=None,
+    b'devel', b'warn-config', default=None,
 )
 coreconfigitem(
-    'devel', 'warn-config-default', default=None,
+    b'devel', b'warn-config-default', default=None,
 )
 coreconfigitem(
-    'devel', 'user.obsmarker', default=None,
+    b'devel', b'user.obsmarker', default=None,
 )
 coreconfigitem(
-    'devel', 'warn-config-unknown', default=None,
+    b'devel', b'warn-config-unknown', default=None,
 )
 coreconfigitem(
-    'devel', 'debug.copies', default=False,
+    b'devel', b'debug.copies', default=False,
 )
 coreconfigitem(
-    'devel', 'debug.extensions', default=False,
+    b'devel', b'debug.extensions', default=False,
 )
 coreconfigitem(
-    'devel', 'debug.peer-request', default=False,
+    b'devel', b'debug.peer-request', default=False,
 )
 coreconfigitem(
-    'devel', 'discovery.randomize', default=True,
+    b'devel', b'discovery.randomize', default=True,
 )
-_registerdiffopts(section='diff')
+_registerdiffopts(section=b'diff')
 coreconfigitem(
-    'email', 'bcc', default=None,
+    b'email', b'bcc', default=None,
 )
 coreconfigitem(
-    'email', 'cc', default=None,
+    b'email', b'cc', default=None,
 )
 coreconfigitem(
-    'email', 'charsets', default=list,
+    b'email', b'charsets', default=list,
 )
 coreconfigitem(
-    'email', 'from', default=None,
+    b'email', b'from', default=None,
 )
 coreconfigitem(
-    'email', 'method', default='smtp',
+    b'email', b'method', default=b'smtp',
 )
 coreconfigitem(
-    'email', 'reply-to', default=None,
+    b'email', b'reply-to', default=None,
 )
 coreconfigitem(
-    'email', 'to', default=None,
+    b'email', b'to', default=None,
 )
 coreconfigitem(
-    'experimental', 'archivemetatemplate', default=dynamicdefault,
+    b'experimental', b'archivemetatemplate', default=dynamicdefault,
 )
 coreconfigitem(
-    'experimental', 'auto-publish', default='publish',
+    b'experimental', b'auto-publish', default=b'publish',
 )
 coreconfigitem(
-    'experimental', 'bundle-phases', default=False,
+    b'experimental', b'bundle-phases', default=False,
 )
 coreconfigitem(
-    'experimental', 'bundle2-advertise', default=True,
+    b'experimental', b'bundle2-advertise', default=True,
 )
 coreconfigitem(
-    'experimental', 'bundle2-output-capture', default=False,
+    b'experimental', b'bundle2-output-capture', default=False,
 )
 coreconfigitem(
-    'experimental', 'bundle2.pushback', default=False,
+    b'experimental', b'bundle2.pushback', default=False,
 )
 coreconfigitem(
-    'experimental', 'bundle2lazylocking', default=False,
+    b'experimental', b'bundle2lazylocking', default=False,
 )
 coreconfigitem(
-    'experimental', 'bundlecomplevel', default=None,
+    b'experimental', b'bundlecomplevel', default=None,
 )
 coreconfigitem(
-    'experimental', 'bundlecomplevel.bzip2', default=None,
+    b'experimental', b'bundlecomplevel.bzip2', default=None,
 )
 coreconfigitem(
-    'experimental', 'bundlecomplevel.gzip', default=None,
+    b'experimental', b'bundlecomplevel.gzip', default=None,
 )
 coreconfigitem(
-    'experimental', 'bundlecomplevel.none', default=None,
+    b'experimental', b'bundlecomplevel.none', default=None,
 )
 coreconfigitem(
-    'experimental', 'bundlecomplevel.zstd', default=None,
+    b'experimental', b'bundlecomplevel.zstd', default=None,
 )
 coreconfigitem(
-    'experimental', 'changegroup3', default=False,
+    b'experimental', b'changegroup3', default=False,
 )
 coreconfigitem(
-    'experimental', 'cleanup-as-archived', default=False,
+    b'experimental', b'cleanup-as-archived', default=False,
 )
 coreconfigitem(
-    'experimental', 'clientcompressionengines', default=list,
+    b'experimental', b'clientcompressionengines', default=list,
 )
 coreconfigitem(
-    'experimental', 'copytrace', default='on',
+    b'experimental', b'copytrace', default=b'on',
 )
 coreconfigitem(
-    'experimental', 'copytrace.movecandidateslimit', default=100,
+    b'experimental', b'copytrace.movecandidateslimit', default=100,
 )
 coreconfigitem(
-    'experimental', 'copytrace.sourcecommitlimit', default=100,
+    b'experimental', b'copytrace.sourcecommitlimit', default=100,
 )
 coreconfigitem(
-    'experimental', 'copies.read-from', default="filelog-only",
+    b'experimental', b'copies.read-from', default=b"filelog-only",
 )
 coreconfigitem(
-    'experimental', 'copies.write-to', default='filelog-only',
+    b'experimental', b'copies.write-to', default=b'filelog-only',
 )
 coreconfigitem(
-    'experimental', 'crecordtest', default=None,
+    b'experimental', b'crecordtest', default=None,
 )
 coreconfigitem(
-    'experimental', 'directaccess', default=False,
+    b'experimental', b'directaccess', default=False,
 )
 coreconfigitem(
-    'experimental', 'directaccess.revnums', default=False,
+    b'experimental', b'directaccess.revnums', default=False,
 )
 coreconfigitem(
-    'experimental', 'editortmpinhg', default=False,
+    b'experimental', b'editortmpinhg', default=False,
 )
 coreconfigitem(
-    'experimental', 'evolution', default=list,
+    b'experimental', b'evolution', default=list,
 )
 coreconfigitem(
-    'experimental',
-    'evolution.allowdivergence',
+    b'experimental',
+    b'evolution.allowdivergence',
     default=False,
-    alias=[('experimental', 'allowdivergence')],
+    alias=[(b'experimental', b'allowdivergence')],
 )
 coreconfigitem(
-    'experimental', 'evolution.allowunstable', default=None,
+    b'experimental', b'evolution.allowunstable', default=None,
 )
 coreconfigitem(
-    'experimental', 'evolution.createmarkers', default=None,
+    b'experimental', b'evolution.createmarkers', default=None,
 )
 coreconfigitem(
-    'experimental',
-    'evolution.effect-flags',
+    b'experimental',
+    b'evolution.effect-flags',
     default=True,
-    alias=[('experimental', 'effect-flags')],
+    alias=[(b'experimental', b'effect-flags')],
 )
 coreconfigitem(
-    'experimental', 'evolution.exchange', default=None,
+    b'experimental', b'evolution.exchange', default=None,
 )
 coreconfigitem(
-    'experimental', 'evolution.bundle-obsmarker', default=False,
+    b'experimental', b'evolution.bundle-obsmarker', default=False,
 )
 coreconfigitem(
-    'experimental', 'log.topo', default=False,
+    b'experimental', b'log.topo', default=False,
 )
 coreconfigitem(
-    'experimental', 'evolution.report-instabilities', default=True,
+    b'experimental', b'evolution.report-instabilities', default=True,
 )
 coreconfigitem(
-    'experimental', 'evolution.track-operation', default=True,
+    b'experimental', b'evolution.track-operation', default=True,
 )
 # repo-level config to exclude a revset from visibility
 #
 # The target use case is to use `share` to expose different subsets of the
 # same repository, especially server-side. See also `server.view`.
 coreconfigitem(
-    'experimental', 'extra-filter-revs', default=None,
+    b'experimental', b'extra-filter-revs', default=None,
 )
 coreconfigitem(
-    'experimental', 'maxdeltachainspan', default=-1,
+    b'experimental', b'maxdeltachainspan', default=-1,
 )
 coreconfigitem(
-    'experimental', 'mergetempdirprefix', default=None,
+    b'experimental', b'mergetempdirprefix', default=None,
 )
 coreconfigitem(
-    'experimental', 'mmapindexthreshold', default=None,
+    b'experimental', b'mmapindexthreshold', default=None,
 )
 coreconfigitem(
-    'experimental', 'narrow', default=False,
+    b'experimental', b'narrow', default=False,
 )
 coreconfigitem(
-    'experimental', 'nonnormalparanoidcheck', default=False,
+    b'experimental', b'nonnormalparanoidcheck', default=False,
 )
 coreconfigitem(
-    'experimental', 'exportableenviron', default=list,
+    b'experimental', b'exportableenviron', default=list,
 )
 coreconfigitem(
-    'experimental', 'extendedheader.index', default=None,
+    b'experimental', b'extendedheader.index', default=None,
 )
 coreconfigitem(
-    'experimental', 'extendedheader.similarity', default=False,
+    b'experimental', b'extendedheader.similarity', default=False,
 )
 coreconfigitem(
-    'experimental', 'graphshorten', default=False,
+    b'experimental', b'graphshorten', default=False,
 )
 coreconfigitem(
-    'experimental', 'graphstyle.parent', default=dynamicdefault,
+    b'experimental', b'graphstyle.parent', default=dynamicdefault,
 )
 coreconfigitem(
-    'experimental', 'graphstyle.missing', default=dynamicdefault,
+    b'experimental', b'graphstyle.missing', default=dynamicdefault,
 )
 coreconfigitem(
-    'experimental', 'graphstyle.grandparent', default=dynamicdefault,
+    b'experimental', b'graphstyle.grandparent', default=dynamicdefault,
 )
 coreconfigitem(
-    'experimental', 'hook-track-tags', default=False,
+    b'experimental', b'hook-track-tags', default=False,
 )
 coreconfigitem(
-    'experimental', 'httppeer.advertise-v2', default=False,
+    b'experimental', b'httppeer.advertise-v2', default=False,
 )
 coreconfigitem(
-    'experimental', 'httppeer.v2-encoder-order', default=None,
+    b'experimental', b'httppeer.v2-encoder-order', default=None,
 )
 coreconfigitem(
-    'experimental', 'httppostargs', default=False,
+    b'experimental', b'httppostargs', default=False,
 )
 coreconfigitem(
-    'experimental', 'mergedriver', default=None,
+    b'experimental', b'mergedriver', default=None,
 )
-coreconfigitem('experimental', 'nointerrupt', default=False)
-coreconfigitem('experimental', 'nointerrupt-interactiveonly', default=True)
+coreconfigitem(b'experimental', b'nointerrupt', default=False)
+coreconfigitem(b'experimental', b'nointerrupt-interactiveonly', default=True)
 
 coreconfigitem(
-    'experimental', 'obsmarkers-exchange-debug', default=False,
+    b'experimental', b'obsmarkers-exchange-debug', default=False,
 )
 coreconfigitem(
-    'experimental', 'remotenames', default=False,
+    b'experimental', b'remotenames', default=False,
 )
 coreconfigitem(
-    'experimental', 'removeemptydirs', default=True,
+    b'experimental', b'removeemptydirs', default=True,
 )
 coreconfigitem(
-    'experimental', 'revert.interactive.select-to-keep', default=False,
+    b'experimental', b'revert.interactive.select-to-keep', default=False,
 )
 coreconfigitem(
-    'experimental', 'revisions.prefixhexnode', default=False,
+    b'experimental', b'revisions.prefixhexnode', default=False,
 )
 coreconfigitem(
-    'experimental', 'revlogv2', default=None,
+    b'experimental', b'revlogv2', default=None,
 )
 coreconfigitem(
-    'experimental', 'revisions.disambiguatewithin', default=None,
+    b'experimental', b'revisions.disambiguatewithin', default=None,
 )
 coreconfigitem(
-    'experimental', 'server.filesdata.recommended-batch-size', default=50000,
+    b'experimental', b'server.filesdata.recommended-batch-size', default=50000,
 )
 coreconfigitem(
-    'experimental',
-    'server.manifestdata.recommended-batch-size',
+    b'experimental',
+    b'server.manifestdata.recommended-batch-size',
     default=100000,
 )
 coreconfigitem(
-    'experimental', 'server.stream-narrow-clones', default=False,
+    b'experimental', b'server.stream-narrow-clones', default=False,
 )
 coreconfigitem(
-    'experimental', 'single-head-per-branch', default=False,
+    b'experimental', b'single-head-per-branch', default=False,
 )
 coreconfigitem(
-    'experimental',
-    'single-head-per-branch:account-closed-heads',
+    b'experimental',
+    b'single-head-per-branch:account-closed-heads',
     default=False,
 )
 coreconfigitem(
-    'experimental', 'sshserver.support-v2', default=False,
+    b'experimental', b'sshserver.support-v2', default=False,
 )
 coreconfigitem(
-    'experimental', 'sparse-read', default=False,
+    b'experimental', b'sparse-read', default=False,
 )
 coreconfigitem(
-    'experimental', 'sparse-read.density-threshold', default=0.50,
+    b'experimental', b'sparse-read.density-threshold', default=0.50,
 )
 coreconfigitem(
-    'experimental', 'sparse-read.min-gap-size', default='65K',
+    b'experimental', b'sparse-read.min-gap-size', default=b'65K',
 )
 coreconfigitem(
-    'experimental', 'treemanifest', default=False,
+    b'experimental', b'treemanifest', default=False,
 )
 coreconfigitem(
-    'experimental', 'update.atomic-file', default=False,
+    b'experimental', b'update.atomic-file', default=False,
 )
 coreconfigitem(
-    'experimental', 'sshpeer.advertise-v2', default=False,
+    b'experimental', b'sshpeer.advertise-v2', default=False,
 )
 coreconfigitem(
-    'experimental', 'web.apiserver', default=False,
+    b'experimental', b'web.apiserver', default=False,
 )
 coreconfigitem(
-    'experimental', 'web.api.http-v2', default=False,
+    b'experimental', b'web.api.http-v2', default=False,
 )
 coreconfigitem(
-    'experimental', 'web.api.debugreflect', default=False,
+    b'experimental', b'web.api.debugreflect', default=False,
 )
 coreconfigitem(
-    'experimental', 'worker.wdir-get-thread-safe', default=False,
+    b'experimental', b'worker.wdir-get-thread-safe', default=False,
 )
 coreconfigitem(
-    'experimental', 'xdiff', default=False,
+    b'experimental', b'xdiff', default=False,
 )
 coreconfigitem(
-    'extensions', '.*', default=None, generic=True,
+    b'extensions', b'.*', default=None, generic=True,
 )
 coreconfigitem(
-    'extdata', '.*', default=None, generic=True,
+    b'extdata', b'.*', default=None, generic=True,
 )
 coreconfigitem(
-    'format', 'bookmarks-in-store', default=False,
+    b'format', b'bookmarks-in-store', default=False,
 )
 coreconfigitem(
-    'format', 'chunkcachesize', default=None, experimental=True,
+    b'format', b'chunkcachesize', default=None, experimental=True,
 )
 coreconfigitem(
-    'format', 'dotencode', default=True,
+    b'format', b'dotencode', default=True,
 )
 coreconfigitem(
-    'format', 'generaldelta', default=False, experimental=True,
+    b'format', b'generaldelta', default=False, experimental=True,
 )
 coreconfigitem(
-    'format', 'manifestcachesize', default=None, experimental=True,
+    b'format', b'manifestcachesize', default=None, experimental=True,
 )
 coreconfigitem(
-    'format', 'maxchainlen', default=dynamicdefault, experimental=True,
+    b'format', b'maxchainlen', default=dynamicdefault, experimental=True,
 )
 coreconfigitem(
-    'format', 'obsstore-version', default=None,
+    b'format', b'obsstore-version', default=None,
 )
 coreconfigitem(
-    'format', 'sparse-revlog', default=True,
+    b'format', b'sparse-revlog', default=True,
 )
 coreconfigitem(
-    'format',
-    'revlog-compression',
-    default='zlib',
-    alias=[('experimental', 'format.compression')],
+    b'format',
+    b'revlog-compression',
+    default=b'zlib',
+    alias=[(b'experimental', b'format.compression')],
 )
 coreconfigitem(
-    'format', 'usefncache', default=True,
+    b'format', b'usefncache', default=True,
+)
+coreconfigitem(
+    b'format', b'usegeneraldelta', default=True,
 )
 coreconfigitem(
-    'format', 'usegeneraldelta', default=True,
+    b'format', b'usestore', default=True,
 )
 coreconfigitem(
-    'format', 'usestore', default=True,
+    b'format', b'use-side-data', default=False, experimental=True,
 )
 coreconfigitem(
-    'format', 'use-side-data', default=False, experimental=True,
+    b'format', b'internal-phase', default=False, experimental=True,
 )
 coreconfigitem(
-    'format', 'internal-phase', default=False, experimental=True,
+    b'fsmonitor', b'warn_when_unused', default=True,
 )
 coreconfigitem(
-    'fsmonitor', 'warn_when_unused', default=True,
+    b'fsmonitor', b'warn_update_file_count', default=50000,
 )
 coreconfigitem(
-    'fsmonitor', 'warn_update_file_count', default=50000,
+    b'help', br'hidden-command\..*', default=False, generic=True,
 )
 coreconfigitem(
-    'help', br'hidden-command\..*', default=False, generic=True,
+    b'help', br'hidden-topic\..*', default=False, generic=True,
 )
 coreconfigitem(
-    'help', br'hidden-topic\..*', default=False, generic=True,
+    b'hooks', b'.*', default=dynamicdefault, generic=True,
 )
 coreconfigitem(
-    'hooks', '.*', default=dynamicdefault, generic=True,
+    b'hgweb-paths', b'.*', default=list, generic=True,
 )
 coreconfigitem(
-    'hgweb-paths', '.*', default=list, generic=True,
+    b'hostfingerprints', b'.*', default=list, generic=True,
 )
 coreconfigitem(
-    'hostfingerprints', '.*', default=list, generic=True,
+    b'hostsecurity', b'ciphers', default=None,
 )
 coreconfigitem(
-    'hostsecurity', 'ciphers', default=None,
+    b'hostsecurity', b'disabletls10warning', default=False,
 )
 coreconfigitem(
-    'hostsecurity', 'disabletls10warning', default=False,
-)
-coreconfigitem(
-    'hostsecurity', 'minimumprotocol', default=dynamicdefault,
+    b'hostsecurity', b'minimumprotocol', default=dynamicdefault,
 )
 coreconfigitem(
-    'hostsecurity', '.*:minimumprotocol$', default=dynamicdefault, generic=True,
+    b'hostsecurity',
+    b'.*:minimumprotocol$',
+    default=dynamicdefault,
+    generic=True,
 )
 coreconfigitem(
-    'hostsecurity', '.*:ciphers$', default=dynamicdefault, generic=True,
+    b'hostsecurity', b'.*:ciphers$', default=dynamicdefault, generic=True,
 )
 coreconfigitem(
-    'hostsecurity', '.*:fingerprints$', default=list, generic=True,
+    b'hostsecurity', b'.*:fingerprints$', default=list, generic=True,
 )
 coreconfigitem(
-    'hostsecurity', '.*:verifycertsfile$', default=None, generic=True,
+    b'hostsecurity', b'.*:verifycertsfile$', default=None, generic=True,
 )
 
 coreconfigitem(
-    'http_proxy', 'always', default=False,
+    b'http_proxy', b'always', default=False,
 )
 coreconfigitem(
-    'http_proxy', 'host', default=None,
+    b'http_proxy', b'host', default=None,
 )
 coreconfigitem(
-    'http_proxy', 'no', default=list,
+    b'http_proxy', b'no', default=list,
 )
 coreconfigitem(
-    'http_proxy', 'passwd', default=None,
+    b'http_proxy', b'passwd', default=None,
 )
 coreconfigitem(
-    'http_proxy', 'user', default=None,
+    b'http_proxy', b'user', default=None,
 )
 
 coreconfigitem(
-    'http', 'timeout', default=None,
+    b'http', b'timeout', default=None,
 )
 
 coreconfigitem(
-    'logtoprocess', 'commandexception', default=None,
+    b'logtoprocess', b'commandexception', default=None,
 )
 coreconfigitem(
-    'logtoprocess', 'commandfinish', default=None,
+    b'logtoprocess', b'commandfinish', default=None,
 )
 coreconfigitem(
-    'logtoprocess', 'command', default=None,
+    b'logtoprocess', b'command', default=None,
 )
 coreconfigitem(
-    'logtoprocess', 'develwarn', default=None,
+    b'logtoprocess', b'develwarn', default=None,
 )
 coreconfigitem(
-    'logtoprocess', 'uiblocked', default=None,
+    b'logtoprocess', b'uiblocked', default=None,
 )
 coreconfigitem(
-    'merge', 'checkunknown', default='abort',
+    b'merge', b'checkunknown', default=b'abort',
 )
 coreconfigitem(
-    'merge', 'checkignored', default='abort',
+    b'merge', b'checkignored', default=b'abort',
 )
 coreconfigitem(
-    'experimental', 'merge.checkpathconflicts', default=False,
+    b'experimental', b'merge.checkpathconflicts', default=False,
 )
 coreconfigitem(
-    'merge', 'followcopies', default=True,
+    b'merge', b'followcopies', default=True,
 )
 coreconfigitem(
-    'merge', 'on-failure', default='continue',
+    b'merge', b'on-failure', default=b'continue',
 )
 coreconfigitem(
-    'merge', 'preferancestor', default=lambda: ['*'], experimental=True,
+    b'merge', b'preferancestor', default=lambda: [b'*'], experimental=True,
 )
 coreconfigitem(
-    'merge', 'strict-capability-check', default=False,
+    b'merge', b'strict-capability-check', default=False,
 )
 coreconfigitem(
-    'merge-tools', '.*', default=None, generic=True,
+    b'merge-tools', b'.*', default=None, generic=True,
 )
 coreconfigitem(
-    'merge-tools',
+    b'merge-tools',
     br'.*\.args$',
-    default="$local $base $other",
+    default=b"$local $base $other",
     generic=True,
     priority=-1,
 )
 coreconfigitem(
-    'merge-tools', br'.*\.binary$', default=False, generic=True, priority=-1,
+    b'merge-tools', br'.*\.binary$', default=False, generic=True, priority=-1,
 )
 coreconfigitem(
-    'merge-tools', br'.*\.check$', default=list, generic=True, priority=-1,
+    b'merge-tools', br'.*\.check$', default=list, generic=True, priority=-1,
 )
 coreconfigitem(
-    'merge-tools',
+    b'merge-tools',
     br'.*\.checkchanged$',
     default=False,
     generic=True,
     priority=-1,
 )
 coreconfigitem(
-    'merge-tools',
+    b'merge-tools',
     br'.*\.executable$',
     default=dynamicdefault,
     generic=True,
     priority=-1,
 )
 coreconfigitem(
-    'merge-tools', br'.*\.fixeol$', default=False, generic=True, priority=-1,
+    b'merge-tools', br'.*\.fixeol$', default=False, generic=True, priority=-1,
 )
 coreconfigitem(
-    'merge-tools', br'.*\.gui$', default=False, generic=True, priority=-1,
+    b'merge-tools', br'.*\.gui$', default=False, generic=True, priority=-1,
 )
 coreconfigitem(
-    'merge-tools',
+    b'merge-tools',
     br'.*\.mergemarkers$',
-    default='basic',
+    default=b'basic',
     generic=True,
     priority=-1,
 )
 coreconfigitem(
-    'merge-tools',
+    b'merge-tools',
     br'.*\.mergemarkertemplate$',
     default=dynamicdefault,  # take from ui.mergemarkertemplate
     generic=True,
     priority=-1,
 )
 coreconfigitem(
-    'merge-tools', br'.*\.priority$', default=0, generic=True, priority=-1,
+    b'merge-tools', br'.*\.priority$', default=0, generic=True, priority=-1,
 )
 coreconfigitem(
-    'merge-tools',
+    b'merge-tools',
     br'.*\.premerge$',
     default=dynamicdefault,
     generic=True,
     priority=-1,
 )
 coreconfigitem(
-    'merge-tools', br'.*\.symlink$', default=False, generic=True, priority=-1,
+    b'merge-tools', br'.*\.symlink$', default=False, generic=True, priority=-1,
 )
 coreconfigitem(
-    'pager', 'attend-.*', default=dynamicdefault, generic=True,
+    b'pager', b'attend-.*', default=dynamicdefault, generic=True,
 )
 coreconfigitem(
-    'pager', 'ignore', default=list,
+    b'pager', b'ignore', default=list,
 )
 coreconfigitem(
-    'pager', 'pager', default=dynamicdefault,
+    b'pager', b'pager', default=dynamicdefault,
 )
 coreconfigitem(
-    'patch', 'eol', default='strict',
+    b'patch', b'eol', default=b'strict',
 )
 coreconfigitem(
-    'patch', 'fuzz', default=2,
+    b'patch', b'fuzz', default=2,
 )
 coreconfigitem(
-    'paths', 'default', default=None,
+    b'paths', b'default', default=None,
 )
 coreconfigitem(
-    'paths', 'default-push', default=None,
+    b'paths', b'default-push', default=None,
 )
 coreconfigitem(
-    'paths', '.*', default=None, generic=True,
+    b'paths', b'.*', default=None, generic=True,
 )
 coreconfigitem(
-    'phases', 'checksubrepos', default='follow',
+    b'phases', b'checksubrepos', default=b'follow',
 )
 coreconfigitem(
-    'phases', 'new-commit', default='draft',
+    b'phases', b'new-commit', default=b'draft',
 )
 coreconfigitem(
-    'phases', 'publish', default=True,
+    b'phases', b'publish', default=True,
 )
 coreconfigitem(
-    'profiling', 'enabled', default=False,
+    b'profiling', b'enabled', default=False,
 )
 coreconfigitem(
-    'profiling', 'format', default='text',
+    b'profiling', b'format', default=b'text',
 )
 coreconfigitem(
-    'profiling', 'freq', default=1000,
+    b'profiling', b'freq', default=1000,
 )
 coreconfigitem(
-    'profiling', 'limit', default=30,
+    b'profiling', b'limit', default=30,
 )
 coreconfigitem(
-    'profiling', 'nested', default=0,
+    b'profiling', b'nested', default=0,
 )
 coreconfigitem(
-    'profiling', 'output', default=None,
+    b'profiling', b'output', default=None,
 )
 coreconfigitem(
-    'profiling', 'showmax', default=0.999,
+    b'profiling', b'showmax', default=0.999,
 )
 coreconfigitem(
-    'profiling', 'showmin', default=dynamicdefault,
+    b'profiling', b'showmin', default=dynamicdefault,
 )
 coreconfigitem(
-    'profiling', 'showtime', default=True,
+    b'profiling', b'showtime', default=True,
 )
 coreconfigitem(
-    'profiling', 'sort', default='inlinetime',
+    b'profiling', b'sort', default=b'inlinetime',
 )
 coreconfigitem(
-    'profiling', 'statformat', default='hotpath',
+    b'profiling', b'statformat', default=b'hotpath',
 )
 coreconfigitem(
-    'profiling', 'time-track', default=dynamicdefault,
+    b'profiling', b'time-track', default=dynamicdefault,
 )
 coreconfigitem(
-    'profiling', 'type', default='stat',
+    b'profiling', b'type', default=b'stat',
 )
 coreconfigitem(
-    'progress', 'assume-tty', default=False,
+    b'progress', b'assume-tty', default=False,
 )
 coreconfigitem(
-    'progress', 'changedelay', default=1,
+    b'progress', b'changedelay', default=1,
 )
 coreconfigitem(
-    'progress', 'clear-complete', default=True,
+    b'progress', b'clear-complete', default=True,
 )
 coreconfigitem(
-    'progress', 'debug', default=False,
+    b'progress', b'debug', default=False,
 )
 coreconfigitem(
-    'progress', 'delay', default=3,
+    b'progress', b'delay', default=3,
 )
 coreconfigitem(
-    'progress', 'disable', default=False,
+    b'progress', b'disable', default=False,
 )
 coreconfigitem(
-    'progress', 'estimateinterval', default=60.0,
+    b'progress', b'estimateinterval', default=60.0,
 )
 coreconfigitem(
-    'progress',
-    'format',
-    default=lambda: ['topic', 'bar', 'number', 'estimate'],
+    b'progress',
+    b'format',
+    default=lambda: [b'topic', b'bar', b'number', b'estimate'],
 )
 coreconfigitem(
-    'progress', 'refresh', default=0.1,
+    b'progress', b'refresh', default=0.1,
 )
 coreconfigitem(
-    'progress', 'width', default=dynamicdefault,
+    b'progress', b'width', default=dynamicdefault,
 )
 coreconfigitem(
-    'push', 'pushvars.server', default=False,
+    b'push', b'pushvars.server', default=False,
 )
 coreconfigitem(
-    'rewrite',
-    'backup-bundle',
+    b'rewrite',
+    b'backup-bundle',
     default=True,
-    alias=[('ui', 'history-editing-backup')],
+    alias=[(b'ui', b'history-editing-backup')],
 )
 coreconfigitem(
-    'rewrite', 'update-timestamp', default=False,
+    b'rewrite', b'update-timestamp', default=False,
 )
 coreconfigitem(
-    'storage', 'new-repo-backend', default='revlogv1', experimental=True,
+    b'storage', b'new-repo-backend', default=b'revlogv1', experimental=True,
 )
 coreconfigitem(
-    'storage',
-    'revlog.optimize-delta-parent-choice',
+    b'storage',
+    b'revlog.optimize-delta-parent-choice',
     default=True,
-    alias=[('format', 'aggressivemergedeltas')],
+    alias=[(b'format', b'aggressivemergedeltas')],
 )
 coreconfigitem(
-    'storage', 'revlog.reuse-external-delta', default=True,
+    b'storage', b'revlog.reuse-external-delta', default=True,
 )
 coreconfigitem(
-    'storage', 'revlog.reuse-external-delta-parent', default=None,
+    b'storage', b'revlog.reuse-external-delta-parent', default=None,
 )
 coreconfigitem(
-    'storage', 'revlog.zlib.level', default=None,
+    b'storage', b'revlog.zlib.level', default=None,
 )
 coreconfigitem(
-    'storage', 'revlog.zstd.level', default=None,
+    b'storage', b'revlog.zstd.level', default=None,
 )
 coreconfigitem(
-    'server', 'bookmarks-pushkey-compat', default=True,
+    b'server', b'bookmarks-pushkey-compat', default=True,
 )
 coreconfigitem(
-    'server', 'bundle1', default=True,
+    b'server', b'bundle1', default=True,
 )
 coreconfigitem(
-    'server', 'bundle1gd', default=None,
+    b'server', b'bundle1gd', default=None,
 )
 coreconfigitem(
-    'server', 'bundle1.pull', default=None,
+    b'server', b'bundle1.pull', default=None,
 )
 coreconfigitem(
-    'server', 'bundle1gd.pull', default=None,
+    b'server', b'bundle1gd.pull', default=None,
 )
 coreconfigitem(
-    'server', 'bundle1.push', default=None,
+    b'server', b'bundle1.push', default=None,
 )
 coreconfigitem(
-    'server', 'bundle1gd.push', default=None,
+    b'server', b'bundle1gd.push', default=None,
 )
 coreconfigitem(
-    'server',
-    'bundle2.stream',
+    b'server',
+    b'bundle2.stream',
     default=True,
-    alias=[('experimental', 'bundle2.stream')],
+    alias=[(b'experimental', b'bundle2.stream')],
 )
 coreconfigitem(
-    'server', 'compressionengines', default=list,
+    b'server', b'compressionengines', default=list,
 )
 coreconfigitem(
-    'server', 'concurrent-push-mode', default='strict',
+    b'server', b'concurrent-push-mode', default=b'strict',
 )
 coreconfigitem(
-    'server', 'disablefullbundle', default=False,
+    b'server', b'disablefullbundle', default=False,
 )
 coreconfigitem(
-    'server', 'maxhttpheaderlen', default=1024,
+    b'server', b'maxhttpheaderlen', default=1024,
 )
 coreconfigitem(
-    'server', 'pullbundle', default=False,
+    b'server', b'pullbundle', default=False,
 )
 coreconfigitem(
-    'server', 'preferuncompressed', default=False,
+    b'server', b'preferuncompressed', default=False,
 )
 coreconfigitem(
-    'server', 'streamunbundle', default=False,
+    b'server', b'streamunbundle', default=False,
 )
 coreconfigitem(
-    'server', 'uncompressed', default=True,
+    b'server', b'uncompressed', default=True,
 )
 coreconfigitem(
-    'server', 'uncompressedallowsecret', default=False,
+    b'server', b'uncompressedallowsecret', default=False,
 )
 coreconfigitem(
-    'server', 'view', default='served',
+    b'server', b'view', default=b'served',
 )
 coreconfigitem(
-    'server', 'validate', default=False,
+    b'server', b'validate', default=False,
 )
 coreconfigitem(
-    'server', 'zliblevel', default=-1,
+    b'server', b'zliblevel', default=-1,
 )
 coreconfigitem(
-    'server', 'zstdlevel', default=3,
+    b'server', b'zstdlevel', default=3,
 )
 coreconfigitem(
-    'share', 'pool', default=None,
+    b'share', b'pool', default=None,
 )
 coreconfigitem(
-    'share', 'poolnaming', default='identity',
+    b'share', b'poolnaming', default=b'identity',
 )
 coreconfigitem(
-    'shelve', 'maxbackups', default=10,
+    b'shelve', b'maxbackups', default=10,
 )
 coreconfigitem(
-    'smtp', 'host', default=None,
+    b'smtp', b'host', default=None,
 )
 coreconfigitem(
-    'smtp', 'local_hostname', default=None,
+    b'smtp', b'local_hostname', default=None,
 )
 coreconfigitem(
-    'smtp', 'password', default=None,
+    b'smtp', b'password', default=None,
 )
 coreconfigitem(
-    'smtp', 'port', default=dynamicdefault,
+    b'smtp', b'port', default=dynamicdefault,
 )
 coreconfigitem(
-    'smtp', 'tls', default='none',
+    b'smtp', b'tls', default=b'none',
 )
 coreconfigitem(
-    'smtp', 'username', default=None,
+    b'smtp', b'username', default=None,
 )
 coreconfigitem(
-    'sparse', 'missingwarning', default=True, experimental=True,
+    b'sparse', b'missingwarning', default=True, experimental=True,
 )
 coreconfigitem(
-    'subrepos',
-    'allowed',
+    b'subrepos',
+    b'allowed',
     default=dynamicdefault,  # to make backporting simpler
 )
 coreconfigitem(
-    'subrepos', 'hg:allowed', default=dynamicdefault,
+    b'subrepos', b'hg:allowed', default=dynamicdefault,
 )
 coreconfigitem(
-    'subrepos', 'git:allowed', default=dynamicdefault,
+    b'subrepos', b'git:allowed', default=dynamicdefault,
 )
 coreconfigitem(
-    'subrepos', 'svn:allowed', default=dynamicdefault,
+    b'subrepos', b'svn:allowed', default=dynamicdefault,
 )
 coreconfigitem(
-    'templates', '.*', default=None, generic=True,
+    b'templates', b'.*', default=None, generic=True,
 )
 coreconfigitem(
-    'templateconfig', '.*', default=dynamicdefault, generic=True,
+    b'templateconfig', b'.*', default=dynamicdefault, generic=True,
 )
 coreconfigitem(
-    'trusted', 'groups', default=list,
+    b'trusted', b'groups', default=list,
 )
 coreconfigitem(
-    'trusted', 'users', default=list,
+    b'trusted', b'users', default=list,
 )
 coreconfigitem(
-    'ui', '_usedassubrepo', default=False,
+    b'ui', b'_usedassubrepo', default=False,
 )
 coreconfigitem(
-    'ui', 'allowemptycommit', default=False,
+    b'ui', b'allowemptycommit', default=False,
 )
 coreconfigitem(
-    'ui', 'archivemeta', default=True,
+    b'ui', b'archivemeta', default=True,
 )
 coreconfigitem(
-    'ui', 'askusername', default=False,
+    b'ui', b'askusername', default=False,
 )
 coreconfigitem(
-    'ui', 'clonebundlefallback', default=False,
+    b'ui', b'clonebundlefallback', default=False,
 )
 coreconfigitem(
-    'ui', 'clonebundleprefers', default=list,
+    b'ui', b'clonebundleprefers', default=list,
 )
 coreconfigitem(
-    'ui', 'clonebundles', default=True,
+    b'ui', b'clonebundles', default=True,
 )
 coreconfigitem(
-    'ui', 'color', default='auto',
+    b'ui', b'color', default=b'auto',
 )
 coreconfigitem(
-    'ui', 'commitsubrepos', default=False,
+    b'ui', b'commitsubrepos', default=False,
 )
 coreconfigitem(
-    'ui', 'debug', default=False,
+    b'ui', b'debug', default=False,
 )
 coreconfigitem(
-    'ui', 'debugger', default=None,
+    b'ui', b'debugger', default=None,
 )
 coreconfigitem(
-    'ui', 'editor', default=dynamicdefault,
+    b'ui', b'editor', default=dynamicdefault,
 )
 coreconfigitem(
-    'ui', 'fallbackencoding', default=None,
+    b'ui', b'fallbackencoding', default=None,
 )
 coreconfigitem(
-    'ui', 'forcecwd', default=None,
+    b'ui', b'forcecwd', default=None,
 )
 coreconfigitem(
-    'ui', 'forcemerge', default=None,
+    b'ui', b'forcemerge', default=None,
 )
 coreconfigitem(
-    'ui', 'formatdebug', default=False,
+    b'ui', b'formatdebug', default=False,
 )
 coreconfigitem(
-    'ui', 'formatjson', default=False,
+    b'ui', b'formatjson', default=False,
 )
 coreconfigitem(
-    'ui', 'formatted', default=None,
+    b'ui', b'formatted', default=None,
 )
 coreconfigitem(
-    'ui', 'graphnodetemplate', default=None,
+    b'ui', b'graphnodetemplate', default=None,
 )
 coreconfigitem(
-    'ui', 'interactive', default=None,
+    b'ui', b'interactive', default=None,
 )
 coreconfigitem(
-    'ui', 'interface', default=None,
+    b'ui', b'interface', default=None,
 )
 coreconfigitem(
-    'ui', 'interface.chunkselector', default=None,
+    b'ui', b'interface.chunkselector', default=None,
 )
 coreconfigitem(
-    'ui', 'large-file-limit', default=10000000,
+    b'ui', b'large-file-limit', default=10000000,
 )
 coreconfigitem(
-    'ui', 'logblockedtimes', default=False,
+    b'ui', b'logblockedtimes', default=False,
 )
 coreconfigitem(
-    'ui', 'logtemplate', default=None,
+    b'ui', b'logtemplate', default=None,
 )
 coreconfigitem(
-    'ui', 'merge', default=None,
+    b'ui', b'merge', default=None,
 )
 coreconfigitem(
-    'ui', 'mergemarkers', default='basic',
+    b'ui', b'mergemarkers', default=b'basic',
 )
 coreconfigitem(
-    'ui',
-    'mergemarkertemplate',
+    b'ui',
+    b'mergemarkertemplate',
     default=(
-        '{node|short} '
-        '{ifeq(tags, "tip", "", '
-        'ifeq(tags, "", "", "{tags} "))}'
-        '{if(bookmarks, "{bookmarks} ")}'
-        '{ifeq(branch, "default", "", "{branch} ")}'
-        '- {author|user}: {desc|firstline}'
+        b'{node|short} '
+        b'{ifeq(tags, "tip", "", '
+        b'ifeq(tags, "", "", "{tags} "))}'
+        b'{if(bookmarks, "{bookmarks} ")}'
+        b'{ifeq(branch, "default", "", "{branch} ")}'
+        b'- {author|user}: {desc|firstline}'
     ),
 )
 coreconfigitem(
-    'ui', 'message-output', default='stdio',
+    b'ui', b'message-output', default=b'stdio',
 )
 coreconfigitem(
-    'ui', 'nontty', default=False,
+    b'ui', b'nontty', default=False,
 )
 coreconfigitem(
-    'ui', 'origbackuppath', default=None,
+    b'ui', b'origbackuppath', default=None,
 )
 coreconfigitem(
-    'ui', 'paginate', default=True,
+    b'ui', b'paginate', default=True,
 )
 coreconfigitem(
-    'ui', 'patch', default=None,
+    b'ui', b'patch', default=None,
 )
 coreconfigitem(
-    'ui', 'pre-merge-tool-output-template', default=None,
+    b'ui', b'pre-merge-tool-output-template', default=None,
 )
 coreconfigitem(
-    'ui', 'portablefilenames', default='warn',
+    b'ui', b'portablefilenames', default=b'warn',
 )
 coreconfigitem(
-    'ui', 'promptecho', default=False,
+    b'ui', b'promptecho', default=False,
 )
 coreconfigitem(
-    'ui', 'quiet', default=False,
+    b'ui', b'quiet', default=False,
 )
 coreconfigitem(
-    'ui', 'quietbookmarkmove', default=False,
+    b'ui', b'quietbookmarkmove', default=False,
 )
 coreconfigitem(
-    'ui', 'relative-paths', default='legacy',
+    b'ui', b'relative-paths', default=b'legacy',
 )
 coreconfigitem(
-    'ui', 'remotecmd', default='hg',
+    b'ui', b'remotecmd', default=b'hg',
 )
 coreconfigitem(
-    'ui', 'report_untrusted', default=True,
+    b'ui', b'report_untrusted', default=True,
 )
 coreconfigitem(
-    'ui', 'rollback', default=True,
+    b'ui', b'rollback', default=True,
 )
 coreconfigitem(
-    'ui', 'signal-safe-lock', default=True,
+    b'ui', b'signal-safe-lock', default=True,
 )
 coreconfigitem(
-    'ui', 'slash', default=False,
+    b'ui', b'slash', default=False,
 )
 coreconfigitem(
-    'ui', 'ssh', default='ssh',
+    b'ui', b'ssh', default=b'ssh',
 )
 coreconfigitem(
-    'ui', 'ssherrorhint', default=None,
+    b'ui', b'ssherrorhint', default=None,
 )
 coreconfigitem(
-    'ui', 'statuscopies', default=False,
+    b'ui', b'statuscopies', default=False,
 )
 coreconfigitem(
-    'ui', 'strict', default=False,
+    b'ui', b'strict', default=False,
 )
 coreconfigitem(
-    'ui', 'style', default='',
+    b'ui', b'style', default=b'',
 )
 coreconfigitem(
-    'ui', 'supportcontact', default=None,
+    b'ui', b'supportcontact', default=None,
 )
 coreconfigitem(
-    'ui', 'textwidth', default=78,
+    b'ui', b'textwidth', default=78,
 )
 coreconfigitem(
-    'ui', 'timeout', default='600',
+    b'ui', b'timeout', default=b'600',
 )
 coreconfigitem(
-    'ui', 'timeout.warn', default=0,
+    b'ui', b'timeout.warn', default=0,
 )
 coreconfigitem(
-    'ui', 'traceback', default=False,
+    b'ui', b'traceback', default=False,
 )
 coreconfigitem(
-    'ui', 'tweakdefaults', default=False,
+    b'ui', b'tweakdefaults', default=False,
 )
-coreconfigitem('ui', 'username', alias=[('ui', 'user')])
+coreconfigitem(b'ui', b'username', alias=[(b'ui', b'user')])
 coreconfigitem(
-    'ui', 'verbose', default=False,
+    b'ui', b'verbose', default=False,
 )
 coreconfigitem(
-    'verify', 'skipflags', default=None,
+    b'verify', b'skipflags', default=None,
 )
 coreconfigitem(
-    'web', 'allowbz2', default=False,
+    b'web', b'allowbz2', default=False,
 )
 coreconfigitem(
-    'web', 'allowgz', default=False,
+    b'web', b'allowgz', default=False,
 )
 coreconfigitem(
-    'web', 'allow-pull', alias=[('web', 'allowpull')], default=True,
+    b'web', b'allow-pull', alias=[(b'web', b'allowpull')], default=True,
 )
 coreconfigitem(
-    'web', 'allow-push', alias=[('web', 'allow_push')], default=list,
+    b'web', b'allow-push', alias=[(b'web', b'allow_push')], default=list,
 )
 coreconfigitem(
-    'web', 'allowzip', default=False,
+    b'web', b'allowzip', default=False,
 )
 coreconfigitem(
-    'web', 'archivesubrepos', default=False,
+    b'web', b'archivesubrepos', default=False,
 )
 coreconfigitem(
-    'web', 'cache', default=True,
+    b'web', b'cache', default=True,
 )
 coreconfigitem(
-    'web', 'comparisoncontext', default=5,
+    b'web', b'comparisoncontext', default=5,
 )
 coreconfigitem(
-    'web', 'contact', default=None,
+    b'web', b'contact', default=None,
 )
 coreconfigitem(
-    'web', 'deny_push', default=list,
+    b'web', b'deny_push', default=list,
 )
 coreconfigitem(
-    'web', 'guessmime', default=False,
+    b'web', b'guessmime', default=False,
 )
 coreconfigitem(
-    'web', 'hidden', default=False,
+    b'web', b'hidden', default=False,
 )
 coreconfigitem(
-    'web', 'labels', default=list,
+    b'web', b'labels', default=list,
 )
 coreconfigitem(
-    'web', 'logoimg', default='hglogo.png',
+    b'web', b'logoimg', default=b'hglogo.png',
 )
 coreconfigitem(
-    'web', 'logourl', default='https://mercurial-scm.org/',
+    b'web', b'logourl', default=b'https://mercurial-scm.org/',
 )
 coreconfigitem(
-    'web', 'accesslog', default='-',
+    b'web', b'accesslog', default=b'-',
 )
 coreconfigitem(
-    'web', 'address', default='',
+    b'web', b'address', default=b'',
 )
 coreconfigitem(
-    'web', 'allow-archive', alias=[('web', 'allow_archive')], default=list,
+    b'web', b'allow-archive', alias=[(b'web', b'allow_archive')], default=list,
 )
 coreconfigitem(
-    'web', 'allow_read', default=list,
+    b'web', b'allow_read', default=list,
 )
 coreconfigitem(
-    'web', 'baseurl', default=None,
+    b'web', b'baseurl', default=None,
 )
 coreconfigitem(
-    'web', 'cacerts', default=None,
+    b'web', b'cacerts', default=None,
 )
 coreconfigitem(
-    'web', 'certificate', default=None,
+    b'web', b'certificate', default=None,
 )
 coreconfigitem(
-    'web', 'collapse', default=False,
+    b'web', b'collapse', default=False,
 )
 coreconfigitem(
-    'web', 'csp', default=None,
+    b'web', b'csp', default=None,
 )
 coreconfigitem(
-    'web', 'deny_read', default=list,
+    b'web', b'deny_read', default=list,
 )
 coreconfigitem(
-    'web', 'descend', default=True,
+    b'web', b'descend', default=True,
 )
 coreconfigitem(
-    'web', 'description', default="",
+    b'web', b'description', default=b"",
 )
 coreconfigitem(
-    'web', 'encoding', default=lambda: encoding.encoding,
+    b'web', b'encoding', default=lambda: encoding.encoding,
 )
 coreconfigitem(
-    'web', 'errorlog', default='-',
+    b'web', b'errorlog', default=b'-',
 )
 coreconfigitem(
-    'web', 'ipv6', default=False,
+    b'web', b'ipv6', default=False,
 )
 coreconfigitem(
-    'web', 'maxchanges', default=10,
+    b'web', b'maxchanges', default=10,
 )
 coreconfigitem(
-    'web', 'maxfiles', default=10,
+    b'web', b'maxfiles', default=10,
 )
 coreconfigitem(
-    'web', 'maxshortchanges', default=60,
+    b'web', b'maxshortchanges', default=60,
 )
 coreconfigitem(
-    'web', 'motd', default='',
+    b'web', b'motd', default=b'',
 )
 coreconfigitem(
-    'web', 'name', default=dynamicdefault,
+    b'web', b'name', default=dynamicdefault,
 )
 coreconfigitem(
-    'web', 'port', default=8000,
+    b'web', b'port', default=8000,
 )
 coreconfigitem(
-    'web', 'prefix', default='',
+    b'web', b'prefix', default=b'',
 )
 coreconfigitem(
-    'web', 'push_ssl', default=True,
+    b'web', b'push_ssl', default=True,
 )
 coreconfigitem(
-    'web', 'refreshinterval', default=20,
+    b'web', b'refreshinterval', default=20,
 )
 coreconfigitem(
-    'web', 'server-header', default=None,
+    b'web', b'server-header', default=None,
 )
 coreconfigitem(
-    'web', 'static', default=None,
+    b'web', b'static', default=None,
 )
 coreconfigitem(
-    'web', 'staticurl', default=None,
+    b'web', b'staticurl', default=None,
 )
 coreconfigitem(
-    'web', 'stripes', default=1,
+    b'web', b'stripes', default=1,
 )
 coreconfigitem(
-    'web', 'style', default='paper',
+    b'web', b'style', default=b'paper',
 )
 coreconfigitem(
-    'web', 'templates', default=None,
+    b'web', b'templates', default=None,
 )
 coreconfigitem(
-    'web', 'view', default='served', experimental=True,
+    b'web', b'view', default=b'served', experimental=True,
 )
 coreconfigitem(
-    'worker', 'backgroundclose', default=dynamicdefault,
+    b'worker', b'backgroundclose', default=dynamicdefault,
 )
 # Windows defaults to a limit of 512 open files. A buffer of 128
 # should give us enough headway.
 coreconfigitem(
-    'worker', 'backgroundclosemaxqueue', default=384,
+    b'worker', b'backgroundclosemaxqueue', default=384,
 )
 coreconfigitem(
-    'worker', 'backgroundcloseminfilecount', default=2048,
+    b'worker', b'backgroundcloseminfilecount', default=2048,
 )
 coreconfigitem(
-    'worker', 'backgroundclosethreadcount', default=4,
+    b'worker', b'backgroundclosethreadcount', default=4,
 )
 coreconfigitem(
-    'worker', 'enabled', default=True,
+    b'worker', b'enabled', default=True,
 )
 coreconfigitem(
-    'worker', 'numcpus', default=None,
+    b'worker', b'numcpus', default=None,
 )
 
 # Rebase-related configuration moved to core because other extensions are
 # doing strange things. For example, shelve imports the extension to reuse
 # some bits without formally loading it.
 coreconfigitem(
-    'commands', 'rebase.requiredest', default=False,
+    b'commands', b'rebase.requiredest', default=False,
 )
 coreconfigitem(
-    'experimental', 'rebaseskipobsolete', default=True,
+    b'experimental', b'rebaseskipobsolete', default=True,
 )
 coreconfigitem(
-    'rebase', 'singletransaction', default=False,
+    b'rebase', b'singletransaction', default=False,
 )
 coreconfigitem(
-    'rebase', 'experimental.inmemory', default=False,
+    b'rebase', b'experimental.inmemory', default=False,
 )
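
The configitems hunks above only pay off if both sides of the config API
agree on bytes: the tables are keyed by byte strings, so a str section or
item name no longer matches any registration on Python 3. A minimal sketch
of the round trip, using the registrar pattern extensions follow (the
configtable and getstyle names are illustrative, not part of this change):

  from mercurial import registrar

  configtable = {}
  configitem = registrar.configitem(configtable)

  # register the default with bytes keys, as done throughout this patch
  configitem(b'web', b'style', default=b'paper')

  def getstyle(ui):
      # ui.config() takes bytes and returns bytes; a str 'web'/'style'
      # pair would miss the b'web'/b'style' registration entirely
      return ui.config(b'web', b'style')
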
--- a/mercurial/context.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/context.py	Sun Oct 06 09:48:39 2019 -0400
@@ -201,29 +201,29 @@
 
     def obsolete(self):
         """True if the changeset is obsolete"""
-        return self.rev() in obsmod.getrevs(self._repo, 'obsolete')
+        return self.rev() in obsmod.getrevs(self._repo, b'obsolete')
 
     def extinct(self):
         """True if the changeset is extinct"""
-        return self.rev() in obsmod.getrevs(self._repo, 'extinct')
+        return self.rev() in obsmod.getrevs(self._repo, b'extinct')
 
     def orphan(self):
         """True if the changeset is not obsolete, but its ancestor is"""
-        return self.rev() in obsmod.getrevs(self._repo, 'orphan')
+        return self.rev() in obsmod.getrevs(self._repo, b'orphan')
 
     def phasedivergent(self):
         """True if the changeset tries to be a successor of a public changeset
 
         Only non-public and non-obsolete changesets may be phase-divergent.
         """
-        return self.rev() in obsmod.getrevs(self._repo, 'phasedivergent')
+        return self.rev() in obsmod.getrevs(self._repo, b'phasedivergent')
 
     def contentdivergent(self):
         """Is a successor of a changeset with multiple possible successor sets
 
         Only non-public and non-obsolete changesets may be content-divergent.
         """
-        return self.rev() in obsmod.getrevs(self._repo, 'contentdivergent')
+        return self.rev() in obsmod.getrevs(self._repo, b'contentdivergent')
 
     def isunstable(self):
         """True if the changeset is either orphan, phase-divergent or
@@ -240,11 +240,11 @@
         """
         instabilities = []
         if self.orphan():
-            instabilities.append('orphan')
+            instabilities.append(b'orphan')
         if self.phasedivergent():
-            instabilities.append('phase-divergent')
+            instabilities.append(b'phase-divergent')
         if self.contentdivergent():
-            instabilities.append('content-divergent')
+            instabilities.append(b'content-divergent')
         return instabilities
 
     def parents(self):
@@ -266,7 +266,7 @@
                 return self._manifest[path], self._manifest.flags(path)
             except KeyError:
                 raise error.ManifestLookupError(
-                    self._node, path, _('not found in manifest')
+                    self._node, path, _(b'not found in manifest')
                 )
         if r'_manifestdelta' in self.__dict__ or path in self.files():
             if path in self._manifestdelta:
@@ -279,7 +279,7 @@
             node, flag = mfl[self._changeset.manifest].find(path)
         except KeyError:
             raise error.ManifestLookupError(
-                self._node, path, _('not found in manifest')
+                self._node, path, _(b'not found in manifest')
             )
 
         return node, flag
@@ -291,7 +291,7 @@
         try:
             return self._fileinfo(path)[1]
         except error.LookupError:
-            return ''
+            return b''
 
     @propertycache
     def _copies(self):
@@ -321,7 +321,7 @@
         pats=None,
         include=None,
         exclude=None,
-        default='glob',
+        default=b'glob',
         listsubrepos=False,
         badfn=None,
     ):
@@ -446,7 +446,7 @@
                     listsubrepos=True,
                 )
                 for rfiles, sfiles in zip(r, s):
-                    rfiles.extend("%s/%s" % (subpath, f) for f in sfiles)
+                    rfiles.extend(b"%s/%s" % (subpath, f) for f in sfiles)
 
         for l in r:
             l.sort()
@@ -529,12 +529,12 @@
         return sorted(modified)
 
     def filesadded(self):
-        source = self._repo.ui.config('experimental', 'copies.read-from')
+        source = self._repo.ui.config(b'experimental', b'copies.read-from')
         filesadded = self._changeset.filesadded
-        if source == 'changeset-only':
+        if source == b'changeset-only':
             if filesadded is None:
                 filesadded = []
-        elif source == 'compatibility':
+        elif source == b'compatibility':
             if filesadded is None:
                 filesadded = scmutil.computechangesetfilesadded(self)
         else:
@@ -542,12 +542,12 @@
         return filesadded
 
     def filesremoved(self):
-        source = self._repo.ui.config('experimental', 'copies.read-from')
+        source = self._repo.ui.config(b'experimental', b'copies.read-from')
         filesremoved = self._changeset.filesremoved
-        if source == 'changeset-only':
+        if source == b'changeset-only':
             if filesremoved is None:
                 filesremoved = []
-        elif source == 'compatibility':
+        elif source == b'compatibility':
             if filesremoved is None:
                 filesremoved = scmutil.computechangesetfilesremoved(self)
         else:
@@ -556,7 +556,7 @@
 
     @propertycache
     def _copies(self):
-        source = self._repo.ui.config('experimental', 'copies.read-from')
+        source = self._repo.ui.config(b'experimental', b'copies.read-from')
         p1copies = self._changeset.p1copies
         p2copies = self._changeset.p2copies
         # If config says to get copy metadata only from changeset, then return
@@ -564,12 +564,12 @@
         # In compatibility mode, we return copy data from the changeset if
         # it was recorded there, and otherwise we fall back to getting it from
         # the filelogs (below).
-        if source == 'changeset-only':
+        if source == b'changeset-only':
             if p1copies is None:
                 p1copies = {}
             if p2copies is None:
                 p2copies = {}
-        elif source == 'compatibility':
+        elif source == b'compatibility':
             if p1copies is None:
                 # we are in compatibility mode and there is no data in the
                 # changeset, so we get the copy metadata from the filelogs.
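
A compact sketch of the three read-from modes handled above, once the
literals are bytes (the hgrc spelling is shown for orientation; repo here
is an illustrative repository object):

  # [experimental]
  # copies.read-from = changeset-only | compatibility | filelog-only
  source = repo.ui.config(b'experimental', b'copies.read-from')
  if source == b'changeset-only':
      pass  # trust the changeset; missing data simply means no copies
  elif source == b'compatibility':
      pass  # changeset first, fall back to the filelogs when unrecorded
  else:
      pass  # b'filelog-only' and anything else: trace through filelogs
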
@@ -584,10 +584,10 @@
         return self._changeset.description
 
     def branch(self):
-        return encoding.tolocal(self._changeset.extra.get("branch"))
+        return encoding.tolocal(self._changeset.extra.get(b"branch"))
 
     def closesbranch(self):
-        return 'close' in self._changeset.extra
+        return b'close' in self._changeset.extra
 
     def extra(self):
         """Return a dict of extra information."""
@@ -605,7 +605,7 @@
         return self._repo._phasecache.phase(self._repo, self._rev)
 
     def hidden(self):
-        return self._rev in repoview.filterrevs(self._repo, 'visible')
+        return self._rev in repoview.filterrevs(self._repo, b'visible')
 
     def isinmemory(self):
         return False
@@ -656,7 +656,7 @@
             anc = cahs[0]
         else:
             # experimental config: merge.preferancestor
-            for r in self._repo.ui.configlist('merge', 'preferancestor'):
+            for r in self._repo.ui.configlist(b'merge', b'preferancestor'):
                 try:
                     ctx = scmutil.revsymbol(self._repo, r)
                 except error.RepoLookupError:
@@ -669,13 +669,13 @@
             if warn:
                 self._repo.ui.status(
                     (
-                        _("note: using %s as ancestor of %s and %s\n")
+                        _(b"note: using %s as ancestor of %s and %s\n")
                         % (short(anc), short(self._node), short(n2))
                     )
-                    + ''.join(
+                    + b''.join(
                         _(
-                            "      alternatively, use --config "
-                            "merge.preferancestor=%s\n"
+                            b"      alternatively, use --config "
+                            b"merge.preferancestor=%s\n"
                         )
                         % short(n)
                         for n in sorted(cahs)
@@ -695,9 +695,9 @@
         def bad(fn, msg):
             # The manifest doesn't know about subrepos, so don't complain about
             # paths into valid subrepos.
-            if any(fn == s or fn.startswith(s + '/') for s in self.substate):
+            if any(fn == s or fn.startswith(s + b'/') for s in self.substate):
                 return
-            match.bad(fn, _('no such file in rev %s') % self)
+            match.bad(fn, _(b'no such file in rev %s') % self)
 
         m = matchmod.badmatch(self._repo.narrowmatch(match), bad)
         return self._manifest.walk(m)
@@ -757,9 +757,9 @@
 
     def __bytes__(self):
         try:
-            return "%s@%s" % (self.path(), self._changectx)
+            return b"%s@%s" % (self.path(), self._changectx)
         except error.LookupError:
-            return "%s@???" % self.path()
+            return b"%s@???" % self.path()
 
     __str__ = encoding.strmethod(__bytes__)
 
@@ -871,10 +871,10 @@
             return False
 
     def isexec(self):
-        return 'x' in self.flags()
+        return b'x' in self.flags()
 
     def islink(self):
-        return 'l' in self.flags()
+        return b'l' in self.flags()
 
     def isabsent(self):
         """whether this filectx represents a file not in self._changectx
@@ -895,7 +895,7 @@
 
         if self._filenode is None:
             raise error.ProgrammingError(
-                'filectx.cmp() must be reimplemented if not backed by revlog'
+                b'filectx.cmp() must be reimplemented if not backed by revlog'
             )
 
         if fctx._filenode is None:
@@ -1162,7 +1162,7 @@
 
         assert (
             changeid is not None or fileid is not None or changectx is not None
-        ), "bad args: changeid=%r, fileid=%r, changectx=%r" % (
+        ), b"bad args: changeid=%r, fileid=%r, changectx=%r" % (
             changeid,
             fileid,
             changectx,
@@ -1223,11 +1223,11 @@
         try:
             return self._filelog.read(self._filenode)
         except error.CensoredNodeError:
-            if self._repo.ui.config("censor", "policy") == "ignore":
-                return ""
+            if self._repo.ui.config(b"censor", b"policy") == b"ignore":
+                return b""
             raise error.Abort(
-                _("censored node: %s") % short(self._filenode),
-                hint=_("set censor.policy to ignore errors"),
+                _(b"censored node: %s") % short(self._filenode),
+                hint=_(b"set censor.policy to ignore errors"),
             )
 
     def size(self):
@@ -1275,7 +1275,7 @@
     def __init__(
         self,
         repo,
-        text="",
+        text=b"",
         user=None,
         date=None,
         extra=None,
@@ -1297,12 +1297,12 @@
         if extra:
             self._extra = extra.copy()
         if branch is not None:
-            self._extra['branch'] = encoding.fromlocal(branch)
-        if not self._extra.get('branch'):
-            self._extra['branch'] = 'default'
+            self._extra[b'branch'] = encoding.fromlocal(branch)
+        if not self._extra.get(b'branch'):
+            self._extra[b'branch'] = b'default'
 
     def __bytes__(self):
-        return bytes(self._parents[0]) + "+"
+        return bytes(self._parents[0]) + b"+"
 
     __str__ = encoding.strmethod(__bytes__)
 
@@ -1322,7 +1322,7 @@
     @propertycache
     def _date(self):
         ui = self._repo.ui
-        date = ui.configdate('devel', 'default-date')
+        date = ui.configdate(b'devel', b'default-date')
         if date is None:
             date = dateutil.makedate()
         return date
@@ -1364,10 +1364,10 @@
     filesremoved = removed
 
     def branch(self):
-        return encoding.tolocal(self._extra['branch'])
+        return encoding.tolocal(self._extra[b'branch'])
 
     def closesbranch(self):
-        return 'close' in self._extra
+        return b'close' in self._extra
 
     def extra(self):
         return self._extra
@@ -1433,14 +1433,14 @@
     """
 
     def __init__(
-        self, repo, text="", user=None, date=None, extra=None, changes=None
+        self, repo, text=b"", user=None, date=None, extra=None, changes=None
     ):
         branch = None
-        if not extra or 'branch' not in extra:
+        if not extra or b'branch' not in extra:
             try:
                 branch = repo.dirstate.branch()
             except UnicodeDecodeError:
-                raise error.Abort(_('branch name not in UTF-8!'))
+                raise error.Abort(_(b'branch name not in UTF-8!'))
         super(workingctx, self).__init__(
             repo, text, user, date, extra, changes, branch=branch
         )
@@ -1448,11 +1448,11 @@
     def __iter__(self):
         d = self._repo.dirstate
         for f in d:
-            if d[f] != 'r':
+            if d[f] != b'r':
                 yield f
 
     def __contains__(self, key):
-        return self._repo.dirstate[key] not in "?r"
+        return self._repo.dirstate[key] not in b"?r"
 
     def hex(self):
         return wdirhex
@@ -1501,7 +1501,7 @@
                     return fl2
                 if fl2 == fla:
                     return fl1
-                return ''  # punt for conflicts
+                return b''  # punt for conflicts
 
         return func
 
@@ -1514,12 +1514,12 @@
             try:
                 return self._manifest.flags(path)
             except KeyError:
-                return ''
+                return b''
 
         try:
             return self._flagfunc(path)
         except OSError:
-            return ''
+            return b''
 
     def filectx(self, path, filelog=None):
         """get a file context from the working directory"""
@@ -1528,7 +1528,7 @@
         )
 
     def dirty(self, missing=False, merge=True, branch=True):
-        "check whether a working directory is modified"
+        b"check whether a working directory is modified"
         # check subrepos first
         for s in sorted(self.substate):
             if self.sub(s).dirty(missing=missing):
@@ -1543,7 +1543,7 @@
             or (missing and self.deleted())
         )
 
-    def add(self, list, prefix=""):
+    def add(self, list, prefix=b""):
         with self._repo.wlock():
             ui, ds = self._repo.ui, self._repo.dirstate
             uipath = lambda f: ds.pathto(pathutil.join(prefix, f))
@@ -1557,47 +1557,47 @@
                 try:
                     st = lstat(f)
                 except OSError:
-                    ui.warn(_("%s does not exist!\n") % uipath(f))
+                    ui.warn(_(b"%s does not exist!\n") % uipath(f))
                     rejected.append(f)
                     continue
-                limit = ui.configbytes('ui', 'large-file-limit')
+                limit = ui.configbytes(b'ui', b'large-file-limit')
                 if limit != 0 and st.st_size > limit:
                     ui.warn(
                         _(
-                            "%s: up to %d MB of RAM may be required "
-                            "to manage this file\n"
-                            "(use 'hg revert %s' to cancel the "
-                            "pending addition)\n"
+                            b"%s: up to %d MB of RAM may be required "
+                            b"to manage this file\n"
+                            b"(use 'hg revert %s' to cancel the "
+                            b"pending addition)\n"
                         )
                         % (f, 3 * st.st_size // 1000000, uipath(f))
                     )
                 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
                     ui.warn(
                         _(
-                            "%s not added: only files and symlinks "
-                            "supported currently\n"
+                            b"%s not added: only files and symlinks "
+                            b"supported currently\n"
                         )
                         % uipath(f)
                     )
                     rejected.append(f)
-                elif ds[f] in 'amn':
-                    ui.warn(_("%s already tracked!\n") % uipath(f))
-                elif ds[f] == 'r':
+                elif ds[f] in b'amn':
+                    ui.warn(_(b"%s already tracked!\n") % uipath(f))
+                elif ds[f] == b'r':
                     ds.normallookup(f)
                 else:
                     ds.add(f)
             return rejected
 
-    def forget(self, files, prefix=""):
+    def forget(self, files, prefix=b""):
         with self._repo.wlock():
             ds = self._repo.dirstate
             uipath = lambda f: ds.pathto(pathutil.join(prefix, f))
             rejected = []
             for f in files:
                 if f not in ds:
-                    self._repo.ui.warn(_("%s not tracked!\n") % uipath(f))
+                    self._repo.ui.warn(_(b"%s not tracked!\n") % uipath(f))
                     rejected.append(f)
-                elif ds[f] != 'a':
+                elif ds[f] != b'a':
                     ds.remove(f)
                 else:
                     ds.drop(f)
@@ -1610,20 +1610,20 @@
             if err.errno != errno.ENOENT:
                 raise
             self._repo.ui.warn(
-                _("%s does not exist!\n") % self._repo.dirstate.pathto(dest)
+                _(b"%s does not exist!\n") % self._repo.dirstate.pathto(dest)
             )
             return
         if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
             self._repo.ui.warn(
-                _("copy failed: %s is not a file or a " "symbolic link\n")
+                _(b"copy failed: %s is not a file or a " b"symbolic link\n")
                 % self._repo.dirstate.pathto(dest)
             )
         else:
             with self._repo.wlock():
                 ds = self._repo.dirstate
-                if ds[dest] in '?':
+                if ds[dest] in b'?':
                     ds.add(dest)
-                elif ds[dest] in 'r':
+                elif ds[dest] in b'r':
                     ds.normallookup(dest)
                 ds.copy(source, dest)
 
@@ -1632,7 +1632,7 @@
         pats=None,
         include=None,
         exclude=None,
-        default='glob',
+        default=b'glob',
         listsubrepos=False,
         badfn=None,
     ):
@@ -1665,16 +1665,16 @@
         # symlink
         sane = []
         for f in files:
-            if self.flags(f) == 'l':
+            if self.flags(f) == b'l':
                 d = self[f].data()
                 if (
-                    d == ''
+                    d == b''
                     or len(d) >= 1024
-                    or '\n' in d
+                    or b'\n' in d
                     or stringutil.binary(d)
                 ):
                     self._repo.ui.debug(
-                        'ignoring suspect symlink placeholder' ' "%s"\n' % f
+                        b'ignoring suspect symlink placeholder' b' "%s"\n' % f
                     )
                     continue
             sane.append(f)
@@ -1746,7 +1746,7 @@
                         # already changed simultaneously after last
                         # caching (see also issue5584 for detail)
                         self._repo.ui.debug(
-                            'skip updating dirstate: ' 'identity mismatch\n'
+                            b'skip updating dirstate: ' b'identity mismatch\n'
                         )
             except error.LockError:
                 pass
@@ -1757,7 +1757,7 @@
     def _dirstatestatus(self, match, ignored=False, clean=False, unknown=False):
         '''Gets the status from the dirstate -- internal use only.'''
         subrepos = []
-        if '.hgsub' in self:
+        if b'.hgsub' in self:
             subrepos = sorted(self.substate)
         cmp, s = self._repo.dirstate.status(
             match, subrepos, ignored=ignored, clean=clean, unknown=unknown
@@ -1855,7 +1855,7 @@
         # might have accidentally ended up with the entire contents of the file
         # they are supposed to be linking to.
         s.modified[:] = self._filtersuspectsymlink(s.modified)
-        if other != self._repo['.']:
+        if other != self._repo[b'.']:
             s = super(workingctx, self)._buildstatus(
                 other, s, match, listignored, listclean, listunknown
             )
@@ -1871,14 +1871,14 @@
         If we aren't comparing against the working directory's parent, then we
         just use the default match object sent to us.
         """
-        if other != self._repo['.']:
+        if other != self._repo[b'.']:
 
             def bad(f, msg):
                 # 'f' may be a directory pattern from 'match.files()',
                 # so 'f not in ctx1' is not enough
                 if f not in other and not other.hasdir(f):
                     self._repo.ui.warn(
-                        '%s: %s\n' % (self._repo.dirstate.pathto(f), msg)
+                        b'%s: %s\n' % (self._repo.dirstate.pathto(f), msg)
                     )
 
             match.bad = bad
@@ -1898,7 +1898,7 @@
     def matches(self, match):
         match = self._repo.narrowmatch(match)
         ds = self._repo.dirstate
-        return sorted(f for f in ds.matches(match) if ds[f] != 'r')
+        return sorted(f for f in ds.matches(match) if ds[f] != b'r')
 
     def markcommitted(self, node):
         with self._repo.dirstate.parentchange():
@@ -2027,7 +2027,7 @@
 
     def remove(self, ignoremissing=False):
         """wraps unlink for a repo's working directory"""
-        rmdir = self._repo.ui.configbool('experimental', 'removeemptydirs')
+        rmdir = self._repo.ui.configbool(b'experimental', b'removeemptydirs')
         self._repo.wvfs.unlinkpath(
             self._path, ignoremissing=ignoremissing, rmdir=rmdir
         )
@@ -2049,7 +2049,9 @@
         wvfs = self._repo.wvfs
         f = self._path
         wvfs.audit(f)
-        if self._repo.ui.configbool('experimental', 'merge.checkpathconflicts'):
+        if self._repo.ui.configbool(
+            b'experimental', b'merge.checkpathconflicts'
+        ):
             # remove files under the directory as they should already be
             # warned and backed up
             if wvfs.isdir(f) and not wvfs.islink(f):
@@ -2092,19 +2094,19 @@
         # Drop old manifest cache as it is now out of date.
         # This is necessary when, e.g., rebasing several nodes with one
         # ``overlayworkingctx`` (e.g. with --collapse).
-        util.clearcachedproperty(self, '_manifest')
+        util.clearcachedproperty(self, b'_manifest')
 
     def data(self, path):
         if self.isdirty(path):
-            if self._cache[path]['exists']:
-                if self._cache[path]['data'] is not None:
-                    return self._cache[path]['data']
+            if self._cache[path][b'exists']:
+                if self._cache[path][b'data'] is not None:
+                    return self._cache[path][b'data']
                 else:
                     # Must fallback here, too, because we only set flags.
                     return self._wrappedctx[path].data()
             else:
                 raise error.ProgrammingError(
-                    "No such file or directory: %s" % path
+                    b"No such file or directory: %s" % path
                 )
         else:
             return self._wrappedctx[path].data()
@@ -2128,7 +2130,7 @@
     @propertycache
     def _flagfunc(self):
         def f(path):
-            return self._cache[path]['flags']
+            return self._cache[path][b'flags']
 
         return f
 
@@ -2139,21 +2141,21 @@
         return [
             f
             for f in self._cache.keys()
-            if self._cache[f]['exists'] and self._existsinparent(f)
+            if self._cache[f][b'exists'] and self._existsinparent(f)
         ]
 
     def added(self):
         return [
             f
             for f in self._cache.keys()
-            if self._cache[f]['exists'] and not self._existsinparent(f)
+            if self._cache[f][b'exists'] and not self._existsinparent(f)
         ]
 
     def removed(self):
         return [
             f
             for f in self._cache.keys()
-            if not self._cache[f]['exists'] and self._existsinparent(f)
+            if not self._cache[f][b'exists'] and self._existsinparent(f)
         ]
 
     def p1copies(self):
@@ -2163,7 +2165,7 @@
             if not narrowmatch(f):
                 continue
             copies.pop(f, None)  # delete if it exists
-            source = self._cache[f]['copied']
+            source = self._cache[f][b'copied']
             if source:
                 copies[f] = source
         return copies
@@ -2175,7 +2177,7 @@
             if not narrowmatch(f):
                 continue
             copies.pop(f, None)  # delete if it exists
-            source = self._cache[f]['copied']
+            source = self._cache[f][b'copied']
             if source:
                 copies[f] = source
         return copies
@@ -2185,7 +2187,7 @@
 
     def filedate(self, path):
         if self.isdirty(path):
-            return self._cache[path]['date']
+            return self._cache[path][b'date']
         else:
             return self._wrappedctx[path].date()
 
@@ -2200,24 +2202,24 @@
 
     def copydata(self, path):
         if self.isdirty(path):
-            return self._cache[path]['copied']
+            return self._cache[path][b'copied']
         else:
             return None
 
     def flags(self, path):
         if self.isdirty(path):
-            if self._cache[path]['exists']:
-                return self._cache[path]['flags']
+            if self._cache[path][b'exists']:
+                return self._cache[path][b'flags']
             else:
                 raise error.ProgrammingError(
-                    "No such file or directory: %s" % self._path
+                    b"No such file or directory: %s" % self._path
                 )
         else:
             return self._wrappedctx[path].flags()
 
     def __contains__(self, key):
         if key in self._cache:
-            return self._cache[key]['exists']
+            return self._cache[key][b'exists']
         return key in self.p1()
 
     def _existsinparent(self, path):
@@ -2241,22 +2243,22 @@
         def fail(path, component):
             # p1() is the base and we're receiving "writes" for p2()'s
             # files.
-            if 'l' in self.p1()[component].flags():
+            if b'l' in self.p1()[component].flags():
                 raise error.Abort(
-                    "error: %s conflicts with symlink %s "
-                    "in %d." % (path, component, self.p1().rev())
+                    b"error: %s conflicts with symlink %s "
+                    b"in %d." % (path, component, self.p1().rev())
                 )
             else:
                 raise error.Abort(
-                    "error: '%s' conflicts with file '%s' in "
-                    "%d." % (path, component, self.p1().rev())
+                    b"error: '%s' conflicts with file '%s' in "
+                    b"%d." % (path, component, self.p1().rev())
                 )
 
         # Test that each new directory to be created to write this path from p2
         # is not a file in p1.
-        components = path.split('/')
+        components = path.split(b'/')
         for i in pycompat.xrange(len(components)):
-            component = "/".join(components[0:i])
+            component = b"/".join(components[0:i])
             if component in self:
                 fail(path, component)
 
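
The component walk above checks every proper prefix of the path, split and
joined with byte separators. A standalone sketch of what it produces:

  components = b'foo/bar/baz'.split(b'/')
  prefixes = [b'/'.join(components[0:i]) for i in range(len(components))]
  # [b'', b'foo', b'foo/bar'] -- each prefix must not be a file in p1
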
@@ -2273,26 +2275,26 @@
             if not mfiles:
                 return
             raise error.Abort(
-                "error: file '%s' cannot be written because "
-                " '%s/' is a directory in %s (containing %d "
-                "entries: %s)"
-                % (path, path, self.p1(), len(mfiles), ', '.join(mfiles))
+                b"error: file '%s' cannot be written because "
+                b" '%s/' is a directory in %s (containing %d "
+                b"entries: %s)"
+                % (path, path, self.p1(), len(mfiles), b', '.join(mfiles))
             )
 
-    def write(self, path, data, flags='', **kwargs):
+    def write(self, path, data, flags=b'', **kwargs):
         if data is None:
-            raise error.ProgrammingError("data must be non-None")
+            raise error.ProgrammingError(b"data must be non-None")
         self._auditconflicts(path)
         self._markdirty(
             path, exists=True, data=data, date=dateutil.makedate(), flags=flags
         )
 
     def setflags(self, path, l, x):
-        flag = ''
+        flag = b''
         if l:
-            flag = 'l'
+            flag = b'l'
         elif x:
-            flag = 'x'
+            flag = b'x'
         self._markdirty(path, exists=True, date=dateutil.makedate(), flags=flag)
 
     def remove(self, path):
@@ -2306,29 +2308,29 @@
             # If this path exists and is a symlink, "follow" it by calling
             # exists on the destination path.
             if (
-                self._cache[path]['exists']
-                and 'l' in self._cache[path]['flags']
+                self._cache[path][b'exists']
+                and b'l' in self._cache[path][b'flags']
             ):
-                return self.exists(self._cache[path]['data'].strip())
+                return self.exists(self._cache[path][b'data'].strip())
             else:
-                return self._cache[path]['exists']
+                return self._cache[path][b'exists']
 
         return self._existsinparent(path)
 
     def lexists(self, path):
         """lexists returns True if the path exists"""
         if self.isdirty(path):
-            return self._cache[path]['exists']
+            return self._cache[path][b'exists']
 
         return self._existsinparent(path)
 
     def size(self, path):
         if self.isdirty(path):
-            if self._cache[path]['exists']:
-                return len(self._cache[path]['data'])
+            if self._cache[path][b'exists']:
+                return len(self._cache[path][b'data'])
             else:
                 raise error.ProgrammingError(
-                    "No such file or directory: %s" % self._path
+                    b"No such file or directory: %s" % self._path
                 )
         return self._wrappedctx[path].size()
 
@@ -2363,15 +2365,15 @@
         files = self.files()
 
         def getfile(repo, memctx, path):
-            if self._cache[path]['exists']:
+            if self._cache[path][b'exists']:
                 return memfilectx(
                     repo,
                     memctx,
                     path,
-                    self._cache[path]['data'],
-                    'l' in self._cache[path]['flags'],
-                    'x' in self._cache[path]['flags'],
-                    self._cache[path]['copied'],
+                    self._cache[path][b'data'],
+                    b'l' in self._cache[path][b'flags'],
+                    b'x' in self._cache[path][b'flags'],
+                    self._cache[path][b'copied'],
                 )
             else:
                 # Returning None, but including the path in `files`, is
@@ -2424,8 +2426,8 @@
             try:
                 underlying = self._wrappedctx[path]
                 if (
-                    underlying.data() == cache['data']
-                    and underlying.flags() == cache['flags']
+                    underlying.data() == cache[b'data']
+                    and underlying.flags() == cache[b'flags']
                 ):
                     keys.append(path)
             except error.ManifestLookupError:
@@ -2437,23 +2439,23 @@
         return keys
 
     def _markdirty(
-        self, path, exists, data=None, date=None, flags='', copied=None
+        self, path, exists, data=None, date=None, flags=b'', copied=None
     ):
         # data not provided, let's see if we already have some; if not, let's
         # grab it from our underlying context, so that we always have data if
         # the file is marked as existing.
         if exists and data is None:
             oldentry = self._cache.get(path) or {}
-            data = oldentry.get('data')
+            data = oldentry.get(b'data')
             if data is None:
                 data = self._wrappedctx[path].data()
 
         self._cache[path] = {
-            'exists': exists,
-            'data': data,
-            'date': date,
-            'flags': flags,
-            'copied': copied,
+            b'exists': exists,
+            b'data': data,
+            b'date': date,
+            b'flags': flags,
+            b'copied': copied,
         }
 
     def filectx(self, path, filelog=None):
@@ -2527,7 +2529,7 @@
     """
 
     def __init__(
-        self, repo, changes, text="", user=None, date=None, extra=None
+        self, repo, changes, text=b"", user=None, date=None, extra=None
     ):
         super(workingcommitctx, self).__init__(
             repo, text, user, date, extra, changes
@@ -2781,11 +2783,11 @@
         super(memfilectx, self).__init__(repo, path, None, changectx)
         self._data = data
         if islink:
-            self._flags = 'l'
+            self._flags = b'l'
         elif isexec:
-            self._flags = 'x'
+            self._flags = b'x'
         else:
-            self._flags = ''
+            self._flags = b''
         self._copysource = copysource
 
     def copysource(self):
@@ -2930,7 +2932,7 @@
     def cmp(self, fctx):
         # filecmp follows symlinks whereas `cmp` should not, so skip the fast
         # path if either side is a symlink.
-        symlinks = 'l' in self.flags() or 'l' in fctx.flags()
+        symlinks = b'l' in self.flags() or b'l' in fctx.flags()
         if not symlinks and isinstance(fctx, workingfilectx) and self._repo:
             # Add a fast-path for merge if both sides are disk-backed.
             # Note that filecmp uses the opposite return values (True if same)
@@ -2942,13 +2944,13 @@
         return self._path
 
     def flags(self):
-        return ''
+        return b''
 
     def data(self):
         return util.readfile(self._path)
 
     def decodeddata(self):
-        with open(self._path, "rb") as f:
+        with open(self._path, b"rb") as f:
             return f.read()
 
     def remove(self):
@@ -2956,5 +2958,5 @@
 
     def write(self, data, flags, **kwargs):
         assert not flags
-        with open(self._path, "wb") as f:
+        with open(self._path, b"wb") as f:
             f.write(data)
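
Most of the context.py conversion is mechanical, but it changes failure
modes in two ways worth keeping in mind. Containment tests fail loudly, as
a standalone sketch using only built-in behaviour shows:

  flags = b'lx'   # filectx.flags() returns bytes such as b'l', b'x' or b''
  b'x' in flags   # True
  'x' in flags    # TypeError: a bytes-like object is required, not 'str'

Dict keys fail at the lookup instead: once overlayworkingctx._cache entries
are keyed by b'exists', b'data', b'flags' and friends, a leftover str
lookup such as cache['exists'] raises KeyError, which is why every read and
write of the cache is converted in the same pass.
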
--- a/mercurial/copies.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/copies.py	Sun Oct 06 09:48:39 2019 -0400
@@ -170,7 +170,7 @@
     ds = repo.dirstate
     c = ds.copies().copy()
     for k in list(c):
-        if ds[k] not in 'anm' or (match and not match(k)):
+        if ds[k] not in b'anm' or (match and not match(k)):
             del c[k]
     return c
 
@@ -187,8 +187,8 @@
 
 def usechangesetcentricalgo(repo):
     """Checks if we should use changeset-centric copy algorithms"""
-    readfrom = repo.ui.config('experimental', 'copies.read-from')
-    changesetsource = ('changeset-only', 'compatibility')
+    readfrom = repo.ui.config(b'experimental', b'copies.read-from')
+    changesetsource = (b'changeset-only', b'compatibility')
     return readfrom in changesetsource
 
 
@@ -201,13 +201,13 @@
     if usechangesetcentricalgo(repo):
         return _changesetforwardcopies(a, b, match)
 
-    debug = repo.ui.debugflag and repo.ui.configbool('devel', 'debug.copies')
+    debug = repo.ui.debugflag and repo.ui.configbool(b'devel', b'debug.copies')
     dbg = repo.ui.debug
     if debug:
-        dbg('debug.copies:    looking into rename from %s to %s\n' % (a, b))
+        dbg(b'debug.copies:    looking into rename from %s to %s\n' % (a, b))
     limit = _findlimit(repo, a, b)
     if debug:
-        dbg('debug.copies:      search limit: %d\n' % limit)
+        dbg(b'debug.copies:      search limit: %d\n' % limit)
     am = a.manifest()
     basemf = None if base is None else base.manifest()
 
@@ -231,11 +231,11 @@
     ancestrycontext = a._repo.changelog.ancestors([b.rev()], inclusive=True)
 
     if debug:
-        dbg('debug.copies:      missing files to search: %d\n' % len(missing))
+        dbg(b'debug.copies:      missing files to search: %d\n' % len(missing))
 
     for f in sorted(missing):
         if debug:
-            dbg('debug.copies:        tracing file: %s\n' % f)
+            dbg(b'debug.copies:        tracing file: %s\n' % f)
         fctx = b[f]
         fctx._ancestrycontext = ancestrycontext
 
@@ -244,11 +244,11 @@
         opath = _tracefile(fctx, am, basemf, limit)
         if opath:
             if debug:
-                dbg('debug.copies:          rename of: %s\n' % opath)
+                dbg(b'debug.copies:          rename of: %s\n' % opath)
             cm[f] = opath
         if debug:
             dbg(
-                'debug.copies:          time: %f seconds\n'
+                b'debug.copies:          time: %f seconds\n'
                 % (util.timer() - start)
             )
     return cm
@@ -342,7 +342,7 @@
 
 
 def _backwardrenames(a, b, match):
-    if a._repo.ui.config('experimental', 'copytrace') == 'off':
+    if a._repo.ui.config(b'experimental', b'copytrace') == b'off':
         return {}
 
     # Even though we're not taking copies into account, 1:n rename situations
@@ -366,26 +366,28 @@
 def pathcopies(x, y, match=None):
     """find {dst@y: src@x} copy mapping for directed compare"""
     repo = x._repo
-    debug = repo.ui.debugflag and repo.ui.configbool('devel', 'debug.copies')
+    debug = repo.ui.debugflag and repo.ui.configbool(b'devel', b'debug.copies')
     if debug:
-        repo.ui.debug('debug.copies: searching copies from %s to %s\n' % (x, y))
+        repo.ui.debug(
+            b'debug.copies: searching copies from %s to %s\n' % (x, y)
+        )
     if x == y or not x or not y:
         return {}
     a = y.ancestor(x)
     if a == x:
         if debug:
-            repo.ui.debug('debug.copies: search mode: forward\n')
+            repo.ui.debug(b'debug.copies: search mode: forward\n')
         if y.rev() is None and x == y.p1():
             # short-circuit to avoid issues with merge states
             return _dirstatecopies(repo, match)
         copies = _forwardcopies(x, y, match=match)
     elif a == y:
         if debug:
-            repo.ui.debug('debug.copies: search mode: backward\n')
+            repo.ui.debug(b'debug.copies: search mode: backward\n')
         copies = _backwardrenames(x, y, match=match)
     else:
         if debug:
-            repo.ui.debug('debug.copies: search mode: combined\n')
+            repo.ui.debug(b'debug.copies: search mode: combined\n')
         base = None
         if a.rev() != node.nullrev:
             base = x
@@ -453,7 +455,7 @@
     if c2.node() is None and c1.node() == repo.dirstate.p1():
         return _dirstatecopies(repo, narrowmatch), {}, {}, {}, {}
 
-    copytracing = repo.ui.config('experimental', 'copytrace')
+    copytracing = repo.ui.config(b'experimental', b'copytrace')
     if stringutil.parsebool(copytracing) is False:
         # stringutil.parsebool() returns None when it is unable to parse the
         # value, so we should rely on making sure copytracing is on in such cases
@@ -466,7 +468,7 @@
     # Copy trace disabling is explicitly below the node == p1 logic above
     # because the logic above is required for a simple copy to be kept across a
     # rebase.
-    if copytracing == 'heuristics':
+    if copytracing == b'heuristics':
         # Do full copytracing if only non-public revisions are involved as
         # that will be fast enough and will also cover the copies which could
         # be missed by heuristics
@@ -490,9 +492,9 @@
         c1 = c1.p1()
     if c1.mutable() and base.mutable():
         sourcecommitlimit = repo.ui.configint(
-            'experimental', 'copytrace.sourcecommitlimit'
+            b'experimental', b'copytrace.sourcecommitlimit'
         )
-        commits = len(repo.revs('%d::%d', base.rev(), c1.rev()))
+        commits = len(repo.revs(b'%d::%d', base.rev(), c1.rev()))
         return commits < sourcecommitlimit
     return False
 
@@ -592,11 +594,11 @@
     u1 = sorted(addedinm1 - addedinm2)
     u2 = sorted(addedinm2 - addedinm1)
 
-    header = "  unmatched files in %s"
+    header = b"  unmatched files in %s"
     if u1:
-        repo.ui.debug("%s:\n   %s\n" % (header % 'local', "\n   ".join(u1)))
+        repo.ui.debug(b"%s:\n   %s\n" % (header % b'local', b"\n   ".join(u1)))
     if u2:
-        repo.ui.debug("%s:\n   %s\n" % (header % 'other', "\n   ".join(u2)))
+        repo.ui.debug(b"%s:\n   %s\n" % (header % b'other', b"\n   ".join(u2)))
 
     fullcopy = copies1.copy()
     fullcopy.update(copies2)
@@ -605,23 +607,23 @@
 
     if repo.ui.debugflag:
         repo.ui.debug(
-            "  all copies found (* = to merge, ! = divergent, "
-            "% = renamed and deleted):\n"
+            b"  all copies found (* = to merge, ! = divergent, "
+            b"% = renamed and deleted):\n"
         )
         for f in sorted(fullcopy):
-            note = ""
+            note = b""
             if f in copy:
-                note += "*"
+                note += b"*"
             if f in divergeset:
-                note += "!"
+                note += b"!"
             if f in renamedeleteset:
-                note += "%"
+                note += b"%"
             repo.ui.debug(
-                "   src: '%s' -> dst: '%s' %s\n" % (fullcopy[f], f, note)
+                b"   src: '%s' -> dst: '%s' %s\n" % (fullcopy[f], f, note)
             )
     del divergeset
 
-    repo.ui.debug("  checking for directory renames\n")
+    repo.ui.debug(b"  checking for directory renames\n")
 
     # generate a directory move map
     d1, d2 = c1.dirs(), c2.dirs()
@@ -656,11 +658,11 @@
     if not dirmove:
         return copy, {}, diverge, renamedelete, {}
 
-    dirmove = {k + "/": v + "/" for k, v in dirmove.iteritems()}
+    dirmove = {k + b"/": v + b"/" for k, v in dirmove.iteritems()}
 
     for d in dirmove:
         repo.ui.debug(
-            "   discovered dir src: '%s' -> dst: '%s'\n" % (d, dirmove[d])
+            b"   discovered dir src: '%s' -> dst: '%s'\n" % (d, dirmove[d])
         )
 
     movewithdir = {}
@@ -674,7 +676,7 @@
                     if df not in copy:
                         movewithdir[f] = df
                         repo.ui.debug(
-                            ("   pending file src: '%s' -> " "dst: '%s'\n")
+                            (b"   pending file src: '%s' -> " b"dst: '%s'\n")
                             % (f, df)
                         )
                     break
@@ -716,11 +718,11 @@
 
     changedfiles = set()
     m1 = c1.manifest()
-    if not repo.revs('%d::%d', base.rev(), c2.rev()):
+    if not repo.revs(b'%d::%d', base.rev(), c2.rev()):
         # If base is not in c2 branch, we switch to fullcopytracing
         repo.ui.debug(
-            "switching to full copytracing as base is not "
-            "an ancestor of c2\n"
+            b"switching to full copytracing as base is not "
+            b"an ancestor of c2\n"
         )
         return _fullcopytracing(repo, c1, c2, base)
 
@@ -728,7 +730,7 @@
     while ctx != base:
         if len(ctx.parents()) == 2:
             # To keep things simple let's not handle merges
-            repo.ui.debug("switching to full copytracing because of merges\n")
+            repo.ui.debug(b"switching to full copytracing because of merges\n")
             return _fullcopytracing(repo, c1, c2, base)
         changedfiles.update(ctx.files())
         ctx = ctx.p1()
@@ -767,14 +769,14 @@
             # we can have a lot of candidates which can slow down the heuristics
             # config value to limit the number of candidates moves to check
             maxcandidates = repo.ui.configint(
-                'experimental', 'copytrace.movecandidateslimit'
+                b'experimental', b'copytrace.movecandidateslimit'
             )
 
             if len(movecandidates) > maxcandidates:
                 repo.ui.status(
                     _(
-                        "skipping copytracing for '%s', more "
-                        "candidates than the limit: %d\n"
+                        b"skipping copytracing for '%s', more "
+                        b"candidates than the limit: %d\n"
                     )
                     % (f, len(movecandidates))
                 )
@@ -833,10 +835,10 @@
     copies between fromrev and rev.
     """
     exclude = {}
-    ctraceconfig = repo.ui.config('experimental', 'copytrace')
+    ctraceconfig = repo.ui.config(b'experimental', b'copytrace')
     bctrace = stringutil.parsebool(ctraceconfig)
     if skiprev is not None and (
-        ctraceconfig == 'heuristics' or bctrace or bctrace is None
+        ctraceconfig == b'heuristics' or bctrace or bctrace is None
     ):
         # copytrace='off' skips this line, but not the entire function because
         # the line below is O(size of the repo) during a rebase, while the rest
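
The debug strings in copies.py survive byteification almost verbatim
because %-formatting works on bytes again since Python 3.5 (PEP 461),
which this patch leans on throughout. A standalone sketch (the path is
illustrative):

  path = b'dir/file.c'
  msg = b'debug.copies:        tracing file: %s\n' % path
  assert msg == b'debug.copies:        tracing file: dir/file.c\n'
  b'debug.copies:      search limit: %d\n' % 42  # %d needs no special case

Note that %s in a bytes format requires a bytes-like argument; interpolating
a str raises TypeError, so format strings and their arguments have to be
converted together.
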
--- a/mercurial/crecord.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/crecord.py	Sun Oct 06 09:48:39 2019 -0400
@@ -83,7 +83,7 @@
     This method returns True if curses is found (and that python is built with
     it) and that the user has the correct flag for the ui.
     """
-    return curses and ui.interface("chunkselector") == "curses"
+    return curses and ui.interface(b"chunkselector") == b"curses"
 
 
 class patchnode(object):
@@ -92,14 +92,14 @@
     """
 
     def firstchild(self):
-        raise NotImplementedError("method must be implemented by subclass")
+        raise NotImplementedError(b"method must be implemented by subclass")
 
     def lastchild(self):
-        raise NotImplementedError("method must be implemented by subclass")
+        raise NotImplementedError(b"method must be implemented by subclass")
 
     def allchildren(self):
-        "Return a list of all of the direct children of this node"
-        raise NotImplementedError("method must be implemented by subclass")
+        b"Return a list of all of the direct children of this node"
+        raise NotImplementedError(b"method must be implemented by subclass")
 
     def nextsibling(self):
         """
@@ -107,7 +107,7 @@
         of different types between the current item and this closest item.
         If no such item exists, return None.
         """
-        raise NotImplementedError("method must be implemented by subclass")
+        raise NotImplementedError(b"method must be implemented by subclass")
 
     def prevsibling(self):
         """
@@ -115,10 +115,10 @@
         items of different types between the current item and this closest item.
         If no such item exists, return None.
         """
-        raise NotImplementedError("method must be implemented by subclass")
+        raise NotImplementedError(b"method must be implemented by subclass")
 
     def parentitem(self):
-        raise NotImplementedError("method must be implemented by subclass")
+        raise NotImplementedError(b"method must be implemented by subclass")
 
     def nextitem(self, skipfolded=True):
         """
@@ -260,21 +260,21 @@
         return None
 
     def firstchild(self):
-        "return the first child of this item, if one exists.  otherwise None."
+        b"return the first child of this item, if one exists.  otherwise None."
         if len(self.hunks) > 0:
             return self.hunks[0]
         else:
             return None
 
     def lastchild(self):
-        "return the last child of this item, if one exists.  otherwise None."
+        b"return the last child of this item, if one exists.  otherwise None."
         if len(self.hunks) > 0:
             return self.hunks[-1]
         else:
             return None
 
     def allchildren(self):
-        "return a list of all of the direct children of this node"
+        b"return a list of all of the direct children of this node"
         return self.hunks
 
     def __getattr__(self, name):
@@ -282,7 +282,7 @@
 
 
 class uihunkline(patchnode):
-    "represents a changed line in a hunk"
+    b"represents a changed line in a hunk"
 
     def __init__(self, linetext, hunk):
         self.linetext = linetext
@@ -315,16 +315,16 @@
             return None
 
     def parentitem(self):
-        "return the parent to the current item"
+        b"return the parent to the current item"
         return self.hunk
 
     def firstchild(self):
-        "return the first child of this item, if one exists.  otherwise None."
+        b"return the first child of this item, if one exists.  otherwise None."
         # hunk-lines don't have children
         return None
 
     def lastchild(self):
-        "return the last child of this item, if one exists.  otherwise None."
+        b"return the last child of this item, if one exists.  otherwise None."
         # hunk-lines don't have children
         return None
 
@@ -368,25 +368,25 @@
             return None
 
     def parentitem(self):
-        "return the parent to the current item"
+        b"return the parent to the current item"
         return self.header
 
     def firstchild(self):
-        "return the first child of this item, if one exists.  otherwise None."
+        b"return the first child of this item, if one exists.  otherwise None."
         if len(self.changedlines) > 0:
             return self.changedlines[0]
         else:
             return None
 
     def lastchild(self):
-        "return the last child of this item, if one exists.  otherwise None."
+        b"return the last child of this item, if one exists.  otherwise None."
         if len(self.changedlines) > 0:
             return self.changedlines[-1]
         else:
             return None
 
     def allchildren(self):
-        "return a list of all of the direct children of this node"
+        b"return a list of all of the direct children of this node"
         return self.changedlines
 
     def countchanges(self):
@@ -395,14 +395,14 @@
             [
                 l
                 for l in self.changedlines
-                if l.applied and l.prettystr().startswith('+')
+                if l.applied and l.prettystr().startswith(b'+')
             ]
         )
         rem = len(
             [
                 l
                 for l in self.changedlines
-                if l.applied and l.prettystr().startswith('-')
+                if l.applied and l.prettystr().startswith(b'-')
             ]
         )
         return add, rem
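
countchanges() classifies lines by their first byte, which is also why the
code uses startswith(b'+') rather than indexing: on Python 3, indexing a
bytes object yields an int, not a one-byte string. A standalone sketch:

  lines = [b'+new\n', b'-old\n', b' context\n']
  add = len([l for l in lines if l.startswith(b'+')])  # 1
  rem = len([l for l in lines if l.startswith(b'-')])  # 1
  lines[0][0]  # 43, the integer code of '+', so l[0] == b'+' is False
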
@@ -414,7 +414,7 @@
         contextlen = (
             len(self.before) + len(self.after) + removedconvertedtocontext
         )
-        if self.after and self.after[-1] == '\\ No newline at end of file\n':
+        if self.after and self.after[-1] == b'\\ No newline at end of file\n':
             contextlen -= 1
         fromlen = contextlen + self.removed
         tolen = contextlen + self.added
@@ -432,12 +432,12 @@
             if tolen == 0 and toline > 0:
                 toline -= 1
 
-        fromtoline = '@@ -%d,%d +%d,%d @@%s\n' % (
+        fromtoline = b'@@ -%d,%d +%d,%d @@%s\n' % (
             fromline,
             fromlen,
             toline,
             tolen,
-            self.proc and (' ' + self.proc),
+            self.proc and (b' ' + self.proc),
         )
         return fromtoline
 
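
The hunk-header construction above now builds, as bytes end to end, exactly
the kind of @@ line seen throughout this patch. A standalone sketch with
made-up numbers and an empty section marker:

  fromtoline = b'@@ -%d,%d +%d,%d @@%s\n' % (10, 3, 10, 4, b'')
  # b'@@ -10,3 +10,4 @@\n'
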
@@ -453,10 +453,10 @@
             changedlinestr = changedline.prettystr()
             if changedline.applied:
                 hunklinelist.append(changedlinestr)
-            elif changedlinestr.startswith("-"):
-                hunklinelist.append(" " + changedlinestr[1:])
+            elif changedlinestr.startswith(b"-"):
+                hunklinelist.append(b" " + changedlinestr[1:])
 
-        fp.write(''.join(self.before + hunklinelist + self.after))
+        fp.write(b''.join(self.before + hunklinelist + self.after))
 
     pretty = write
 
@@ -501,14 +501,14 @@
         for line in self.changedlines:
             text = line.linetext
             if line.applied:
-                if text.startswith('+'):
+                if text.startswith(b'+'):
                     dels.append(text[1:])
-                elif text.startswith('-'):
+                elif text.startswith(b'-'):
                     adds.append(text[1:])
-            elif text.startswith('+'):
+            elif text.startswith(b'+'):
                 dels.append(text[1:])
                 adds.append(text[1:])
-        hunk = ['-%s' % l for l in dels] + ['+%s' % l for l in adds]
+        hunk = [b'-%s' % l for l in dels] + [b'+%s' % l for l in adds]
         h = self._hunk
         return patchmod.recordhunk(
             h.header, h.toline, h.fromline, h.proc, h.before, hunk, h.after
@@ -561,13 +561,13 @@
     curses interface to get selection of chunks, and mark the applied flags
     of the chosen chunks.
     """
-    ui.write(_('starting interactive selection\n'))
+    ui.write(_(b'starting interactive selection\n'))
     chunkselector = curseschunkselector(headerlist, ui, operation)
     # This is required for ncurses to display non-ASCII characters in
     # default user locale encoding correctly.  --immerrr
     locale.setlocale(locale.LC_ALL, r'')
     origsigtstp = sentinel = object()
-    if util.safehasattr(signal, 'SIGTSTP'):
+    if util.safehasattr(signal, b'SIGTSTP'):
         origsigtstp = signal.getsignal(signal.SIGTSTP)
     try:
         curses.wrapper(chunkselector.main)
@@ -603,8 +603,8 @@
 
     chunkselector.stdscr = dummystdscr()
     if testfn and os.path.exists(testfn):
-        testf = open(testfn, 'rb')
-        testcommands = [x.rstrip('\n') for x in testf.readlines()]
+        testf = open(testfn, b'rb')
+        testcommands = [x.rstrip(b'\n') for x in testf.readlines()]
         testf.close()
         while True:
             if chunkselector.handlekeypressed(testcommands.pop(0), test=True):
@@ -613,10 +613,10 @@
 
 
 _headermessages = {  # {operation: text}
-    'apply': _('Select hunks to apply'),
-    'discard': _('Select hunks to discard'),
-    'keep': _('Select hunks to keep'),
-    None: _('Select hunks to record'),
+    b'apply': _(b'Select hunks to apply'),
+    b'discard': _(b'Select hunks to discard'),
+    b'keep': _(b'Select hunks to keep'),
+    None: _(b'Select hunks to record'),
 }
 
 
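
Byteifying the _headermessages keys changes lookup semantics silently: a
bytes-keyed dict simply does not contain the str spelling of the same key.
A standalone sketch:

  table = {b'apply': b'Select hunks to apply', None: b'Select hunks to record'}
  table.get(b'apply')  # b'Select hunks to apply'
  table.get('apply')   # None; str and bytes never compare equal
  table[None]          # the None key is unaffected by byteification

The "operation not in _headermessages" ProgrammingError check in
curseschunkselector.__init__ below is what turns such a mismatch into a
loud failure instead of a misrendered header.
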
@@ -645,7 +645,7 @@
         # long as not explicitly set to a falsy value - especially,
         # when not set at all. This is to stay most compatible with
         # previous (color only) behaviour.
-        uicolor = stringutil.parsebool(self.ui.config('ui', 'color'))
+        uicolor = stringutil.parsebool(self.ui.config(b'ui', b'color'))
         self.usecolor = uicolor is not False
 
         # the currently selected header, hunk, or hunk-line
@@ -678,14 +678,16 @@
         self.firstlineofpadtoprint = 0
 
         # stores optional text for a commit comment provided by the user
-        self.commenttext = ""
+        self.commenttext = b""
 
         # if the last 'toggle all' command caused all changes to be applied
         self.waslasttoggleallapplied = True
 
         # affects some ui text
         if operation not in _headermessages:
-            raise error.ProgrammingError('unexpected operation: %s' % operation)
+            raise error.ProgrammingError(
+                b'unexpected operation: %s' % operation
+            )
         self.operation = operation
 
     def uparrowevent(self):
@@ -850,7 +852,7 @@
         self.currentselecteditem = currentitem
 
     def updatescroll(self):
-        "scroll the screen to fully show the currently-selected"
+        b"scroll the screen to fully show the currently-selected"
         selstart = self.selecteditemstartline
         selend = self.selecteditemendline
 
@@ -868,7 +870,7 @@
             self.scrolllines(selstart - padstartbuffered)
 
     def scrolllines(self, numlines):
-        "scroll the screen up (down) by numlines when numlines >0 (<0)."
+        b"scroll the screen up (down) by numlines when numlines >0 (<0)."
         self.firstlineofpadtoprint += numlines
         if self.firstlineofpadtoprint < 0:
             self.firstlineofpadtoprint = 0
@@ -970,7 +972,7 @@
                 )
 
     def toggleall(self):
-        "toggle the applied flag of all items."
+        b"toggle the applied flag of all items."
         if self.waslasttoggleallapplied:  # then unapply them this time
             for item in self.headerlist:
                 if item.applied:
@@ -982,7 +984,7 @@
         self.waslasttoggleallapplied = not self.waslasttoggleallapplied
 
     def toggleallbetween(self):
-        "toggle applied on or off for all items in range [lastapplied,current]."
+        b"toggle applied on or off for all items in range [lastapplied,current]."
         if (
             not self.lastapplieditem
             or self.currentselecteditem == self.lastapplieditem
@@ -994,7 +996,7 @@
         startitem = self.lastapplieditem
         enditem = self.currentselecteditem
         # Verify that enditem is "after" startitem, otherwise swap them.
-        for direction in ['forward', 'reverse']:
+        for direction in [b'forward', b'reverse']:
             nextitem = startitem.nextitem()
             while nextitem and nextitem != enditem:
                 nextitem = nextitem.nextitem()
@@ -1023,7 +1025,7 @@
             nextitem = nextitem.nextitem()
 
     def togglefolded(self, item=None, foldparent=False):
-        "toggle folded flag of specified item (defaults to currently selected)"
+        b"toggle folded flag of specified item (defaults to currently selected)"
         if item is None:
             item = self.currentselecteditem
         if foldparent or (isinstance(item, uiheader) and item.neverunfolded):
@@ -1054,7 +1056,7 @@
         instr = instr.expandtabs(4)
         strwidth = encoding.colwidth(instr)
         numspaces = width - ((strwidth + xstart) % width)
-        return instr + " " * numspaces
+        return instr + b" " * numspaces
 
     def printstring(
         self,
@@ -1092,8 +1094,8 @@
         # strip \n, and convert control characters to ^[char] representation
         text = re.sub(
             br'[\x00-\x08\x0a-\x1f]',
-            lambda m: '^' + chr(ord(m.group()) + 64),
-            text.strip('\n'),
+            lambda m: b'^' + pycompat.bytechr(ord(m.group()) + 64),
+            text.strip(b'\n'),
         )
 
         if pair is not None:
@@ -1123,11 +1125,11 @@
                     colorpair |= textattr
 
         y, xstart = self.chunkpad.getyx()
-        t = ""  # variable for counting lines printed
+        t = b""  # variable for counting lines printed
         # if requested, show trailing whitespace
         if showwhtspc:
             origlen = len(text)
-            text = text.rstrip(' \n')  # tabs have already been expanded
+            text = text.rstrip(b' \n')  # tabs have already been expanded
             strippedlen = len(text)
             numtrailingspaces = origlen - strippedlen
 
@@ -1140,11 +1142,11 @@
             if towin:
                 for i in range(numtrailingspaces):
                     window.addch(curses.ACS_CKBOARD, wscolorpair)
-            t += " " * numtrailingspaces
+            t += b" " * numtrailingspaces
 
         if align:
             if towin:
-                extrawhitespace = self.alignstring("", window)
+                extrawhitespace = self.alignstring(b"", window)
                 window.addstr(extrawhitespace, colorpair)
             else:
                 # need to use t, since the x position hasn't incremented
@@ -1160,31 +1162,31 @@
     def _getstatuslinesegments(self):
         """-> [str]. return segments"""
         selected = self.currentselecteditem.applied
-        spaceselect = _('space/enter: select')
-        spacedeselect = _('space/enter: deselect')
+        spaceselect = _(b'space/enter: select')
+        spacedeselect = _(b'space/enter: deselect')
         # Format the selected label into a place as long as the longer of the
         # two possible labels.  This may vary by language.
         spacelen = max(len(spaceselect), len(spacedeselect))
-        selectedlabel = '%-*s' % (
+        selectedlabel = b'%-*s' % (
             spacelen,
             spacedeselect if selected else spaceselect,
         )
         segments = [
             _headermessages[self.operation],
-            '-',
-            _('[x]=selected **=collapsed'),
-            _('c: confirm'),
-            _('q: abort'),
-            _('arrow keys: move/expand/collapse'),
+            b'-',
+            _(b'[x]=selected **=collapsed'),
+            _(b'c: confirm'),
+            _(b'q: abort'),
+            _(b'arrow keys: move/expand/collapse'),
             selectedlabel,
-            _('?: help'),
+            _(b'?: help'),
         ]
         return segments
 
     def _getstatuslines(self):
         """() -> [str]. return short help used in the top status window"""
         if self.errorstr is not None:
-            lines = [self.errorstr, _('Press any key to continue')]
+            lines = [self.errorstr, _(b'Press any key to continue')]
         else:
             # wrap segments to lines
             segments = self._getstatuslinesegments()
@@ -1193,7 +1195,7 @@
             lastwidth = width
             for s in segments:
                 w = encoding.colwidth(s)
-                sep = ' ' * (1 + (s and s[0] not in '-['))
+                sep = b' ' * (1 + (s and s[0] not in b'-['))
                 if lastwidth + w + len(sep) >= width:
                     lines.append(s)
                     lastwidth = w
@@ -1214,7 +1216,7 @@
         # print out the status lines at the top
         try:
             for line in self._getstatuslines():
-                printstring(self.statuswin, line, pairname="legend")
+                printstring(self.statuswin, line, pairname=b"legend")
             self.statuswin.refresh()
         except curses.error:
             pass
@@ -1245,27 +1247,27 @@
         # create checkbox string
         if item.applied:
             if not isinstance(item, uihunkline) and item.partial:
-                checkbox = "[~]"
+                checkbox = b"[~]"
             else:
-                checkbox = "[x]"
+                checkbox = b"[x]"
         else:
-            checkbox = "[ ]"
+            checkbox = b"[ ]"
 
         try:
             if item.folded:
-                checkbox += "**"
+                checkbox += b"**"
                 if isinstance(item, uiheader):
                     # one of "m", "a", or "d" (modified, added, deleted)
                     filestatus = item.changetype
 
-                    checkbox += filestatus + " "
+                    checkbox += filestatus + b" "
             else:
-                checkbox += "  "
+                checkbox += b"  "
                 if isinstance(item, uiheader):
                     # add two more spaces for headers
-                    checkbox += "  "
+                    checkbox += b"  "
         except AttributeError:  # not foldable
-            checkbox += "  "
+            checkbox += b"  "
 
         return checkbox
 
@@ -1277,18 +1279,18 @@
         anything, but just count the number of lines which would be printed.
         """
 
-        outstr = ""
+        outstr = b""
         text = header.prettystr()
         chunkindex = self.chunklist.index(header)
 
         if chunkindex != 0 and not header.folded:
             # add separating line before headers
             outstr += self.printstring(
-                self.chunkpad, '_' * self.xscreensize, towin=towin, align=False
+                self.chunkpad, b'_' * self.xscreensize, towin=towin, align=False
             )
         # select color-pair based on if the header is selected
         colorpair = self.getcolorpair(
-            name=selected and "selected" or "normal", attrlist=[curses.A_BOLD]
+            name=selected and b"selected" or b"normal", attrlist=[curses.A_BOLD]
         )
 
         # print out each line of the chunk, expanding it to screen width
@@ -1297,7 +1299,7 @@
         indentnumchars = 0
         checkbox = self.getstatusprefixstring(header)
         if not header.folded or ignorefolding:
-            textlist = text.split("\n")
+            textlist = text.split(b"\n")
             linestr = checkbox + textlist[0]
         else:
             linestr = checkbox + header.filename()
@@ -1307,7 +1309,7 @@
         if not header.folded or ignorefolding:
             if len(textlist) > 1:
                 for line in textlist[1:]:
-                    linestr = " " * (indentnumchars + len(checkbox)) + line
+                    linestr = b" " * (indentnumchars + len(checkbox)) + line
                     outstr += self.printstring(
                         self.chunkpad, linestr, pair=colorpair, towin=towin
                     )
@@ -1317,26 +1319,26 @@
     def printhunklinesbefore(
         self, hunk, selected=False, towin=True, ignorefolding=False
     ):
-        "includes start/end line indicator"
-        outstr = ""
+        b"includes start/end line indicator"
+        outstr = b""
         # where hunk is in list of siblings
         hunkindex = hunk.header.hunks.index(hunk)
 
         if hunkindex != 0:
             # add separating line before headers
             outstr += self.printstring(
-                self.chunkpad, ' ' * self.xscreensize, towin=towin, align=False
+                self.chunkpad, b' ' * self.xscreensize, towin=towin, align=False
             )
 
         colorpair = self.getcolorpair(
-            name=selected and "selected" or "normal", attrlist=[curses.A_BOLD]
+            name=selected and b"selected" or b"normal", attrlist=[curses.A_BOLD]
         )
 
         # print out from-to line with checkbox
         checkbox = self.getstatusprefixstring(hunk)
 
-        lineprefix = " " * self.hunkindentnumchars + checkbox
-        frtoline = "   " + hunk.getfromtoline().strip("\n")
+        lineprefix = b" " * self.hunkindentnumchars + checkbox
+        frtoline = b"   " + hunk.getfromtoline().strip(b"\n")
 
         outstr += self.printstring(
             self.chunkpad, lineprefix, towin=towin, align=False
@@ -1351,41 +1353,45 @@
 
         # print out lines of the chunk preceding changed-lines
         for line in hunk.before:
-            linestr = " " * (self.hunklineindentnumchars + len(checkbox)) + line
+            linestr = (
+                b" " * (self.hunklineindentnumchars + len(checkbox)) + line
+            )
             outstr += self.printstring(self.chunkpad, linestr, towin=towin)
 
         return outstr
 
     def printhunklinesafter(self, hunk, towin=True, ignorefolding=False):
-        outstr = ""
+        outstr = b""
         if hunk.folded and not ignorefolding:
             return outstr
 
         # a bit superfluous, but to avoid hard-coding indent amount
         checkbox = self.getstatusprefixstring(hunk)
         for line in hunk.after:
-            linestr = " " * (self.hunklineindentnumchars + len(checkbox)) + line
+            linestr = (
+                b" " * (self.hunklineindentnumchars + len(checkbox)) + line
+            )
             outstr += self.printstring(self.chunkpad, linestr, towin=towin)
 
         return outstr
 
     def printhunkchangedline(self, hunkline, selected=False, towin=True):
-        outstr = ""
+        outstr = b""
         checkbox = self.getstatusprefixstring(hunkline)
 
-        linestr = hunkline.prettystr().strip("\n")
+        linestr = hunkline.prettystr().strip(b"\n")
 
         # select color-pair based on whether line is an addition/removal
         if selected:
-            colorpair = self.getcolorpair(name="selected")
-        elif linestr.startswith("+"):
-            colorpair = self.getcolorpair(name="addition")
-        elif linestr.startswith("-"):
-            colorpair = self.getcolorpair(name="deletion")
-        elif linestr.startswith("\\"):
-            colorpair = self.getcolorpair(name="normal")
+            colorpair = self.getcolorpair(name=b"selected")
+        elif linestr.startswith(b"+"):
+            colorpair = self.getcolorpair(name=b"addition")
+        elif linestr.startswith(b"-"):
+            colorpair = self.getcolorpair(name=b"deletion")
+        elif linestr.startswith(b"\\"):
+            colorpair = self.getcolorpair(name=b"normal")
 
-        lineprefix = " " * self.hunklineindentnumchars + checkbox
+        lineprefix = b" " * self.hunklineindentnumchars + checkbox
         outstr += self.printstring(
             self.chunkpad, lineprefix, towin=towin, align=False
         )  # add uncolored checkbox/indent
@@ -1412,7 +1418,7 @@
         self.__printitem(
             item, ignorefolding, recursechildren, outstr, towin=towin
         )
-        return ''.join(outstr)
+        return b''.join(outstr)
 
     def outofdisplayedarea(self):
         y, _ = self.chunkpad.getyx()  # cursor location
@@ -1522,7 +1528,7 @@
         return numlines
 
     def sigwinchhandler(self, n, frame):
-        "handle window resizing"
+        b"handle window resizing"
         try:
             curses.endwin()
             self.xscreensize, self.yscreensize = scmutil.termsize(self.ui)
@@ -1572,7 +1578,7 @@
                 else:
                     cval = 0
                     if name is not None:
-                        if name == 'selected':
+                        if name == b'selected':
                             cval = curses.A_REVERSE
                         self.colorpairnames[name] = cval
                     colorpair = self.colorpairs[(fgcolor, bgcolor)] = cval
@@ -1592,11 +1598,11 @@
         return colorpair
 
     def initcolorpair(self, *args, **kwargs):
-        "same as getcolorpair."
+        b"same as getcolorpair."
         self.getcolorpair(*args, **kwargs)
 
     def helpwindow(self):
-        "print a help window to the screen.  exit after any keypress."
+        b"print a help window to the screen.  exit after any keypress."
         helptext = _(
             """            [press any key to return to the patch-display]
 
@@ -1630,24 +1636,24 @@
         )
 
         helpwin = curses.newwin(self.yscreensize, 0, 0, 0)
-        helplines = helptext.split("\n")
-        helplines = helplines + [" "] * (
+        helplines = helptext.split(b"\n")
+        helplines = helplines + [b" "] * (
             self.yscreensize - self.numstatuslines - len(helplines) - 1
         )
         try:
             for line in helplines:
-                self.printstring(helpwin, line, pairname="legend")
+                self.printstring(helpwin, line, pairname=b"legend")
         except curses.error:
             pass
         helpwin.refresh()
         try:
-            with self.ui.timeblockedsection('crecord'):
+            with self.ui.timeblockedsection(b'crecord'):
                 helpwin.getkey()
         except curses.error:
             pass
 
     def commitMessageWindow(self):
-        "Create a temporary commit message editing window on the screen."
+        b"Create a temporary commit message editing window on the screen."
 
         curses.raw()
         curses.def_prog_mode()
@@ -1697,19 +1703,19 @@
         self.recenterdisplayedarea()
 
     def confirmationwindow(self, windowtext):
-        "display an informational window, then wait for and return a keypress."
+        b"display an informational window, then wait for and return a keypress."
 
         confirmwin = curses.newwin(self.yscreensize, 0, 0, 0)
         try:
-            lines = windowtext.split("\n")
+            lines = windowtext.split(b"\n")
             for line in lines:
-                self.printstring(confirmwin, line, pairname="selected")
+                self.printstring(confirmwin, line, pairname=b"selected")
         except curses.error:
             pass
         self.stdscr.refresh()
         confirmwin.refresh()
         try:
-            with self.ui.timeblockedsection('crecord'):
+            with self.ui.timeblockedsection(b'crecord'):
                 response = chr(self.stdscr.getch())
         except ValueError:
             response = None
@@ -1731,11 +1737,11 @@
 are you sure you want to review/edit and confirm the selected changes [yn]?
 """
         )
-        with self.ui.timeblockedsection('crecord'):
+        with self.ui.timeblockedsection(b'crecord'):
             response = self.confirmationwindow(confirmtext)
         if response is None:
-            response = "n"
-        if response.lower().startswith("y"):
+            response = b"n"
+        if response.lower().startswith(b"y"):
             return True
         else:
             return False
@@ -1748,20 +1754,20 @@
         new changeset will be created (the normal commit behavior).
         """
 
-        if opts.get('amend') is None:
-            opts['amend'] = True
+        if opts.get(b'amend') is None:
+            opts[b'amend'] = True
             msg = _(
-                "Amend option is turned on -- committing the currently "
-                "selected changes will not create a new changeset, but "
-                "instead update the most recently committed changeset.\n\n"
-                "Press any key to continue."
+                b"Amend option is turned on -- committing the currently "
+                b"selected changes will not create a new changeset, but "
+                b"instead update the most recently committed changeset.\n\n"
+                b"Press any key to continue."
             )
-        elif opts.get('amend') is True:
-            opts['amend'] = None
+        elif opts.get(b'amend') is True:
+            opts[b'amend'] = None
             msg = _(
-                "Amend option is turned off -- committing the currently "
-                "selected changes will create a new changeset.\n\n"
-                "Press any key to continue."
+                b"Amend option is turned off -- committing the currently "
+                b"selected changes will create a new changeset.\n\n"
+                b"Press any key to continue."
             )
         if not test:
             self.confirmationwindow(msg)
@@ -1791,12 +1797,12 @@
 
         def editpatchwitheditor(self, chunk):
             if chunk is None:
-                self.ui.write(_('cannot edit patch for whole file'))
-                self.ui.write("\n")
+                self.ui.write(_(b'cannot edit patch for whole file'))
+                self.ui.write(b"\n")
                 return None
             if chunk.header.binary():
-                self.ui.write(_('cannot edit patch for binary file'))
-                self.ui.write("\n")
+                self.ui.write(_(b'cannot edit patch for binary file'))
+                self.ui.write(b"\n")
                 return None
 
             # write the initial patch
@@ -1807,7 +1813,7 @@
 
             # start the editor and wait for it to complete
             try:
-                patch = self.ui.edit(patch.getvalue(), "", action="diff")
+                patch = self.ui.edit(patch.getvalue(), b"", action=b"diff")
             except error.Abort as exc:
                 self.errorstr = str(exc)
                 return None
@@ -1817,9 +1823,9 @@
 
             # remove comment lines
             patch = [
-                line + '\n'
+                line + b'\n'
                 for line in patch.splitlines()
-                if not line.startswith('#')
+                if not line.startswith(b'#')
             ]
             return patchmod.parsepatch(patch)
 
@@ -1881,59 +1887,59 @@
         Return true to exit the main loop.
         """
         keypressed = pycompat.bytestr(keypressed)
-        if keypressed in ["k", "KEY_UP"]:
+        if keypressed in [b"k", b"KEY_UP"]:
             self.uparrowevent()
-        elif keypressed in ["K", "KEY_PPAGE"]:
+        elif keypressed in [b"K", b"KEY_PPAGE"]:
             self.uparrowshiftevent()
-        elif keypressed in ["j", "KEY_DOWN"]:
+        elif keypressed in [b"j", b"KEY_DOWN"]:
             self.downarrowevent()
-        elif keypressed in ["J", "KEY_NPAGE"]:
+        elif keypressed in [b"J", b"KEY_NPAGE"]:
             self.downarrowshiftevent()
-        elif keypressed in ["l", "KEY_RIGHT"]:
+        elif keypressed in [b"l", b"KEY_RIGHT"]:
             self.rightarrowevent()
-        elif keypressed in ["h", "KEY_LEFT"]:
+        elif keypressed in [b"h", b"KEY_LEFT"]:
             self.leftarrowevent()
-        elif keypressed in ["H", "KEY_SLEFT"]:
+        elif keypressed in [b"H", b"KEY_SLEFT"]:
             self.leftarrowshiftevent()
-        elif keypressed in ["q"]:
-            raise error.Abort(_('user quit'))
-        elif keypressed in ['a']:
+        elif keypressed in [b"q"]:
+            raise error.Abort(_(b'user quit'))
+        elif keypressed in [b'a']:
             self.toggleamend(self.opts, test)
-        elif keypressed in ["c"]:
+        elif keypressed in [b"c"]:
             return True
-        elif keypressed in ["r"]:
+        elif keypressed in [b"r"]:
             if self.reviewcommit():
-                self.opts['review'] = True
+                self.opts[b'review'] = True
                 return True
-        elif test and keypressed in ['R']:
-            self.opts['review'] = True
+        elif test and keypressed in [b'R']:
+            self.opts[b'review'] = True
             return True
-        elif keypressed in [' ', 'x']:
+        elif keypressed in [b' ', b'x']:
             self.toggleapply()
-        elif keypressed in ['\n', 'KEY_ENTER']:
+        elif keypressed in [b'\n', b'KEY_ENTER']:
             self.toggleapply()
             self.nextsametype(test=test)
-        elif keypressed in ['X']:
+        elif keypressed in [b'X']:
             self.toggleallbetween()
-        elif keypressed in ['A']:
+        elif keypressed in [b'A']:
             self.toggleall()
-        elif keypressed in ['e']:
+        elif keypressed in [b'e']:
             self.toggleedit(test=test)
-        elif keypressed in ["f"]:
+        elif keypressed in [b"f"]:
             self.togglefolded()
-        elif keypressed in ["F"]:
+        elif keypressed in [b"F"]:
             self.togglefolded(foldparent=True)
-        elif keypressed in ["m"]:
+        elif keypressed in [b"m"]:
             self.commitMessageWindow()
-        elif keypressed in ["g", "KEY_HOME"]:
+        elif keypressed in [b"g", b"KEY_HOME"]:
             self.handlefirstlineevent()
-        elif keypressed in ["G", "KEY_END"]:
+        elif keypressed in [b"G", b"KEY_END"]:
             self.handlelastlineevent()
-        elif keypressed in ["?"]:
+        elif keypressed in [b"?"]:
             self.helpwindow()
             self.stdscr.clear()
             self.stdscr.refresh()
-        elif curses.unctrl(keypressed) in ["^L"]:
+        elif curses.unctrl(keypressed) in [b"^L"]:
             # scroll the current line to the top of the screen, and redraw
             # everything
             self.scrolllines(self.selecteditemstartline)
@@ -1946,7 +1952,7 @@
         """
 
         origsigwinch = sentinel = object()
-        if util.safehasattr(signal, 'SIGWINCH'):
+        if util.safehasattr(signal, b'SIGWINCH'):
             origsigwinch = signal.signal(signal.SIGWINCH, self.sigwinchhandler)
         try:
             return self._main(stdscr)
@@ -1981,13 +1987,15 @@
 
         # available colors: black, blue, cyan, green, magenta, white, yellow
         # init_pair(color_id, foreground_color, background_color)
-        self.initcolorpair(None, None, name="normal")
+        self.initcolorpair(None, None, name=b"normal")
         self.initcolorpair(
-            curses.COLOR_WHITE, curses.COLOR_MAGENTA, name="selected"
+            curses.COLOR_WHITE, curses.COLOR_MAGENTA, name=b"selected"
         )
-        self.initcolorpair(curses.COLOR_RED, None, name="deletion")
-        self.initcolorpair(curses.COLOR_GREEN, None, name="addition")
-        self.initcolorpair(curses.COLOR_WHITE, curses.COLOR_BLUE, name="legend")
+        self.initcolorpair(curses.COLOR_RED, None, name=b"deletion")
+        self.initcolorpair(curses.COLOR_GREEN, None, name=b"addition")
+        self.initcolorpair(
+            curses.COLOR_WHITE, curses.COLOR_BLUE, name=b"legend"
+        )
         # newwin([height, width,] begin_y, begin_x)
         self.statuswin = curses.newwin(self.numstatuslines, 0, 0, 0)
         self.statuswin.keypad(1)  # interpret arrow-key, etc. esc sequences
@@ -2005,7 +2013,7 @@
             self.chunkpad = curses.newpad(self.numpadlines, self.xscreensize)
         except curses.error:
             self.initexc = fallbackerror(
-                _('this diff is too large to be displayed')
+                _(b'this diff is too large to be displayed')
             )
             return
         # initialize selecteditemendline (initial start-line is 0)
@@ -2016,19 +2024,19 @@
         while True:
             self.updatescreen()
             try:
-                with self.ui.timeblockedsection('crecord'):
+                with self.ui.timeblockedsection(b'crecord'):
                     keypressed = self.statuswin.getkey()
                 if self.errorstr is not None:
                     self.errorstr = None
                     continue
             except curses.error:
-                keypressed = "foobar"
+                keypressed = b"foobar"
             if self.handlekeypressed(keypressed):
                 break
 
-        if self.commenttext != "":
+        if self.commenttext != b"":
             whitespaceremoved = re.sub(
                 br"(?m)^\s.*(\n|$)", b"", self.commenttext
             )
-            if whitespaceremoved != "":
-                self.opts['message'] = self.commenttext
+            if whitespaceremoved != b"":
+                self.opts[b'message'] = self.commenttext
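
The crecord hunks above are where a mechanical byteifier needs the most
hand-review on Python 3: a bytes literal in docstring position is not a
docstring, open()'s mode argument must stay a native str, and chr() returns
str rather than bytes. A minimal self-contained sketch of those three
pitfalls (illustrative only, not part of the patch):

    def f():
        b"on Python 3 this is a plain expression, not a docstring"

    assert f.__doc__ is None  # the bytes literal never reaches __doc__

    try:
        open(__file__, b'rb')  # open() requires a native str mode
    except TypeError:
        pass  # "open() argument 'mode' must be str, not bytes"

    # chr() yields str, so b'^' + chr(n) is a TypeError; build bytes directly
    assert b'^' + bytes((ord('\x07') + 64,)) == b'^G'
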
--- a/mercurial/dagop.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/dagop.py	Sun Oct 06 09:48:39 2019 -0400
@@ -43,7 +43,7 @@
     if stopdepth == 0:
         return
     if stopdepth < 0:
-        raise error.ProgrammingError('negative stopdepth')
+        raise error.ProgrammingError(b'negative stopdepth')
     if reverse:
         heapsign = -1  # max heap
     else:
@@ -330,7 +330,7 @@
     """
     blocks = mdiff.allblocks(fctx1.data(), fctx2.data(), diffopts)
     filteredblocks, linerange1 = mdiff.blocksinrange(blocks, linerange2)
-    diffinrange = any(stype == '!' for _, stype in filteredblocks)
+    diffinrange = any(stype == b'!' for _, stype in filteredblocks)
     return diffinrange, linerange1
 
 
@@ -428,9 +428,9 @@
 
 
 def _countlines(text):
-    if text.endswith("\n"):
-        return text.count("\n")
-    return text.count("\n") + int(bool(text))
+    if text.endswith(b"\n"):
+        return text.count(b"\n")
+    return text.count(b"\n") + int(bool(text))
 
 
 def _decoratelines(text, fctx):
@@ -464,7 +464,7 @@
         for (a1, a2, b1, b2), t in blocks:
             # Changed blocks ('!') or blocks made only of blank lines ('~')
             # belong to the child.
-            if t == '=':
+            if t == b'=':
                 child.fctxs[b1:b2] = parent.fctxs[a1:a2]
                 child.linenos[b1:b2] = parent.linenos[a1:a2]
                 child.skips[b1:b2] = parent.skips[a1:a2]
--- a/mercurial/dagparser.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/dagparser.py	Sun Oct 06 09:48:39 2019 -0400
@@ -185,17 +185,17 @@
     chiter = pycompat.iterbytestr(desc)
 
     def nextch():
-        return next(chiter, '\0')
+        return next(chiter, b'\0')
 
     def nextrun(c, allow):
-        s = ''
+        s = b''
         while c in allow:
             s += c
             c = nextch()
         return c, s
 
     def nextdelimited(c, limit, escape):
-        s = ''
+        s = b''
         while c != limit:
             if c == escape:
                 c = nextch()
@@ -204,78 +204,78 @@
         return nextch(), s
 
     def nextstring(c):
-        if c == '"':
-            return nextdelimited(nextch(), '"', '\\')
+        if c == b'"':
+            return nextdelimited(nextch(), b'"', b'\\')
         else:
             return nextrun(c, wordchars)
 
     c = nextch()
-    while c != '\0':
+    while c != b'\0':
         while c in pycompat.bytestr(string.whitespace):
             c = nextch()
-        if c == '.':
-            yield 'n', (r, [p1])
+        if c == b'.':
+            yield b'n', (r, [p1])
             p1 = r
             r += 1
             c = nextch()
-        elif c == '+':
+        elif c == b'+':
             c, digs = nextrun(nextch(), pycompat.bytestr(string.digits))
             n = int(digs)
             for i in pycompat.xrange(0, n):
-                yield 'n', (r, [p1])
+                yield b'n', (r, [p1])
                 p1 = r
                 r += 1
-        elif c in '*/':
-            if c == '*':
+        elif c in b'*/':
+            if c == b'*':
                 c = nextch()
             c, pref = nextstring(c)
             prefs = [pref]
-            while c == '/':
+            while c == b'/':
                 c, pref = nextstring(nextch())
                 prefs.append(pref)
             ps = [resolve(ref) for ref in prefs]
-            yield 'n', (r, ps)
+            yield b'n', (r, ps)
             p1 = r
             r += 1
-        elif c == '<':
+        elif c == b'<':
             c, ref = nextstring(nextch())
             p1 = resolve(ref)
-        elif c == ':':
+        elif c == b':':
             c, name = nextstring(nextch())
             labels[name] = p1
-            yield 'l', (p1, name)
-        elif c == '@':
+            yield b'l', (p1, name)
+        elif c == b'@':
             c, text = nextstring(nextch())
-            yield 'a', text
-        elif c == '!':
+            yield b'a', text
+        elif c == b'!':
             c = nextch()
-            if c == '!':
-                cmd = ''
+            if c == b'!':
+                cmd = b''
                 c = nextch()
-                while c not in '\n\r\0':
+                while c not in b'\n\r\0':
                     cmd += c
                     c = nextch()
-                yield 'C', cmd
+                yield b'C', cmd
             else:
                 c, cmd = nextstring(c)
-                yield 'c', cmd
-        elif c == '#':
-            while c not in '\n\r\0':
+                yield b'c', cmd
+        elif c == b'#':
+            while c not in b'\n\r\0':
                 c = nextch()
-        elif c == '$':
+        elif c == b'$':
             p1 = -1
             c = nextch()
-        elif c == '\0':
+        elif c == b'\0':
             return  # in case it was preceded by whitespace
         else:
-            s = ''
+            s = b''
             i = 0
-            while c != '\0' and i < 10:
+            while c != b'\0' and i < 10:
                 s += c
                 i += 1
                 c = nextch()
             raise error.Abort(
-                _('invalid character in dag description: ' '%s...') % s
+                _(b'invalid character in dag description: %s...') % s
             )
 
 
@@ -292,9 +292,9 @@
     '''generates single lines for dagtext()'''
 
     def wrapstring(text):
-        if re.match("^[0-9a-z]*$", text):
+        if re.match(b"^[0-9a-z]*$", text):
             return text
-        return '"' + text.replace('\\', '\\\\').replace('"', '\"') + '"'
+        return b'"' + text.replace(b'\\', b'\\\\').replace(b'"', b'\"') + b'"'
 
     def gen():
         labels = {}
@@ -302,12 +302,12 @@
         wantr = 0
         needroot = False
         for kind, data in events:
-            if kind == 'n':
+            if kind == b'n':
                 r, ps = data
 
                 # sanity check
                 if r != wantr:
-                    raise error.Abort(_("expected id %i, got %i") % (wantr, r))
+                    raise error.Abort(_(b"expected id %i, got %i") % (wantr, r))
                 if not ps:
                     ps = [-1]
                 else:
@@ -315,8 +315,8 @@
                         if p >= r:
                             raise error.Abort(
                                 _(
-                                    "parent id %i is larger than "
-                                    "current id %i"
+                                    b"parent id %i is larger than "
+                                    b"current id %i"
                                 )
                                 % (p, r)
                             )
@@ -327,81 +327,81 @@
                 if len(ps) == 1 and ps[0] == -1:
                     if needroot:
                         if run:
-                            yield '+%d' % run
+                            yield b'+%d' % run
                             run = 0
                         if wrapnonlinear:
-                            yield '\n'
-                        yield '$'
+                            yield b'\n'
+                        yield b'$'
                         p1 = -1
                     else:
                         needroot = True
                 if len(ps) == 1 and ps[0] == p1:
                     if usedots:
-                        yield "."
+                        yield b"."
                     else:
                         run += 1
                 else:
                     if run:
-                        yield '+%d' % run
+                        yield b'+%d' % run
                         run = 0
                     if wrapnonlinear:
-                        yield '\n'
+                        yield b'\n'
                     prefs = []
                     for p in ps:
                         if p == p1:
-                            prefs.append('')
+                            prefs.append(b'')
                         elif p in labels:
                             prefs.append(labels[p])
                         else:
-                            prefs.append('%d' % (r - p))
-                    yield '*' + '/'.join(prefs)
+                            prefs.append(b'%d' % (r - p))
+                    yield b'*' + b'/'.join(prefs)
             else:
                 if run:
-                    yield '+%d' % run
+                    yield b'+%d' % run
                     run = 0
-                if kind == 'l':
+                if kind == b'l':
                     rid, name = data
                     labels[rid] = name
-                    yield ':' + name
+                    yield b':' + name
                     if wraplabels:
-                        yield '\n'
-                elif kind == 'c':
-                    yield '!' + wrapstring(data)
+                        yield b'\n'
+                elif kind == b'c':
+                    yield b'!' + wrapstring(data)
                     if wrapcommands:
-                        yield '\n'
-                elif kind == 'C':
-                    yield '!!' + data
-                    yield '\n'
-                elif kind == 'a':
+                        yield b'\n'
+                elif kind == b'C':
+                    yield b'!!' + data
+                    yield b'\n'
+                elif kind == b'a':
                     if wrapannotations:
-                        yield '\n'
-                    yield '@' + wrapstring(data)
-                elif kind == '#':
-                    yield '#' + data
-                    yield '\n'
+                        yield b'\n'
+                    yield b'@' + wrapstring(data)
+                elif kind == b'#':
+                    yield b'#' + data
+                    yield b'\n'
                 else:
                     raise error.Abort(
-                        _("invalid event type in dag: " "('%s', '%s')")
+                        _(b"invalid event type in dag: " b"('%s', '%s')")
                         % (
                             stringutil.escapestr(kind),
                             stringutil.escapestr(data),
                         )
                     )
         if run:
-            yield '+%d' % run
+            yield b'+%d' % run
 
-    line = ''
+    line = b''
     for part in gen():
-        if part == '\n':
+        if part == b'\n':
             if line:
                 yield line
-                line = ''
+                line = b''
         else:
             if len(line) + len(part) >= maxlinewidth:
                 yield line
-                line = ''
-            elif addspaces and line and part != '.':
-                line += ' '
+                line = b''
+            elif addspaces and line and part != b'.':
+                line += b' '
             line += part
     if line:
         yield line
@@ -494,7 +494,7 @@
         '+1 :f +1 :p2 *f */p2'
 
     '''
-    return "\n".join(
+    return b"\n".join(
         dagtextlines(
             dag,
             addspaces,
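
Everything the dagparser generators above emit is assembled with bytes
interpolation and b''.join(), which only works because PEP 461 restored
%-formatting for bytes objects in Python 3.5 and later. A quick self-check
of the idioms the code relies on (illustrative only):

    run, prefs = 3, [b'', b'2']
    assert b'+%d' % run == b'+3'              # run-length nodes, as in '+3'
    assert b'*' + b'/'.join(prefs) == b'*/2'  # parent refs, as in '*/2'
    assert b'%s' % b'tip' == b'tip'           # %s needs a bytes-like operand
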
--- a/mercurial/debugcommands.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/debugcommands.py	Sun Oct 06 09:48:39 2019 -0400
@@ -95,7 +95,7 @@
 command = registrar.command()
 
 
-@command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
+@command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
 def debugancestor(ui, repo, *args):
     """find the ancestor revision of two revisions in a given index"""
     if len(args) == 3:
@@ -105,18 +105,18 @@
     elif len(args) == 2:
         if not repo:
             raise error.Abort(
-                _('there is no Mercurial repository here ' '(.hg not found)')
+                _(b'there is no Mercurial repository here (.hg not found)')
             )
         rev1, rev2 = args
         r = repo.changelog
         lookup = repo.lookup
     else:
-        raise error.Abort(_('either two or three arguments required'))
+        raise error.Abort(_(b'either two or three arguments required'))
     a = r.ancestor(lookup(rev1), lookup(rev2))
-    ui.write('%d:%s\n' % (r.rev(a), hex(a)))
-
-
-@command('debugapplystreamclonebundle', [], 'FILE')
+    ui.write(b'%d:%s\n' % (r.rev(a), hex(a)))
+
+
+@command(b'debugapplystreamclonebundle', [], b'FILE')
 def debugapplystreamclonebundle(ui, repo, fname):
     """apply a stream clone bundle file"""
     f = hg.openpath(ui, fname)
@@ -125,18 +125,23 @@
 
 
 @command(
-    'debugbuilddag',
+    b'debugbuilddag',
     [
-        ('m', 'mergeable-file', None, _('add single file mergeable changes')),
+        (
+            b'm',
+            b'mergeable-file',
+            None,
+            _(b'add single file mergeable changes'),
+        ),
         (
-            'o',
-            'overwritten-file',
+            b'o',
+            b'overwritten-file',
             None,
-            _('add single file all revs overwrite'),
+            _(b'add single file all revs overwrite'),
         ),
-        ('n', 'new-file', None, _('add new file at each rev')),
+        (b'n', b'new-file', None, _(b'add new file at each rev')),
     ],
-    _('[OPTION]... [TEXT]'),
+    _(b'[OPTION]... [TEXT]'),
 )
 def debugbuilddag(
     ui,
@@ -179,38 +184,40 @@
     """
 
     if text is None:
-        ui.status(_("reading DAG from stdin\n"))
+        ui.status(_(b"reading DAG from stdin\n"))
         text = ui.fin.read()
 
     cl = repo.changelog
     if len(cl) > 0:
-        raise error.Abort(_('repository is not empty'))
+        raise error.Abort(_(b'repository is not empty'))
 
     # determine number of revs in DAG
     total = 0
     for type, data in dagparser.parsedag(text):
-        if type == 'n':
+        if type == b'n':
             total += 1
 
     if mergeable_file:
         linesperrev = 2
         # make a file with k lines per rev
         initialmergedlines = [
-            '%d' % i for i in pycompat.xrange(0, total * linesperrev)
+            b'%d' % i for i in pycompat.xrange(0, total * linesperrev)
         ]
-        initialmergedlines.append("")
+        initialmergedlines.append(b"")
 
     tags = []
-    progress = ui.makeprogress(_('building'), unit=_('revisions'), total=total)
-    with progress, repo.wlock(), repo.lock(), repo.transaction("builddag"):
+    progress = ui.makeprogress(
+        _(b'building'), unit=_(b'revisions'), total=total
+    )
+    with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
         at = -1
-        atbranch = 'default'
+        atbranch = b'default'
         nodeids = []
         id = 0
         progress.update(id)
         for type, data in dagparser.parsedag(text):
-            if type == 'n':
-                ui.note(('node %s\n' % pycompat.bytestr(data)))
+            if type == b'n':
+                ui.note((b'node %s\n' % pycompat.bytestr(data)))
                 id, ps = data
 
                 files = []
@@ -218,7 +225,7 @@
 
                 p2 = None
                 if mergeable_file:
-                    fn = "mf"
+                    fn = b"mf"
                     p1 = repo[ps[0]]
                     if len(ps) > 1:
                         p2 = repo[ps[1]]
@@ -228,30 +235,30 @@
                         ]
                         m3 = simplemerge.Merge3Text(base, local, other)
                         ml = [l.strip() for l in m3.merge_lines()]
-                        ml.append("")
+                        ml.append(b"")
                     elif at > 0:
-                        ml = p1[fn].data().split("\n")
+                        ml = p1[fn].data().split(b"\n")
                     else:
                         ml = initialmergedlines
-                    ml[id * linesperrev] += " r%i" % id
-                    mergedtext = "\n".join(ml)
+                    ml[id * linesperrev] += b" r%i" % id
+                    mergedtext = b"\n".join(ml)
                     files.append(fn)
                     filecontent[fn] = mergedtext
 
                 if overwritten_file:
-                    fn = "of"
+                    fn = b"of"
                     files.append(fn)
-                    filecontent[fn] = "r%i\n" % id
+                    filecontent[fn] = b"r%i\n" % id
 
                 if new_file:
-                    fn = "nf%i" % id
+                    fn = b"nf%i" % id
                     files.append(fn)
-                    filecontent[fn] = "r%i\n" % id
+                    filecontent[fn] = b"r%i\n" % id
                     if len(ps) > 1:
                         if not p2:
                             p2 = repo[ps[1]]
                         for fn in p2:
-                            if fn.startswith("nf"):
+                            if fn.startswith(b"nf"):
                                 files.append(fn)
                                 filecontent[fn] = p2[fn].data()
 
@@ -271,43 +278,43 @@
                 cx = context.memctx(
                     repo,
                     pars,
-                    "r%i" % id,
+                    b"r%i" % id,
                     files,
                     fctxfn,
                     date=(id, 0),
-                    user="debugbuilddag",
-                    extra={'branch': atbranch},
+                    user=b"debugbuilddag",
+                    extra={b'branch': atbranch},
                 )
                 nodeid = repo.commitctx(cx)
                 nodeids.append(nodeid)
                 at = id
-            elif type == 'l':
+            elif type == b'l':
                 id, name = data
-                ui.note(('tag %s\n' % name))
-                tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
-            elif type == 'a':
-                ui.note(('branch %s\n' % data))
+                ui.note((b'tag %s\n' % name))
+                tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
+            elif type == b'a':
+                ui.note((b'branch %s\n' % data))
                 atbranch = data
             progress.update(id)
 
         if tags:
-            repo.vfs.write("localtags", "".join(tags))
+            repo.vfs.write(b"localtags", b"".join(tags))
 
 
 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
-    indent_string = ' ' * indent
+    indent_string = b' ' * indent
     if all:
         ui.write(
-            "%sformat: id, p1, p2, cset, delta base, len(delta)\n"
+            b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
             % indent_string
         )
 
         def showchunks(named):
-            ui.write("\n%s%s\n" % (indent_string, named))
+            ui.write(b"\n%s%s\n" % (indent_string, named))
             for deltadata in gen.deltaiter():
                 node, p1, p2, cs, deltabase, delta, flags = deltadata
                 ui.write(
-                    "%s%s %s %s %s %s %d\n"
+                    b"%s%s %s %s %s %s %d\n"
                     % (
                         indent_string,
                         hex(node),
@@ -320,37 +327,37 @@
                 )
 
         chunkdata = gen.changelogheader()
-        showchunks("changelog")
+        showchunks(b"changelog")
         chunkdata = gen.manifestheader()
-        showchunks("manifest")
+        showchunks(b"manifest")
         for chunkdata in iter(gen.filelogheader, {}):
-            fname = chunkdata['filename']
+            fname = chunkdata[b'filename']
             showchunks(fname)
     else:
         if isinstance(gen, bundle2.unbundle20):
-            raise error.Abort(_('use debugbundle2 for this file'))
+            raise error.Abort(_(b'use debugbundle2 for this file'))
         chunkdata = gen.changelogheader()
         for deltadata in gen.deltaiter():
             node, p1, p2, cs, deltabase, delta, flags = deltadata
-            ui.write("%s%s\n" % (indent_string, hex(node)))
+            ui.write(b"%s%s\n" % (indent_string, hex(node)))
 
 
 def _debugobsmarkers(ui, part, indent=0, **opts):
     """display version and markers contained in 'data'"""
     opts = pycompat.byteskwargs(opts)
     data = part.read()
-    indent_string = ' ' * indent
+    indent_string = b' ' * indent
     try:
         version, markers = obsolete._readmarkers(data)
     except error.UnknownVersion as exc:
-        msg = "%sunsupported version: %s (%d bytes)\n"
+        msg = b"%sunsupported version: %s (%d bytes)\n"
         msg %= indent_string, exc.version, len(data)
         ui.write(msg)
     else:
-        msg = "%sversion: %d (%d bytes)\n"
+        msg = b"%sversion: %d (%d bytes)\n"
         msg %= indent_string, version, len(data)
         ui.write(msg)
-        fm = ui.formatter('debugobsolete', opts)
+        fm = ui.formatter(b'debugobsolete', opts)
         for rawmarker in sorted(markers):
             m = obsutil.marker(None, rawmarker)
             fm.startitem()
@@ -361,17 +368,17 @@
 
 def _debugphaseheads(ui, data, indent=0):
     """display version and markers contained in 'data'"""
-    indent_string = ' ' * indent
+    indent_string = b' ' * indent
     headsbyphase = phases.binarydecode(data)
     for phase in phases.allphases:
         for head in headsbyphase[phase]:
             ui.write(indent_string)
-            ui.write('%s %s\n' % (hex(head), phases.phasenames[phase]))
+            ui.write(b'%s %s\n' % (hex(head), phases.phasenames[phase]))
 
 
 def _quasirepr(thing):
     if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
-        return '{%s}' % (
+        return b'{%s}' % (
             b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing))
         )
     return pycompat.bytestr(repr(thing))
@@ -380,35 +387,35 @@
 def _debugbundle2(ui, gen, all=None, **opts):
     """lists the contents of a bundle2"""
     if not isinstance(gen, bundle2.unbundle20):
-        raise error.Abort(_('not a bundle2 file'))
-    ui.write(('Stream params: %s\n' % _quasirepr(gen.params)))
+        raise error.Abort(_(b'not a bundle2 file'))
+    ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
     parttypes = opts.get(r'part_type', [])
     for part in gen.iterparts():
         if parttypes and part.type not in parttypes:
             continue
-        msg = '%s -- %s (mandatory: %r)\n'
+        msg = b'%s -- %s (mandatory: %r)\n'
         ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
-        if part.type == 'changegroup':
-            version = part.params.get('version', '01')
-            cg = changegroup.getunbundler(version, part, 'UN')
+        if part.type == b'changegroup':
+            version = part.params.get(b'version', b'01')
+            cg = changegroup.getunbundler(version, part, b'UN')
             if not ui.quiet:
                 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
-        if part.type == 'obsmarkers':
+        if part.type == b'obsmarkers':
             if not ui.quiet:
                 _debugobsmarkers(ui, part, indent=4, **opts)
-        if part.type == 'phase-heads':
+        if part.type == b'phase-heads':
             if not ui.quiet:
                 _debugphaseheads(ui, part, indent=4)
 
 
 @command(
-    'debugbundle',
+    b'debugbundle',
     [
-        ('a', 'all', None, _('show all details')),
-        ('', 'part-type', [], _('show only the named part type')),
-        ('', 'spec', None, _('print the bundlespec of the bundle')),
+        (b'a', b'all', None, _(b'show all details')),
+        (b'', b'part-type', [], _(b'show only the named part type')),
+        (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
     ],
-    _('FILE'),
+    _(b'FILE'),
     norepo=True,
 )
 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
@@ -416,7 +423,7 @@
     with hg.openpath(ui, bundlepath) as f:
         if spec:
             spec = exchange.getbundlespec(ui, f)
-            ui.write('%s\n' % spec)
+            ui.write(b'%s\n' % spec)
             return
 
         gen = exchange.readbundle(ui, f, bundlepath)
@@ -425,25 +432,25 @@
         _debugchangegroup(ui, gen, all=all, **opts)
 
 
-@command('debugcapabilities', [], _('PATH'), norepo=True)
+@command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
 def debugcapabilities(ui, path, **opts):
     """lists the capabilities of a remote peer"""
     opts = pycompat.byteskwargs(opts)
     peer = hg.peer(ui, opts, path)
     caps = peer.capabilities()
-    ui.write('Main capabilities:\n')
+    ui.write(b'Main capabilities:\n')
     for c in sorted(caps):
-        ui.write('  %s\n' % c)
+        ui.write(b'  %s\n' % c)
     b2caps = bundle2.bundle2caps(peer)
     if b2caps:
-        ui.write('Bundle2 capabilities:\n')
+        ui.write(b'Bundle2 capabilities:\n')
         for key, values in sorted(b2caps.iteritems()):
-            ui.write('  %s\n' % key)
+            ui.write(b'  %s\n' % key)
             for v in values:
-                ui.write('    %s\n' % v)
-
-
-@command('debugcheckstate', [], '')
+                ui.write(b'    %s\n' % v)
+
+
+@command(b'debugcheckstate', [], b'')
 def debugcheckstate(ui, repo):
     """validate the correctness of the current dirstate"""
     parent1, parent2 = repo.dirstate.parents()
@@ -452,35 +459,35 @@
     errors = 0
     for f in repo.dirstate:
         state = repo.dirstate[f]
-        if state in "nr" and f not in m1:
-            ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
+        if state in b"nr" and f not in m1:
+            ui.warn(_(b"%s in state %s, but not in manifest1\n") % (f, state))
             errors += 1
-        if state in "a" and f in m1:
-            ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
+        if state in b"a" and f in m1:
+            ui.warn(_(b"%s in state %s, but also in manifest1\n") % (f, state))
             errors += 1
-        if state in "m" and f not in m1 and f not in m2:
+        if state in b"m" and f not in m1 and f not in m2:
             ui.warn(
-                _("%s in state %s, but not in either manifest\n") % (f, state)
+                _(b"%s in state %s, but not in either manifest\n") % (f, state)
             )
             errors += 1
     for f in m1:
         state = repo.dirstate[f]
-        if state not in "nrm":
-            ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
+        if state not in b"nrm":
+            ui.warn(_(b"%s in manifest1, but listed as state %s") % (f, state))
             errors += 1
     if errors:
-        error = _(".hg/dirstate inconsistent with current parent's manifest")
+        error = _(b".hg/dirstate inconsistent with current parent's manifest")
         raise error.Abort(error)
 
 
 @command(
-    'debugcolor',
-    [('', 'style', None, _('show all configured styles'))],
-    'hg debugcolor',
+    b'debugcolor',
+    [(b'', b'style', None, _(b'show all configured styles'))],
+    b'hg debugcolor',
 )
 def debugcolor(ui, repo, **opts):
     """show available color, effects or style"""
-    ui.write('color mode: %s\n' % stringutil.pprint(ui._colormode))
+    ui.write(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
     if opts.get(r'style'):
         return _debugdisplaystyle(ui)
     else:
@@ -493,34 +500,34 @@
     for effect in color._activeeffects(ui).keys():
         ui._styles[effect] = effect
     if ui._terminfoparams:
-        for k, v in ui.configitems('color'):
-            if k.startswith('color.'):
+        for k, v in ui.configitems(b'color'):
+            if k.startswith(b'color.'):
                 ui._styles[k] = k[6:]
-            elif k.startswith('terminfo.'):
+            elif k.startswith(b'terminfo.'):
                 ui._styles[k] = k[9:]
-    ui.write(_('available colors:\n'))
+    ui.write(_(b'available colors:\n'))
     # sort labels with '_' after the others to group '_background' entries.
-    items = sorted(ui._styles.items(), key=lambda i: ('_' in i[0], i[0], i[1]))
+    items = sorted(ui._styles.items(), key=lambda i: (b'_' in i[0], i[0], i[1]))
     for colorname, label in items:
-        ui.write('%s\n' % colorname, label=label)
+        ui.write(b'%s\n' % colorname, label=label)
 
 
 def _debugdisplaystyle(ui):
-    ui.write(_('available style:\n'))
+    ui.write(_(b'available style:\n'))
     if not ui._styles:
         return
     width = max(len(s) for s in ui._styles)
     for label, effects in sorted(ui._styles.items()):
-        ui.write('%s' % label, label=label)
+        ui.write(b'%s' % label, label=label)
         if effects:
             # 50
-            ui.write(': ')
-            ui.write(' ' * (max(0, width - len(label))))
-            ui.write(', '.join(ui.label(e, e) for e in effects.split()))
-        ui.write('\n')
-
-
-@command('debugcreatestreamclonebundle', [], 'FILE')
+            ui.write(b': ')
+            ui.write(b' ' * (max(0, width - len(label))))
+            ui.write(b', '.join(ui.label(e, e) for e in effects.split()))
+        ui.write(b'\n')
+
+
+@command(b'debugcreatestreamclonebundle', [], b'FILE')
 def debugcreatestreamclonebundle(ui, repo, fname):
     """create a stream clone bundle file
 
@@ -532,26 +539,26 @@
     if phases.hassecret(repo):
         ui.warn(
             _(
-                '(warning: stream clone bundle will contain secret '
-                'revisions)\n'
+                b'(warning: stream clone bundle will contain secret '
+                b'revisions)\n'
             )
         )
 
     requirements, gen = streamclone.generatebundlev1(repo)
     changegroup.writechunks(ui, gen, fname)
 
-    ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))
+    ui.write(_(b'bundle requirements: %s\n') % b', '.join(sorted(requirements)))
 
 
 @command(
-    'debugdag',
+    b'debugdag',
     [
-        ('t', 'tags', None, _('use tags as labels')),
-        ('b', 'branches', None, _('annotate with branch names')),
-        ('', 'dots', None, _('use dots for runs')),
-        ('s', 'spaces', None, _('separate elements by spaces')),
+        (b't', b'tags', None, _(b'use tags as labels')),
+        (b'b', b'branches', None, _(b'annotate with branch names')),
+        (b'', b'dots', None, _(b'use dots for runs')),
+        (b's', b'spaces', None, _(b'separate elements by spaces')),
     ],
-    _('[OPTION]... [FILE [REV]...]'),
+    _(b'[OPTION]... [FILE [REV]...]'),
     optionalrepo=True,
 )
 def debugdag(ui, repo, file_=None, *revs, **opts):
@@ -570,9 +577,9 @@
 
         def events():
             for r in rlog:
-                yield 'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
+                yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
                 if r in revs:
-                    yield 'l', (r, "r%i" % r)
+                    yield b'l', (r, b"r%i" % r)
 
     elif repo:
         cl = repo.changelog
@@ -584,22 +591,22 @@
                 labels.setdefault(cl.rev(n), []).append(l)
 
         def events():
-            b = "default"
+            b = b"default"
             for r in cl:
                 if branches:
-                    newb = cl.read(cl.node(r))[5]['branch']
+                    newb = cl.read(cl.node(r))[5][b'branch']
                     if newb != b:
-                        yield 'a', newb
+                        yield b'a', newb
                         b = newb
-                yield 'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
+                yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
                 if tags:
                     ls = labels.get(r)
                     if ls:
                         for l in ls:
-                            yield 'l', (r, l)
+                            yield b'l', (r, l)
 
     else:
-        raise error.Abort(_('need repo for changelog dag'))
+        raise error.Abort(_(b'need repo for changelog dag'))
 
     for line in dagparser.dagtextlines(
         events(),
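The events() generators above hand dagparser.dagtextlines a stream of (type, payload) pairs: b'n' introduces a node as (rev, [parentrevs]), b'l' attaches a label as (rev, name), and b'a' carries a branch annotation. A tiny hand-rolled stream under those assumptions:

    def events():
        yield b'n', (0, [])      # a root; nullrev parents are filtered out
        yield b'n', (1, [0])     # child of rev 0
        yield b'a', b'stable'    # branch annotation, as --branches yields
        yield b'n', (2, [1])
        yield b'l', (2, b'r2')   # label, as --tags or FILE REV mode yields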
@@ -611,30 +618,30 @@
         maxlinewidth=70,
     ):
         ui.write(line)
-        ui.write("\n")
-
-
-@command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
+        ui.write(b"\n")
+
+
+@command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
 def debugdata(ui, repo, file_, rev=None, **opts):
     """dump the contents of a data file revision"""
     opts = pycompat.byteskwargs(opts)
-    if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
+    if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
         if rev is not None:
-            raise error.CommandError('debugdata', _('invalid arguments'))
+            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
         file_, rev = None, file_
     elif rev is None:
-        raise error.CommandError('debugdata', _('invalid arguments'))
-    r = cmdutil.openstorage(repo, 'debugdata', file_, opts)
+        raise error.CommandError(b'debugdata', _(b'invalid arguments'))
+    r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
     try:
         ui.write(r.rawdata(r.lookup(rev)))
     except KeyError:
-        raise error.Abort(_('invalid revision identifier %s') % rev)
+        raise error.Abort(_(b'invalid revision identifier %s') % rev)
 
 
 @command(
-    'debugdate',
-    [('e', 'extended', None, _('try extended date formats'))],
-    _('[-e] DATE [RANGE]'),
+    b'debugdate',
+    [(b'e', b'extended', None, _(b'try extended date formats'))],
+    _(b'[-e] DATE [RANGE]'),
     norepo=True,
     optionalrepo=True,
 )
@@ -644,17 +651,17 @@
         d = dateutil.parsedate(date, util.extendeddateformats)
     else:
         d = dateutil.parsedate(date)
-    ui.write("internal: %d %d\n" % d)
-    ui.write("standard: %s\n" % dateutil.datestr(d))
+    ui.write(b"internal: %d %d\n" % d)
+    ui.write(b"standard: %s\n" % dateutil.datestr(d))
     if range:
         m = dateutil.matchdate(range)
-        ui.write("match: %s\n" % m(d[0]))
+        ui.write(b"match: %s\n" % m(d[0]))
 
 
 @command(
-    'debugdeltachain',
+    b'debugdeltachain',
     cmdutil.debugrevlogopts + cmdutil.formatteropts,
-    _('-c|-m|FILE'),
+    _(b'-c|-m|FILE'),
     optionalrepo=True,
 )
 def debugdeltachain(ui, repo, file_=None, **opts):
@@ -693,7 +700,7 @@
     The sparse read can be enabled with experimental.sparse-read = True
     """
     opts = pycompat.byteskwargs(opts)
-    r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
+    r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
     index = r.index
     start = r.start
     length = r.length
@@ -708,20 +715,20 @@
 
         if generaldelta:
             if e[3] == e[5]:
-                deltatype = 'p1'
+                deltatype = b'p1'
             elif e[3] == e[6]:
-                deltatype = 'p2'
+                deltatype = b'p2'
             elif e[3] == rev - 1:
-                deltatype = 'prev'
+                deltatype = b'prev'
             elif e[3] == rev:
-                deltatype = 'base'
+                deltatype = b'base'
             else:
-                deltatype = 'other'
+                deltatype = b'other'
         else:
             if e[3] == rev:
-                deltatype = 'base'
+                deltatype = b'base'
             else:
-                deltatype = 'prev'
+                deltatype = b'prev'
 
         chain = r._deltachain(rev)[0]
         for iterrev in chain:
@@ -730,16 +737,16 @@
 
         return compsize, uncompsize, deltatype, chain, chainsize
 
-    fm = ui.formatter('debugdeltachain', opts)
+    fm = ui.formatter(b'debugdeltachain', opts)
 
     fm.plain(
-        '    rev  chain# chainlen     prev   delta       '
-        'size    rawsize  chainsize     ratio   lindist extradist '
-        'extraratio'
+        b'    rev  chain# chainlen     prev   delta       '
+        b'size    rawsize  chainsize     ratio   lindist extradist '
+        b'extraratio'
     )
     if withsparseread:
-        fm.plain('   readsize largestblk rddensity srchunks')
-    fm.plain('\n')
+        fm.plain(b'   readsize largestblk rddensity srchunks')
+    fm.plain(b'\n')
 
     chainbases = {}
     for rev in r:
@@ -767,10 +774,10 @@
 
         fm.startitem()
         fm.write(
-            'rev chainid chainlen prevrev deltatype compsize '
-            'uncompsize chainsize chainratio lindist extradist '
-            'extraratio',
-            '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
+            b'rev chainid chainlen prevrev deltatype compsize '
+            b'uncompsize chainsize chainratio lindist extradist '
+            b'extraratio',
+            b'%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
             rev,
             chainid,
             len(chain),
@@ -816,8 +823,8 @@
                 readdensity = 1
 
             fm.write(
-                'readsize largestblock readdensity srchunks',
-                ' %10d %10d %9.5f %8d',
+                b'readsize largestblock readdensity srchunks',
+                b' %10d %10d %9.5f %8d',
                 readsize,
                 largestblock,
                 readdensity,
@@ -828,19 +835,24 @@
                 srchunks=srchunks,
             )
 
-        fm.plain('\n')
+        fm.plain(b'\n')
 
     fm.end()
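For readers of the table above: the delta column classifies each revision by comparing its delta base (index field 3) against its parents (fields 5 and 6) and neighbors, as the generaldelta branch shows. Restated on its own, with rl standing for any general-delta revlog:

    def deltatype(rl, rev):
        e = rl.index[rev]
        if e[3] == e[5]:
            return b'p1'    # delta against the first parent
        if e[3] == e[6]:
            return b'p2'    # delta against the second parent
        if e[3] == rev - 1:
            return b'prev'  # delta against the previous revision
        if e[3] == rev:
            return b'base'  # stored as a full snapshot
        return b'other'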
 
 
 @command(
-    'debugdirstate|debugstate',
+    b'debugdirstate|debugstate',
     [
-        ('', 'nodates', None, _('do not display the saved mtime (DEPRECATED)')),
-        ('', 'dates', True, _('display the saved mtime')),
-        ('', 'datesort', None, _('sort by saved mtime')),
+        (
+            b'',
+            b'nodates',
+            None,
+            _(b'do not display the saved mtime (DEPRECATED)'),
+        ),
+        (b'', b'dates', True, _(b'display the saved mtime')),
+        (b'', b'datesort', None, _(b'sort by saved mtime')),
     ],
-    _('[OPTION]...'),
+    _(b'[OPTION]...'),
 )
 def debugstate(ui, repo, **opts):
     """show the contents of the current dirstate"""
@@ -856,67 +868,67 @@
         keyfunc = None  # sort by filename
     for file_, ent in sorted(repo.dirstate.iteritems(), key=keyfunc):
         if ent[3] == -1:
-            timestr = 'unset               '
+            timestr = b'unset               '
         elif nodates:
-            timestr = 'set                 '
+            timestr = b'set                 '
         else:
             timestr = time.strftime(
                 r"%Y-%m-%d %H:%M:%S ", time.localtime(ent[3])
             )
             timestr = encoding.strtolocal(timestr)
         if ent[1] & 0o20000:
-            mode = 'lnk'
+            mode = b'lnk'
         else:
-            mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
-        ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
+            mode = b'%3o' % (ent[1] & 0o777 & ~util.umask)
+        ui.write(b"%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
     for f in repo.dirstate.copies():
-        ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
+        ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
 
 
 @command(
-    'debugdiscovery',
+    b'debugdiscovery',
     [
-        ('', 'old', None, _('use old-style discovery')),
+        (b'', b'old', None, _(b'use old-style discovery')),
         (
-            '',
-            'nonheads',
+            b'',
+            b'nonheads',
             None,
-            _('use old-style discovery with non-heads included'),
+            _(b'use old-style discovery with non-heads included'),
         ),
-        ('', 'rev', [], 'restrict discovery to this set of revs'),
-        ('', 'seed', '12323', 'specify the random seed use for discovery'),
+        (b'', b'rev', [], b'restrict discovery to this set of revs'),
+        (b'', b'seed', b'12323', b'specify the random seed used for discovery'),
     ]
     + cmdutil.remoteopts,
-    _('[--rev REV] [OTHER]'),
+    _(b'[--rev REV] [OTHER]'),
 )
-def debugdiscovery(ui, repo, remoteurl="default", **opts):
+def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
     """runs the changeset discovery protocol in isolation"""
     opts = pycompat.byteskwargs(opts)
     remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
     remote = hg.peer(repo, opts, remoteurl)
-    ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))
+    ui.status(_(b'comparing with %s\n') % util.hidepassword(remoteurl))
 
     # make sure tests are repeatable
-    random.seed(int(opts['seed']))
-
-    if opts.get('old'):
+    random.seed(int(opts[b'seed']))
+
+    if opts.get(b'old'):
 
         def doit(pushedrevs, remoteheads, remote=remote):
-            if not util.safehasattr(remote, 'branches'):
+            if not util.safehasattr(remote, b'branches'):
                 # enable in-client legacy support
                 remote = localrepo.locallegacypeer(remote.local())
             common, _in, hds = treediscovery.findcommonincoming(
                 repo, remote, force=True
             )
             common = set(common)
-            if not opts.get('nonheads'):
+            if not opts.get(b'nonheads'):
                 ui.write(
-                    "unpruned common: %s\n"
-                    % " ".join(sorted(short(n) for n in common))
+                    b"unpruned common: %s\n"
+                    % b" ".join(sorted(short(n) for n in common))
                 )
 
                 clnode = repo.changelog.node
-                common = repo.revs('heads(::%ln)', common)
+                common = repo.revs(b'heads(::%ln)', common)
                 common = {clnode(r) for r in common}
             return common, hds
 
@@ -933,8 +945,8 @@
             return common, hds
 
     remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
-    localrevs = opts['rev']
-    with util.timedcm('debug-discovery') as t:
+    localrevs = opts[b'rev']
+    with util.timedcm(b'debug-discovery') as t:
         common, hds = doit(localrevs, remoterevs)
 
     # compute all statistics
@@ -943,46 +955,48 @@
     lheads = set(repo.heads())
 
     data = {}
-    data['elapsed'] = t.elapsed
-    data['nb-common'] = len(common)
-    data['nb-common-local'] = len(common & lheads)
-    data['nb-common-remote'] = len(common & rheads)
-    data['nb-common-both'] = len(common & rheads & lheads)
-    data['nb-local'] = len(lheads)
-    data['nb-local-missing'] = data['nb-local'] - data['nb-common-local']
-    data['nb-remote'] = len(rheads)
-    data['nb-remote-unknown'] = data['nb-remote'] - data['nb-common-remote']
-    data['nb-revs'] = len(repo.revs('all()'))
-    data['nb-revs-common'] = len(repo.revs('::%ln', common))
-    data['nb-revs-missing'] = data['nb-revs'] - data['nb-revs-common']
+    data[b'elapsed'] = t.elapsed
+    data[b'nb-common'] = len(common)
+    data[b'nb-common-local'] = len(common & lheads)
+    data[b'nb-common-remote'] = len(common & rheads)
+    data[b'nb-common-both'] = len(common & rheads & lheads)
+    data[b'nb-local'] = len(lheads)
+    data[b'nb-local-missing'] = data[b'nb-local'] - data[b'nb-common-local']
+    data[b'nb-remote'] = len(rheads)
+    data[b'nb-remote-unknown'] = data[b'nb-remote'] - data[b'nb-common-remote']
+    data[b'nb-revs'] = len(repo.revs(b'all()'))
+    data[b'nb-revs-common'] = len(repo.revs(b'::%ln', common))
+    data[b'nb-revs-missing'] = data[b'nb-revs'] - data[b'nb-revs-common']
 
     # display discovery summary
-    ui.write("elapsed time:  %(elapsed)f seconds\n" % data)
-    ui.write("heads summary:\n")
-    ui.write("  total common heads:  %(nb-common)9d\n" % data)
-    ui.write("    also local heads:  %(nb-common-local)9d\n" % data)
-    ui.write("    also remote heads: %(nb-common-remote)9d\n" % data)
-    ui.write("    both:              %(nb-common-both)9d\n" % data)
-    ui.write("  local heads:         %(nb-local)9d\n" % data)
-    ui.write("    common:            %(nb-common-local)9d\n" % data)
-    ui.write("    missing:           %(nb-local-missing)9d\n" % data)
-    ui.write("  remote heads:        %(nb-remote)9d\n" % data)
-    ui.write("    common:            %(nb-common-remote)9d\n" % data)
-    ui.write("    unknown:           %(nb-remote-unknown)9d\n" % data)
-    ui.write("local changesets:      %(nb-revs)9d\n" % data)
-    ui.write("  common:              %(nb-revs-common)9d\n" % data)
-    ui.write("  missing:             %(nb-revs-missing)9d\n" % data)
+    ui.write(b"elapsed time:  %(elapsed)f seconds\n" % data)
+    ui.write(b"heads summary:\n")
+    ui.write(b"  total common heads:  %(nb-common)9d\n" % data)
+    ui.write(b"    also local heads:  %(nb-common-local)9d\n" % data)
+    ui.write(b"    also remote heads: %(nb-common-remote)9d\n" % data)
+    ui.write(b"    both:              %(nb-common-both)9d\n" % data)
+    ui.write(b"  local heads:         %(nb-local)9d\n" % data)
+    ui.write(b"    common:            %(nb-common-local)9d\n" % data)
+    ui.write(b"    missing:           %(nb-local-missing)9d\n" % data)
+    ui.write(b"  remote heads:        %(nb-remote)9d\n" % data)
+    ui.write(b"    common:            %(nb-common-remote)9d\n" % data)
+    ui.write(b"    unknown:           %(nb-remote-unknown)9d\n" % data)
+    ui.write(b"local changesets:      %(nb-revs)9d\n" % data)
+    ui.write(b"  common:              %(nb-revs-common)9d\n" % data)
+    ui.write(b"  missing:             %(nb-revs-missing)9d\n" % data)
 
     if ui.verbose:
         ui.write(
-            "common heads: %s\n" % " ".join(sorted(short(n) for n in common))
+            b"common heads: %s\n" % b" ".join(sorted(short(n) for n in common))
         )
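The revision counts above lean on revset argument interpolation: %ln expands an iterable of binary node ids inside repo.revs(). For instance, given the common set computed here:

    commonrevs = repo.revs(b'::%ln', common)  # ancestors of common heads
    missingrevs = repo.revs(b'not ::%ln', common)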
 
 
 _chunksize = 4 << 10
 
 
-@command('debugdownload', [('o', 'output', '', _('path')),], optionalrepo=True)
+@command(
+    b'debugdownload', [(b'o', b'output', b'', _(b'path')),], optionalrepo=True
+)
 def debugdownload(ui, repo, url, output=None, **opts):
     """download a resource using Mercurial logic and config
     """
@@ -990,7 +1004,7 @@
 
     dest = ui
     if output:
-        dest = open(output, "wb", _chunksize)
+        dest = open(output, b"wb", _chunksize)
     try:
         data = fh.read(_chunksize)
         while data:
@@ -1001,89 +1015,95 @@
             dest.close()
 
 
-@command('debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
+@command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
 def debugextensions(ui, repo, **opts):
     '''show information about active extensions'''
     opts = pycompat.byteskwargs(opts)
     exts = extensions.extensions(ui)
     hgver = util.version()
-    fm = ui.formatter('debugextensions', opts)
+    fm = ui.formatter(b'debugextensions', opts)
     for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
         isinternal = extensions.ismoduleinternal(extmod)
         extsource = pycompat.fsencode(extmod.__file__)
         if isinternal:
             exttestedwith = []  # never expose magic string to users
         else:
-            exttestedwith = getattr(extmod, 'testedwith', '').split()
+            exttestedwith = getattr(extmod, 'testedwith', b'').split()
         extbuglink = getattr(extmod, 'buglink', None)
 
         fm.startitem()
 
         if ui.quiet or ui.verbose:
-            fm.write('name', '%s\n', extname)
+            fm.write(b'name', b'%s\n', extname)
         else:
-            fm.write('name', '%s', extname)
+            fm.write(b'name', b'%s', extname)
             if isinternal or hgver in exttestedwith:
-                fm.plain('\n')
+                fm.plain(b'\n')
             elif not exttestedwith:
-                fm.plain(_(' (untested!)\n'))
+                fm.plain(_(b' (untested!)\n'))
             else:
                 lasttestedversion = exttestedwith[-1]
-                fm.plain(' (%s!)\n' % lasttestedversion)
+                fm.plain(b' (%s!)\n' % lasttestedversion)
 
         fm.condwrite(
             ui.verbose and extsource,
-            'source',
-            _('  location: %s\n'),
-            extsource or "",
+            b'source',
+            _(b'  location: %s\n'),
+            extsource or b"",
         )
 
         if ui.verbose:
-            fm.plain(_('  bundled: %s\n') % ['no', 'yes'][isinternal])
+            fm.plain(_(b'  bundled: %s\n') % [b'no', b'yes'][isinternal])
         fm.data(bundled=isinternal)
 
         fm.condwrite(
             ui.verbose and exttestedwith,
-            'testedwith',
-            _('  tested with: %s\n'),
-            fm.formatlist(exttestedwith, name='ver'),
+            b'testedwith',
+            _(b'  tested with: %s\n'),
+            fm.formatlist(exttestedwith, name=b'ver'),
         )
 
         fm.condwrite(
             ui.verbose and extbuglink,
-            'buglink',
-            _('  bug reporting: %s\n'),
-            extbuglink or "",
+            b'buglink',
+            _(b'  bug reporting: %s\n'),
+            extbuglink or b"",
         )
 
     fm.end()
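The testedwith and buglink attributes read above are the conventional module-level metadata an extension exports; a skeleton with placeholder values:

    # myextension.py -- a hypothetical third-party extension
    testedwith = b'5.1 5.2'                # space-separated hg versions
    buglink = b'https://example.org/bugs'  # surfaced as "bug reporting:"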
 
 
 @command(
-    'debugfileset',
+    b'debugfileset',
     [
-        ('r', 'rev', '', _('apply the filespec on this revision'), _('REV')),
         (
-            '',
-            'all-files',
-            False,
-            _('test files from all revisions and working directory'),
+            b'r',
+            b'rev',
+            b'',
+            _(b'apply the filespec on this revision'),
+            _(b'REV'),
         ),
         (
-            's',
-            'show-matcher',
-            None,
-            _('print internal representation of matcher'),
+            b'',
+            b'all-files',
+            False,
+            _(b'test files from all revisions and working directory'),
         ),
         (
-            'p',
-            'show-stage',
+            b's',
+            b'show-matcher',
+            None,
+            _(b'print internal representation of matcher'),
+        ),
+        (
+            b'p',
+            b'show-stage',
             [],
-            _('print parsed tree at the given stage'),
-            _('NAME'),
+            _(b'print parsed tree at the given stage'),
+            _(b'NAME'),
         ),
     ],
-    _('[-r REV] [--all-files] [OPTION]... FILESPEC'),
+    _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
 )
 def debugfileset(ui, repo, expr, **opts):
     '''parse and apply a fileset specification'''
@@ -1091,42 +1111,42 @@
 
     fileset.symbols  # force import of fileset so we have predicates to optimize
     opts = pycompat.byteskwargs(opts)
-    ctx = scmutil.revsingle(repo, opts.get('rev'), None)
+    ctx = scmutil.revsingle(repo, opts.get(b'rev'), None)
 
     stages = [
-        ('parsed', pycompat.identity),
-        ('analyzed', filesetlang.analyze),
-        ('optimized', filesetlang.optimize),
+        (b'parsed', pycompat.identity),
+        (b'analyzed', filesetlang.analyze),
+        (b'optimized', filesetlang.optimize),
     ]
     stagenames = set(n for n, f in stages)
 
     showalways = set()
-    if ui.verbose and not opts['show_stage']:
+    if ui.verbose and not opts[b'show_stage']:
         # show parsed tree by --verbose (deprecated)
-        showalways.add('parsed')
-    if opts['show_stage'] == ['all']:
+        showalways.add(b'parsed')
+    if opts[b'show_stage'] == [b'all']:
         showalways.update(stagenames)
     else:
-        for n in opts['show_stage']:
+        for n in opts[b'show_stage']:
             if n not in stagenames:
-                raise error.Abort(_('invalid stage name: %s') % n)
-        showalways.update(opts['show_stage'])
+                raise error.Abort(_(b'invalid stage name: %s') % n)
+        showalways.update(opts[b'show_stage'])
 
     tree = filesetlang.parse(expr)
     for n, f in stages:
         tree = f(tree)
         if n in showalways:
-            if opts['show_stage'] or n != 'parsed':
-                ui.write("* %s:\n" % n)
-            ui.write(filesetlang.prettyformat(tree), "\n")
+            if opts[b'show_stage'] or n != b'parsed':
+                ui.write(b"* %s:\n" % n)
+            ui.write(filesetlang.prettyformat(tree), b"\n")
 
     files = set()
-    if opts['all_files']:
+    if opts[b'all_files']:
         for r in repo:
             c = repo[r]
             files.update(c.files())
             files.update(c.substate)
-    if opts['all_files'] or ctx.rev() is None:
+    if opts[b'all_files'] or ctx.rev() is None:
         wctx = repo[None]
         files.update(
             repo.dirstate.walk(
@@ -1142,15 +1162,15 @@
         files.update(ctx.substate)
 
     m = ctx.matchfileset(expr)
-    if opts['show_matcher'] or (opts['show_matcher'] is None and ui.verbose):
-        ui.write('* matcher:\n', stringutil.prettyrepr(m), '\n')
+    if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
+        ui.write(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
     for f in sorted(files):
         if not m(f):
             continue
-        ui.write("%s\n" % f)
-
-
-@command('debugformat', [] + cmdutil.formatteropts)
+        ui.write(b"%s\n" % f)
+
+
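The three stages registered in debugfileset mirror how a fileset expression is evaluated: parse to a tree, analyze it, then optimize it, with prettyformat rendering whichever stage --show-stage selects. Chained by hand on an arbitrary expression:

    from mercurial import filesetlang

    tree = filesetlang.parse(b'added() or modified()')
    tree = filesetlang.analyze(tree)
    tree = filesetlang.optimize(tree)
    text = filesetlang.prettyformat(tree)  # what --show-stage prints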
+@command(b'debugformat', [] + cmdutil.formatteropts)
 def debugformat(ui, repo, **opts):
     """display format information about the current repository
 
@@ -1158,96 +1178,102 @@
     Mercurial default."""
     opts = pycompat.byteskwargs(opts)
     maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
-    maxvariantlength = max(len('format-variant'), maxvariantlength)
+    maxvariantlength = max(len(b'format-variant'), maxvariantlength)
 
     def makeformatname(name):
-        return '%s:' + (' ' * (maxvariantlength - len(name)))
-
-    fm = ui.formatter('debugformat', opts)
+        return b'%s:' + (b' ' * (maxvariantlength - len(name)))
+
+    fm = ui.formatter(b'debugformat', opts)
     if fm.isplain():
 
         def formatvalue(value):
-            if util.safehasattr(value, 'startswith'):
+            if util.safehasattr(value, b'startswith'):
                 return value
             if value:
-                return 'yes'
+                return b'yes'
             else:
-                return 'no'
+                return b'no'
 
     else:
         formatvalue = pycompat.identity
 
-    fm.plain('format-variant')
-    fm.plain(' ' * (maxvariantlength - len('format-variant')))
-    fm.plain(' repo')
+    fm.plain(b'format-variant')
+    fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
+    fm.plain(b' repo')
     if ui.verbose:
-        fm.plain(' config default')
-    fm.plain('\n')
+        fm.plain(b' config default')
+    fm.plain(b'\n')
     for fv in upgrade.allformatvariant:
         fm.startitem()
         repovalue = fv.fromrepo(repo)
         configvalue = fv.fromconfig(repo)
 
         if repovalue != configvalue:
-            namelabel = 'formatvariant.name.mismatchconfig'
-            repolabel = 'formatvariant.repo.mismatchconfig'
+            namelabel = b'formatvariant.name.mismatchconfig'
+            repolabel = b'formatvariant.repo.mismatchconfig'
         elif repovalue != fv.default:
-            namelabel = 'formatvariant.name.mismatchdefault'
-            repolabel = 'formatvariant.repo.mismatchdefault'
+            namelabel = b'formatvariant.name.mismatchdefault'
+            repolabel = b'formatvariant.repo.mismatchdefault'
         else:
-            namelabel = 'formatvariant.name.uptodate'
-            repolabel = 'formatvariant.repo.uptodate'
-
-        fm.write('name', makeformatname(fv.name), fv.name, label=namelabel)
-        fm.write('repo', ' %3s', formatvalue(repovalue), label=repolabel)
+            namelabel = b'formatvariant.name.uptodate'
+            repolabel = b'formatvariant.repo.uptodate'
+
+        fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
+        fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
         if fv.default != configvalue:
-            configlabel = 'formatvariant.config.special'
+            configlabel = b'formatvariant.config.special'
         else:
-            configlabel = 'formatvariant.config.default'
+            configlabel = b'formatvariant.config.default'
         fm.condwrite(
             ui.verbose,
-            'config',
-            ' %6s',
+            b'config',
+            b' %6s',
             formatvalue(configvalue),
             label=configlabel,
         )
         fm.condwrite(
             ui.verbose,
-            'default',
-            ' %7s',
+            b'default',
+            b' %7s',
             formatvalue(fv.default),
-            label='formatvariant.default',
+            label=b'formatvariant.default',
         )
-        fm.plain('\n')
+        fm.plain(b'\n')
     fm.end()
 
 
-@command('debugfsinfo', [], _('[PATH]'), norepo=True)
-def debugfsinfo(ui, path="."):
+@command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
+def debugfsinfo(ui, path=b"."):
     """show information detected about current filesystem"""
-    ui.write('path: %s\n' % path)
-    ui.write('mounted on: %s\n' % (util.getfsmountpoint(path) or '(unknown)'))
-    ui.write('exec: %s\n' % (util.checkexec(path) and 'yes' or 'no'))
-    ui.write('fstype: %s\n' % (util.getfstype(path) or '(unknown)'))
-    ui.write('symlink: %s\n' % (util.checklink(path) and 'yes' or 'no'))
-    ui.write('hardlink: %s\n' % (util.checknlink(path) and 'yes' or 'no'))
-    casesensitive = '(unknown)'
+    ui.write(b'path: %s\n' % path)
+    ui.write(b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)'))
+    ui.write(b'exec: %s\n' % (util.checkexec(path) and b'yes' or b'no'))
+    ui.write(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
+    ui.write(b'symlink: %s\n' % (util.checklink(path) and b'yes' or b'no'))
+    ui.write(b'hardlink: %s\n' % (util.checknlink(path) and b'yes' or b'no'))
+    casesensitive = b'(unknown)'
     try:
-        with pycompat.namedtempfile(prefix='.debugfsinfo', dir=path) as f:
-            casesensitive = util.fscasesensitive(f.name) and 'yes' or 'no'
+        with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
+            casesensitive = util.fscasesensitive(f.name) and b'yes' or b'no'
     except OSError:
         pass
-    ui.write('case-sensitive: %s\n' % casesensitive)
+    ui.write(b'case-sensitive: %s\n' % casesensitive)
 
 
 @command(
-    'debuggetbundle',
+    b'debuggetbundle',
     [
-        ('H', 'head', [], _('id of head node'), _('ID')),
-        ('C', 'common', [], _('id of common node'), _('ID')),
-        ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE')),
+        (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
+        (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
+        (
+            b't',
+            b'type',
+            b'bzip2',
+            _(b'bundle compression type to use'),
+            _(b'TYPE'),
+        ),
     ],
-    _('REPO FILE [-H|-C ID]...'),
+    _(b'REPO FILE [-H|-C ID]...'),
     norepo=True,
 )
 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
@@ -1258,8 +1284,8 @@
     """
     opts = pycompat.byteskwargs(opts)
     repo = hg.peer(ui, opts, repopath)
-    if not repo.capable('getbundle'):
-        raise error.Abort("getbundle() not supported by target repository")
+    if not repo.capable(b'getbundle'):
+        raise error.Abort(b"getbundle() not supported by target repository")
     args = {}
     if common:
         args[r'common'] = [bin(s) for s in common]
@@ -1267,22 +1293,22 @@
         args[r'heads'] = [bin(s) for s in head]
     # TODO: get desired bundlecaps from command line.
     args[r'bundlecaps'] = None
-    bundle = repo.getbundle('debug', **args)
-
-    bundletype = opts.get('type', 'bzip2').lower()
+    bundle = repo.getbundle(b'debug', **args)
+
+    bundletype = opts.get(b'type', b'bzip2').lower()
     btypes = {
-        'none': 'HG10UN',
-        'bzip2': 'HG10BZ',
-        'gzip': 'HG10GZ',
-        'bundle2': 'HG20',
+        b'none': b'HG10UN',
+        b'bzip2': b'HG10BZ',
+        b'gzip': b'HG10GZ',
+        b'bundle2': b'HG20',
     }
     bundletype = btypes.get(bundletype)
     if bundletype not in bundle2.bundletypes:
-        raise error.Abort(_('unknown bundle type specified with --type'))
+        raise error.Abort(_(b'unknown bundle type specified with --type'))
     bundle2.writebundle(ui, bundle, bundlepath, bundletype)
 
 
-@command('debugignore', [], '[FILE]')
+@command(b'debugignore', [], b'[FILE]')
 def debugignore(ui, repo, *files, **opts):
     """display the combined ignore pattern and information about ignored files
 
@@ -1294,7 +1320,7 @@
     ignore = repo.dirstate._ignore
     if not files:
         # Show all the patterns
-        ui.write("%s\n" % pycompat.byterepr(ignore))
+        ui.write(b"%s\n" % pycompat.byterepr(ignore))
     else:
         m = scmutil.match(repo[None], pats=files)
         uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
@@ -1302,7 +1328,7 @@
             nf = util.normpath(f)
             ignored = None
             ignoredata = None
-            if nf != '.':
+            if nf != b'.':
                 if ignore(nf):
                     ignored = nf
                     ignoredata = repo.dirstate._ignorefileandline(nf)
@@ -1314,33 +1340,33 @@
                             break
             if ignored:
                 if ignored == nf:
-                    ui.write(_("%s is ignored\n") % uipathfn(f))
+                    ui.write(_(b"%s is ignored\n") % uipathfn(f))
                 else:
                     ui.write(
                         _(
-                            "%s is ignored because of "
-                            "containing directory %s\n"
+                            b"%s is ignored because of "
+                            b"containing directory %s\n"
                         )
                         % (uipathfn(f), ignored)
                     )
                 ignorefile, lineno, line = ignoredata
                 ui.write(
-                    _("(ignore rule in %s, line %d: '%s')\n")
+                    _(b"(ignore rule in %s, line %d: '%s')\n")
                     % (ignorefile, lineno, line)
                 )
             else:
-                ui.write(_("%s is not ignored\n") % uipathfn(f))
+                ui.write(_(b"%s is not ignored\n") % uipathfn(f))
 
 
 @command(
-    'debugindex',
+    b'debugindex',
     cmdutil.debugrevlogopts + cmdutil.formatteropts,
-    _('-c|-m|FILE'),
+    _(b'-c|-m|FILE'),
 )
 def debugindex(ui, repo, file_=None, **opts):
     """dump index data for a storage primitive"""
     opts = pycompat.byteskwargs(opts)
-    store = cmdutil.openstorage(repo, 'debugindex', file_, opts)
+    store = cmdutil.openstorage(repo, b'debugindex', file_, opts)
 
     if ui.debugflag:
         shortfn = hex
@@ -1352,7 +1378,7 @@
         idlen = len(shortfn(store.node(i)))
         break
 
-    fm = ui.formatter('debugindex', opts)
+    fm = ui.formatter(b'debugindex', opts)
     fm.plain(
         b'   rev linkrev %s %s p2\n'
         % (b'nodeid'.ljust(idlen), b'p1'.ljust(idlen))
@@ -1364,44 +1390,47 @@
 
         fm.startitem()
         fm.write(b'rev', b'%6d ', rev)
-        fm.write(b'linkrev', '%7d ', store.linkrev(rev))
-        fm.write(b'node', '%s ', shortfn(node))
-        fm.write(b'p1', '%s ', shortfn(parents[0]))
-        fm.write(b'p2', '%s', shortfn(parents[1]))
+        fm.write(b'linkrev', b'%7d ', store.linkrev(rev))
+        fm.write(b'node', b'%s ', shortfn(node))
+        fm.write(b'p1', b'%s ', shortfn(parents[0]))
+        fm.write(b'p2', b'%s', shortfn(parents[1]))
         fm.plain(b'\n')
 
     fm.end()
 
 
 @command(
-    'debugindexdot', cmdutil.debugrevlogopts, _('-c|-m|FILE'), optionalrepo=True
+    b'debugindexdot',
+    cmdutil.debugrevlogopts,
+    _(b'-c|-m|FILE'),
+    optionalrepo=True,
 )
 def debugindexdot(ui, repo, file_=None, **opts):
     """dump an index DAG as a graphviz dot file"""
     opts = pycompat.byteskwargs(opts)
-    r = cmdutil.openstorage(repo, 'debugindexdot', file_, opts)
-    ui.write("digraph G {\n")
+    r = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
+    ui.write(b"digraph G {\n")
     for i in r:
         node = r.node(i)
         pp = r.parents(node)
-        ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
+        ui.write(b"\t%d -> %d\n" % (r.rev(pp[0]), i))
         if pp[1] != nullid:
-            ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
-    ui.write("}\n")
-
-
-@command('debugindexstats', [])
+            ui.write(b"\t%d -> %d\n" % (r.rev(pp[1]), i))
+    ui.write(b"}\n")
+
+
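For a two-revision linear history the dot output above would look roughly like this (the null parent surfaces as -1):

    digraph G {
            -1 -> 0
            0 -> 1
    }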
+@command(b'debugindexstats', [])
 def debugindexstats(ui, repo):
     """show stats related to the changelog index"""
     repo.changelog.shortest(nullid, 1)
     index = repo.changelog.index
-    if not util.safehasattr(index, 'stats'):
-        raise error.Abort(_('debugindexstats only works with native code'))
+    if not util.safehasattr(index, b'stats'):
+        raise error.Abort(_(b'debugindexstats only works with native code'))
     for k, v in sorted(index.stats().items()):
-        ui.write('%s: %d\n' % (k, v))
-
-
-@command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
+        ui.write(b'%s: %d\n' % (k, v))
+
+
+@command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
 def debuginstall(ui, **opts):
     '''test Mercurial installation
 
@@ -1411,11 +1440,11 @@
 
     problems = 0
 
-    fm = ui.formatter('debuginstall', opts)
+    fm = ui.formatter(b'debuginstall', opts)
     fm.startitem()
 
     # encoding
-    fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
+    fm.write(b'encoding', _(b"checking encoding (%s)...\n"), encoding.encoding)
     err = None
     try:
         codecs.lookup(pycompat.sysstr(encoding.encoding))
@@ -1424,52 +1453,52 @@
         problems += 1
     fm.condwrite(
         err,
-        'encodingerror',
-        _(" %s\n" " (check that your locale is properly set)\n"),
+        b'encodingerror',
+        _(b" %s\n" b" (check that your locale is properly set)\n"),
         err,
     )
 
     # Python
     fm.write(
-        'pythonexe',
-        _("checking Python executable (%s)\n"),
-        pycompat.sysexecutable or _("unknown"),
+        b'pythonexe',
+        _(b"checking Python executable (%s)\n"),
+        pycompat.sysexecutable or _(b"unknown"),
     )
     fm.write(
-        'pythonver',
-        _("checking Python version (%s)\n"),
-        ("%d.%d.%d" % sys.version_info[:3]),
+        b'pythonver',
+        _(b"checking Python version (%s)\n"),
+        (b"%d.%d.%d" % sys.version_info[:3]),
     )
     fm.write(
-        'pythonlib',
-        _("checking Python lib (%s)...\n"),
+        b'pythonlib',
+        _(b"checking Python lib (%s)...\n"),
         os.path.dirname(pycompat.fsencode(os.__file__)),
     )
 
     security = set(sslutil.supportedprotocols)
     if sslutil.hassni:
-        security.add('sni')
+        security.add(b'sni')
 
     fm.write(
-        'pythonsecurity',
-        _("checking Python security support (%s)\n"),
-        fm.formatlist(sorted(security), name='protocol', fmt='%s', sep=','),
+        b'pythonsecurity',
+        _(b"checking Python security support (%s)\n"),
+        fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
     )
 
     # These are warnings, not errors. So don't increment problem count. This
     # may change in the future.
-    if 'tls1.2' not in security:
+    if b'tls1.2' not in security:
         fm.plain(
             _(
-                '  TLS 1.2 not supported by Python install; '
-                'network connections lack modern security\n'
+                b'  TLS 1.2 not supported by Python install; '
+                b'network connections lack modern security\n'
             )
         )
-    if 'sni' not in security:
+    if b'sni' not in security:
         fm.plain(
             _(
-                '  SNI not supported by Python install; may have '
-                'connectivity issues with some servers\n'
+                b'  SNI not supported by Python install; may have '
+                b'connectivity issues with some servers\n'
             )
         )
 
@@ -1478,27 +1507,27 @@
     # hg version
     hgver = util.version()
     fm.write(
-        'hgver', _("checking Mercurial version (%s)\n"), hgver.split('+')[0]
+        b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
     )
     fm.write(
-        'hgverextra',
-        _("checking Mercurial custom build (%s)\n"),
-        '+'.join(hgver.split('+')[1:]),
+        b'hgverextra',
+        _(b"checking Mercurial custom build (%s)\n"),
+        b'+'.join(hgver.split(b'+')[1:]),
     )
 
     # compiled modules
     fm.write(
-        'hgmodulepolicy', _("checking module policy (%s)\n"), policy.policy
+        b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
     )
     fm.write(
-        'hgmodules',
-        _("checking installed modules (%s)...\n"),
+        b'hgmodules',
+        _(b"checking installed modules (%s)...\n"),
         os.path.dirname(pycompat.fsencode(__file__)),
     )
 
-    rustandc = policy.policy in ('rust+c', 'rust+c-allow')
+    rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
     rustext = rustandc  # for now, that's the only case
-    cext = policy.policy in ('c', 'allow') or rustandc
+    cext = policy.policy in (b'c', b'allow') or rustandc
     nopure = cext or rustext
     if nopure:
         err = None
@@ -1523,54 +1552,57 @@
         except Exception as inst:
             err = stringutil.forcebytestr(inst)
             problems += 1
-        fm.condwrite(err, 'extensionserror', " %s\n", err)
+        fm.condwrite(err, b'extensionserror', b" %s\n", err)
 
     compengines = util.compengines._engines.values()
     fm.write(
-        'compengines',
-        _('checking registered compression engines (%s)\n'),
+        b'compengines',
+        _(b'checking registered compression engines (%s)\n'),
         fm.formatlist(
             sorted(e.name() for e in compengines),
-            name='compengine',
-            fmt='%s',
-            sep=', ',
+            name=b'compengine',
+            fmt=b'%s',
+            sep=b', ',
         ),
     )
     fm.write(
-        'compenginesavail',
-        _('checking available compression engines ' '(%s)\n'),
+        b'compenginesavail',
+        _(b'checking available compression engines ' b'(%s)\n'),
         fm.formatlist(
             sorted(e.name() for e in compengines if e.available()),
-            name='compengine',
-            fmt='%s',
-            sep=', ',
+            name=b'compengine',
+            fmt=b'%s',
+            sep=b', ',
         ),
     )
     wirecompengines = compression.compengines.supportedwireengines(
         compression.SERVERROLE
     )
     fm.write(
-        'compenginesserver',
-        _('checking available compression engines ' 'for wire protocol (%s)\n'),
+        b'compenginesserver',
+        _(
+            b'checking available compression engines '
+            b'for wire protocol (%s)\n'
+        ),
         fm.formatlist(
             [e.name() for e in wirecompengines if e.wireprotosupport()],
-            name='compengine',
-            fmt='%s',
-            sep=', ',
+            name=b'compengine',
+            fmt=b'%s',
+            sep=b', ',
         ),
     )
-    re2 = 'missing'
+    re2 = b'missing'
     if util._re2:
-        re2 = 'available'
-    fm.plain(_('checking "re2" regexp engine (%s)\n') % re2)
+        re2 = b'available'
+    fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
     fm.data(re2=bool(util._re2))
 
     # templates
     p = templater.templatepaths()
-    fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
-    fm.condwrite(not p, '', _(" no template directories found\n"))
+    fm.write(b'templatedirs', b'checking templates (%s)...\n', b' '.join(p))
+    fm.condwrite(not p, b'', _(b" no template directories found\n"))
     if p:
-        m = templater.templatepath("map-cmdline.default")
+        m = templater.templatepath(b"map-cmdline.default")
         if m:
             # template found, check if it is working
             err = None
@@ -1579,51 +1611,51 @@
             except Exception as inst:
                 err = stringutil.forcebytestr(inst)
                 p = None
-            fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
+            fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
         else:
             p = None
         fm.condwrite(
-            p, 'defaulttemplate', _("checking default template (%s)\n"), m
+            p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
         )
         fm.condwrite(
             not m,
-            'defaulttemplatenotfound',
-            _(" template '%s' not found\n"),
-            "default",
+            b'defaulttemplatenotfound',
+            _(b" template '%s' not found\n"),
+            b"default",
         )
     if not p:
         problems += 1
     fm.condwrite(
-        not p, '', _(" (templates seem to have been installed incorrectly)\n")
+        not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
     )
 
     # editor
     editor = ui.geteditor()
     editor = util.expandpath(editor)
     editorbin = procutil.shellsplit(editor)[0]
-    fm.write('editor', _("checking commit editor... (%s)\n"), editorbin)
+    fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
     cmdpath = procutil.findexe(editorbin)
     fm.condwrite(
-        not cmdpath and editor == 'vi',
-        'vinotfound',
+        not cmdpath and editor == b'vi',
+        b'vinotfound',
         _(
-            " No commit editor set and can't find %s in PATH\n"
-            " (specify a commit editor in your configuration"
-            " file)\n"
+            b" No commit editor set and can't find %s in PATH\n"
+            b" (specify a commit editor in your configuration"
+            b" file)\n"
         ),
-        not cmdpath and editor == 'vi' and editorbin,
+        not cmdpath and editor == b'vi' and editorbin,
     )
     fm.condwrite(
-        not cmdpath and editor != 'vi',
-        'editornotfound',
+        not cmdpath and editor != b'vi',
+        b'editornotfound',
         _(
-            " Can't find editor '%s' in PATH\n"
-            " (specify a commit editor in your configuration"
-            " file)\n"
+            b" Can't find editor '%s' in PATH\n"
+            b" (specify a commit editor in your configuration"
+            b" file)\n"
         ),
         not cmdpath and editorbin,
     )
-    if not cmdpath and editor != 'vi':
+    if not cmdpath and editor != b'vi':
         problems += 1
 
     # check username
@@ -1635,13 +1667,15 @@
         err = stringutil.forcebytestr(e)
         problems += 1
 
-    fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
+    fm.condwrite(
+        username, b'username', _(b"checking username (%s)\n"), username
+    )
     fm.condwrite(
         err,
-        'usernameerror',
+        b'usernameerror',
         _(
-            "checking username...\n %s\n"
-            " (specify a username in your configuration file)\n"
+            b"checking username...\n %s\n"
+            b" (specify a username in your configuration file)\n"
         ),
         err,
     )
@@ -1651,13 +1685,13 @@
         if handler is not None:
             problems += handler(ui, fm)
 
-    fm.condwrite(not problems, '', _("no problems detected\n"))
+    fm.condwrite(not problems, b'', _(b"no problems detected\n"))
     if not problems:
         fm.data(problems=problems)
     fm.condwrite(
         problems,
-        'problems',
-        _("%d problems detected," " please check your install!\n"),
+        b'problems',
+        _(b"%d problems detected," b" please check your install!\n"),
         problems,
     )
     fm.end()
@@ -1665,7 +1699,7 @@
     return problems
 
 
-@command('debugknown', [], _('REPO ID...'), norepo=True)
+@command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
 def debugknown(ui, repopath, *ids, **opts):
     """test whether node ids are known to a repo
 
@@ -1674,32 +1708,37 @@
     """
     opts = pycompat.byteskwargs(opts)
     repo = hg.peer(ui, opts, repopath)
-    if not repo.capable('known'):
-        raise error.Abort("known() not supported by target repository")
+    if not repo.capable(b'known'):
+        raise error.Abort(b"known() not supported by target repository")
     flags = repo.known([bin(s) for s in ids])
-    ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
-
-
-@command('debuglabelcomplete', [], _('LABEL...'))
+    ui.write(b"%s\n" % (b"".join([f and b"1" or b"0" for f in flags])))
+
+
+@command(b'debuglabelcomplete', [], _(b'LABEL...'))
 def debuglabelcomplete(ui, repo, *args):
     '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
     debugnamecomplete(ui, repo, *args)
 
 
 @command(
-    'debuglocks',
+    b'debuglocks',
     [
-        ('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
+        (b'L', b'force-lock', None, _(b'free the store lock (DANGEROUS)')),
         (
-            'W',
-            'force-wlock',
+            b'W',
+            b'force-wlock',
             None,
-            _('free the working state lock (DANGEROUS)'),
+            _(b'free the working state lock (DANGEROUS)'),
         ),
-        ('s', 'set-lock', None, _('set the store lock until stopped')),
-        ('S', 'set-wlock', None, _('set the working state lock until stopped')),
+        (b's', b'set-lock', None, _(b'set the store lock until stopped')),
+        (
+            b'S',
+            b'set-wlock',
+            None,
+            _(b'set the working state lock until stopped'),
+        ),
     ],
-    _('[OPTION]...'),
+    _(b'[OPTION]...'),
 )
 def debuglocks(ui, repo, **opts):
     """show or modify state of locks
@@ -1727,9 +1766,9 @@
     """
 
     if opts.get(r'force_lock'):
-        repo.svfs.unlink('lock')
+        repo.svfs.unlink(b'lock')
     if opts.get(r'force_wlock'):
-        repo.vfs.unlink('wlock')
+        repo.vfs.unlink(b'wlock')
     if opts.get(r'force_lock') or opts.get(r'force_wlock'):
         return 0
 
@@ -1739,14 +1778,14 @@
             try:
                 locks.append(repo.wlock(False))
             except error.LockHeld:
-                raise error.Abort(_('wlock is already held'))
+                raise error.Abort(_(b'wlock is already held'))
         if opts.get(r'set_lock'):
             try:
                 locks.append(repo.lock(False))
             except error.LockHeld:
-                raise error.Abort(_('lock is already held'))
+                raise error.Abort(_(b'lock is already held'))
         if len(locks):
-            ui.promptchoice(_("ready to release the lock (y)? $$ &Yes"))
+            ui.promptchoice(_(b"ready to release the lock (y)? $$ &Yes"))
             return 0
     finally:
         release(*locks)
@@ -1769,44 +1808,44 @@
                 age = now - st[stat.ST_MTIME]
                 user = util.username(st.st_uid)
                 locker = vfs.readlock(name)
-                if ":" in locker:
-                    host, pid = locker.split(':')
+                if b":" in locker:
+                    host, pid = locker.split(b':')
                     if host == socket.gethostname():
-                        locker = 'user %s, process %s' % (user or b'None', pid)
+                        locker = b'user %s, process %s' % (user or b'None', pid)
                     else:
-                        locker = 'user %s, process %s, host %s' % (
+                        locker = b'user %s, process %s, host %s' % (
                             user or b'None',
                             pid,
                             host,
                         )
-                ui.write("%-6s %s (%ds)\n" % (name + ":", locker, age))
+                ui.write(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
                 return 1
             except OSError as e:
                 if e.errno != errno.ENOENT:
                     raise
 
-        ui.write("%-6s free\n" % (name + ":"))
+        ui.write(b"%-6s free\n" % (name + b":"))
         return 0
 
-    held += report(repo.svfs, "lock", repo.lock)
-    held += report(repo.vfs, "wlock", repo.wlock)
+    held += report(repo.svfs, b"lock", repo.lock)
+    held += report(repo.vfs, b"wlock", repo.wlock)
 
     return held
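vfs.readlock above returns the lock file's payload, which Mercurial writes as hostname:pid; the b':' check exists because older lock files may hold a bare pid. Minimal sketch against a repository's working-copy lock:

    locker = repo.vfs.readlock(b'wlock')  # raises OSError if unlocked
    if b':' in locker:
        host, pid = locker.split(b':')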
 
 
 @command(
-    'debugmanifestfulltextcache',
+    b'debugmanifestfulltextcache',
     [
-        ('', 'clear', False, _('clear the cache')),
+        (b'', b'clear', False, _(b'clear the cache')),
         (
-            'a',
-            'add',
+            b'a',
+            b'add',
             [],
-            _('add the given manifest nodes to the cache'),
-            _('NODE'),
+            _(b'add the given manifest nodes to the cache'),
+            _(b'NODE'),
         ),
     ],
-    '',
+    b'',
 )
 def debugmanifestfulltextcache(ui, repo, add=(), **opts):
     """show, clear or amend the contents of the manifest fulltext cache"""
@@ -1817,8 +1856,8 @@
             return r._fulltextcache
         except AttributeError:
             msg = _(
-                "Current revlog implementation doesn't appear to have a "
-                "manifest fulltext cache\n"
+                b"Current revlog implementation doesn't appear to have a "
+                b"manifest fulltext cache\n"
             )
             raise error.Abort(msg)
 
@@ -1836,18 +1875,18 @@
                 try:
                     manifest = m[store.lookup(n)]
                 except error.LookupError as e:
-                    raise error.Abort(e, hint="Check your manifest node id")
+                    raise error.Abort(e, hint=b"Check your manifest node id")
                 manifest.read()  # stores the revision in the cache too
             return
 
     cache = getcache()
     if not len(cache):
-        ui.write(_('cache empty\n'))
+        ui.write(_(b'cache empty\n'))
     else:
         ui.write(
             _(
-                'cache contains %d manifest entries, in order of most to '
-                'least recent:\n'
+                b'cache contains %d manifest entries, in order of most to '
+                b'least recent:\n'
             )
             % (len(cache),)
         )
@@ -1858,16 +1897,16 @@
             size = len(data)
             totalsize += size + 24  # 20 bytes nodeid, 4 bytes size
             ui.write(
-                _('id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
+                _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
             )
-        ondisk = cache._opener.stat('manifestfulltextcache').st_size
+        ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
         ui.write(
-            _('total cache data size %s, on-disk %s\n')
+            _(b'total cache data size %s, on-disk %s\n')
             % (util.bytecount(totalsize), util.bytecount(ondisk))
         )
 
 
-@command('debugmergestate', [], '')
+@command(b'debugmergestate', [], b'')
 def debugmergestate(ui, repo, *args):
     """print merge state
 
@@ -1876,12 +1915,12 @@
 
     def _hashornull(h):
         if h == nullhex:
-            return 'null'
+            return b'null'
         else:
             return h
 
     def printrecords(version):
-        ui.write('* version %d records\n' % version)
+        ui.write(b'* version %d records\n' % version)
         if version == 1:
             records = v1records
         else:
@@ -1889,58 +1928,59 @@
 
         for rtype, record in records:
             # pretty print some record types
-            if rtype == 'L':
-                ui.write('local: %s\n' % record)
-            elif rtype == 'O':
-                ui.write('other: %s\n' % record)
-            elif rtype == 'm':
-                driver, mdstate = record.split('\0', 1)
-                ui.write('merge driver: %s (state "%s")\n' % (driver, mdstate))
-            elif rtype in 'FDC':
-                r = record.split('\0')
+            if rtype == b'L':
+                ui.write(b'local: %s\n' % record)
+            elif rtype == b'O':
+                ui.write(b'other: %s\n' % record)
+            elif rtype == b'm':
+                driver, mdstate = record.split(b'\0', 1)
+                ui.write(b'merge driver: %s (state "%s")\n' % (driver, mdstate))
+            elif rtype in b'FDC':
+                r = record.split(b'\0')
                 f, state, hash, lfile, afile, anode, ofile = r[0:7]
                 if version == 1:
-                    onode = 'not stored in v1 format'
+                    onode = b'not stored in v1 format'
                     flags = r[7]
                 else:
                     onode, flags = r[7:9]
                 ui.write(
-                    'file: %s (record type "%s", state "%s", hash %s)\n'
+                    b'file: %s (record type "%s", state "%s", hash %s)\n'
                     % (f, rtype, state, _hashornull(hash))
                 )
-                ui.write('  local path: %s (flags "%s")\n' % (lfile, flags))
+                ui.write(b'  local path: %s (flags "%s")\n' % (lfile, flags))
                 ui.write(
-                    '  ancestor path: %s (node %s)\n'
+                    b'  ancestor path: %s (node %s)\n'
                     % (afile, _hashornull(anode))
                 )
                 ui.write(
-                    '  other path: %s (node %s)\n' % (ofile, _hashornull(onode))
+                    b'  other path: %s (node %s)\n'
+                    % (ofile, _hashornull(onode))
                 )
-            elif rtype == 'f':
-                filename, rawextras = record.split('\0', 1)
-                extras = rawextras.split('\0')
+            elif rtype == b'f':
+                filename, rawextras = record.split(b'\0', 1)
+                extras = rawextras.split(b'\0')
                 i = 0
                 extrastrings = []
                 while i < len(extras):
-                    extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
+                    extrastrings.append(b'%s = %s' % (extras[i], extras[i + 1]))
                     i += 2
 
                 ui.write(
-                    'file extras: %s (%s)\n'
-                    % (filename, ', '.join(extrastrings))
+                    b'file extras: %s (%s)\n'
+                    % (filename, b', '.join(extrastrings))
                 )
-            elif rtype == 'l':
-                labels = record.split('\0', 2)
+            elif rtype == b'l':
+                labels = record.split(b'\0', 2)
                 labels = [l for l in labels if len(l) > 0]
-                ui.write('labels:\n')
-                ui.write(('  local: %s\n' % labels[0]))
-                ui.write(('  other: %s\n' % labels[1]))
+                ui.write(b'labels:\n')
+                ui.write((b'  local: %s\n' % labels[0]))
+                ui.write((b'  other: %s\n' % labels[1]))
                 if len(labels) > 2:
-                    ui.write(('  base:  %s\n' % labels[2]))
+                    ui.write((b'  base:  %s\n' % labels[2]))
             else:
                 ui.write(
-                    'unrecognized entry: %s\t%s\n'
-                    % (rtype, record.replace('\0', '\t'))
+                    b'unrecognized entry: %s\t%s\n'
+                    % (rtype, record.replace(b'\0', b'\t'))
                 )
 
     # Avoid mergestate.read() since it may raise an exception for unsupported
@@ -1951,7 +1991,7 @@
     # sort so that reasonable information is on top
     v1records = ms._readrecordsv1()
     v2records = ms._readrecordsv2()
-    order = 'LOml'
+    order = b'LOml'
 
     def key(r):
         idx = order.find(r[0])
@@ -1964,21 +2004,21 @@
     v2records.sort(key=key)
 
     if not v1records and not v2records:
-        ui.write('no merge state found\n')
+        ui.write(b'no merge state found\n')
     elif not v2records:
-        ui.note('no version 2 merge state\n')
+        ui.note(b'no version 2 merge state\n')
         printrecords(1)
     elif ms._v1v2match(v1records, v2records):
-        ui.note('v1 and v2 states match: using v2\n')
+        ui.note(b'v1 and v2 states match: using v2\n')
         printrecords(2)
     else:
-        ui.note('v1 and v2 states mismatch: using v1\n')
+        ui.note(b'v1 and v2 states mismatch: using v1\n')
         printrecords(1)
         if ui.verbose:
             printrecords(2)
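# A minimal illustrative sketch (not from the patch; names hypothetical):
# the merge-state records parsed above are b'\0'-separated bytes fields, so
# after the byteify every separator literal must be bytes too -- on Python 3,
# calling bytes.split() with a str separator raises TypeError.
record = b'path/to/file\x00u\x00deadbeef'  # hypothetical 'F' record payload
fields = record.split(b'\0')  # [b'path/to/file', b'u', b'deadbeef']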
 
 
-@command('debugnamecomplete', [], _('NAME...'))
+@command(b'debugnamecomplete', [], _(b'NAME...'))
 def debugnamecomplete(ui, repo, *args):
     '''complete "names" - tags, open branch names, bookmark names'''
 
@@ -1986,7 +2026,7 @@
     # since we previously only listed open branches, we will handle that
     # specially (after this for loop)
     for name, ns in repo.names.iteritems():
-        if name != 'branches':
+        if name != b'branches':
             names.update(ns.listnames(repo))
     names.update(
         tag
@@ -1995,36 +2035,36 @@
     )
     completions = set()
     if not args:
-        args = ['']
+        args = [b'']
     for a in args:
         completions.update(n for n in names if n.startswith(a))
-    ui.write('\n'.join(sorted(completions)))
-    ui.write('\n')
+    ui.write(b'\n'.join(sorted(completions)))
+    ui.write(b'\n')
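# Illustrative sketch of the completion above (hypothetical names): the
# filter is a plain bytes prefix test over the collected name set.
names = {b'default', b'develop', b'stable'}
matches = sorted(n for n in names if n.startswith(b'de'))
assert matches == [b'default', b'develop']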
 
 
 @command(
-    'debugobsolete',
+    b'debugobsolete',
     [
-        ('', 'flags', 0, _('markers flag')),
+        (b'', b'flags', 0, _(b'markers flag')),
         (
-            '',
-            'record-parents',
+            b'',
+            b'record-parents',
             False,
-            _('record parent information for the precursor'),
+            _(b'record parent information for the precursor'),
         ),
-        ('r', 'rev', [], _('display markers relevant to REV')),
+        (b'r', b'rev', [], _(b'display markers relevant to REV')),
         (
-            '',
-            'exclusive',
+            b'',
+            b'exclusive',
             False,
-            _('restrict display to markers only ' 'relevant to REV'),
+            _(b'restrict display to markers only ' b'relevant to REV'),
         ),
-        ('', 'index', False, _('display index of the marker')),
-        ('', 'delete', [], _('delete markers specified by indices')),
+        (b'', b'index', False, _(b'display index of the marker')),
+        (b'', b'delete', [], _(b'delete markers specified by indices')),
     ]
     + cmdutil.commitopts2
     + cmdutil.formatteropts,
-    _('[OBSOLETED [REPLACEMENT ...]]'),
+    _(b'[OBSOLETED [REPLACEMENT ...]]'),
 )
 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
     """create arbitrary obsolete marker
@@ -2044,54 +2084,54 @@
             return n
         except TypeError:
             raise error.Abort(
-                'changeset references must be full hexadecimal '
-                'node identifiers'
+                b'changeset references must be full hexadecimal '
+                b'node identifiers'
             )
 
-    if opts.get('delete'):
+    if opts.get(b'delete'):
         indices = []
-        for v in opts.get('delete'):
+        for v in opts.get(b'delete'):
             try:
                 indices.append(int(v))
             except ValueError:
                 raise error.Abort(
-                    _('invalid index value: %r') % v,
-                    hint=_('use integers for indices'),
+                    _(b'invalid index value: %r') % v,
+                    hint=_(b'use integers for indices'),
                 )
 
         if repo.currenttransaction():
             raise error.Abort(
-                _('cannot delete obsmarkers in the middle ' 'of transaction.')
+                _(b'cannot delete obsmarkers in the middle of a transaction.')
             )
 
         with repo.lock():
             n = repair.deleteobsmarkers(repo.obsstore, indices)
-            ui.write(_('deleted %i obsolescence markers\n') % n)
+            ui.write(_(b'deleted %i obsolescence markers\n') % n)
 
         return
 
     if precursor is not None:
-        if opts['rev']:
-            raise error.Abort('cannot select revision when creating marker')
+        if opts[b'rev']:
+            raise error.Abort(b'cannot select revision when creating marker')
         metadata = {}
-        metadata['user'] = encoding.fromlocal(opts['user'] or ui.username())
+        metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
         succs = tuple(parsenodeid(succ) for succ in successors)
         l = repo.lock()
         try:
-            tr = repo.transaction('debugobsolete')
+            tr = repo.transaction(b'debugobsolete')
             try:
-                date = opts.get('date')
+                date = opts.get(b'date')
                 if date:
                     date = dateutil.parsedate(date)
                 else:
                     date = None
                 prec = parsenodeid(precursor)
                 parents = None
-                if opts['record_parents']:
+                if opts[b'record_parents']:
                     if prec not in repo.unfiltered():
                         raise error.Abort(
-                            'cannot used --record-parents on '
-                            'unknown changesets'
+                            b'cannot use --record-parents on '
+                            b'unknown changesets'
                         )
                     parents = repo.unfiltered()[prec].parents()
                     parents = tuple(p.node() for p in parents)
@@ -2099,7 +2139,7 @@
                     tr,
                     prec,
                     succs,
-                    opts['flags'],
+                    opts[b'flags'],
                     parents=parents,
                     date=date,
                     metadata=metadata,
@@ -2108,19 +2148,19 @@
                 tr.close()
             except ValueError as exc:
                 raise error.Abort(
-                    _('bad obsmarker input: %s') % pycompat.bytestr(exc)
+                    _(b'bad obsmarker input: %s') % pycompat.bytestr(exc)
                 )
             finally:
                 tr.release()
         finally:
             l.release()
     else:
-        if opts['rev']:
-            revs = scmutil.revrange(repo, opts['rev'])
+        if opts[b'rev']:
+            revs = scmutil.revrange(repo, opts[b'rev'])
             nodes = [repo[r].node() for r in revs]
             markers = list(
                 obsutil.getmarkers(
-                    repo, nodes=nodes, exclusive=opts['exclusive']
+                    repo, nodes=nodes, exclusive=opts[b'exclusive']
                 )
             )
             markers.sort(key=lambda x: x._data)
@@ -2129,12 +2169,12 @@
 
         markerstoiter = markers
         isrelevant = lambda m: True
-        if opts.get('rev') and opts.get('index'):
+        if opts.get(b'rev') and opts.get(b'index'):
             markerstoiter = obsutil.getmarkers(repo)
             markerset = set(markers)
             isrelevant = lambda m: m in markerset
 
-        fm = ui.formatter('debugobsolete', opts)
+        fm = ui.formatter(b'debugobsolete', opts)
         for i, m in enumerate(markerstoiter):
             if not isrelevant(m):
                 # marker can be irrelevant when we're iterating over a set
@@ -2146,48 +2186,48 @@
                 # are relevant to --rev value
                 continue
             fm.startitem()
-            ind = i if opts.get('index') else None
+            ind = i if opts.get(b'index') else None
             cmdutil.showmarker(fm, m, index=ind)
         fm.end()
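# Hedged sketch of the pycompat.byteskwargs() conversion that lets commands
# like this one index opts with bytes keys: **kwargs arrive with str keys on
# Python 3. A rough approximation only; the real helper lives in
# mercurial.pycompat.
def byteskwargs_sketch(opts):
    # assumes option names are ASCII, as command-line flag names are
    return {k.encode('ascii'): v for k, v in opts.items()}

assert byteskwargs_sketch({'rev': ['tip']}) == {b'rev': ['tip']}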
 
 
 @command(
-    'debugp1copies',
-    [('r', 'rev', '', _('revision to debug'), _('REV'))],
-    _('[-r REV]'),
+    b'debugp1copies',
+    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
+    _(b'[-r REV]'),
 )
 def debugp1copies(ui, repo, **opts):
     """dump copy information compared to p1"""
 
     opts = pycompat.byteskwargs(opts)
-    ctx = scmutil.revsingle(repo, opts.get('rev'), default=None)
+    ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
     for dst, src in ctx.p1copies().items():
-        ui.write('%s -> %s\n' % (src, dst))
+        ui.write(b'%s -> %s\n' % (src, dst))
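# Illustrative only (hypothetical data): p1copies() maps destination to
# source, and the loop above prints each pair reversed as "src -> dst".
copies = {b'new.txt': b'old.txt'}
for dst, src in copies.items():
    assert b'%s -> %s\n' % (src, dst) == b'old.txt -> new.txt\n'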
 
 
 @command(
-    'debugp2copies',
-    [('r', 'rev', '', _('revision to debug'), _('REV'))],
-    _('[-r REV]'),
+    b'debugp2copies',
+    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
+    _(b'[-r REV]'),
 )
 def debugp2copies(ui, repo, **opts):
     """dump copy information compared to p2"""
 
     opts = pycompat.byteskwargs(opts)
-    ctx = scmutil.revsingle(repo, opts.get('rev'), default=None)
+    ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
     for dst, src in ctx.p2copies().items():
-        ui.write('%s -> %s\n' % (src, dst))
+        ui.write(b'%s -> %s\n' % (src, dst))
 
 
 @command(
-    'debugpathcomplete',
+    b'debugpathcomplete',
     [
-        ('f', 'full', None, _('complete an entire path')),
-        ('n', 'normal', None, _('show only normal files')),
-        ('a', 'added', None, _('show only added files')),
-        ('r', 'removed', None, _('show only removed files')),
+        (b'f', b'full', None, _(b'complete an entire path')),
+        (b'n', b'normal', None, _(b'show only normal files')),
+        (b'a', b'added', None, _(b'show only added files')),
+        (b'r', b'removed', None, _(b'show only removed files')),
     ],
-    _('FILESPEC...'),
+    _(b'FILESPEC...'),
 )
 def debugpathcomplete(ui, repo, *specs, **opts):
     '''complete part or all of a tracked path
@@ -2205,11 +2245,11 @@
         if spec != repo.root and not spec.startswith(rootdir):
             return [], []
         if os.path.isdir(spec):
-            spec += '/'
+            spec += b'/'
         spec = spec[len(rootdir) :]
-        fixpaths = pycompat.ossep != '/'
+        fixpaths = pycompat.ossep != b'/'
         if fixpaths:
-            spec = spec.replace(pycompat.ossep, '/')
+            spec = spec.replace(pycompat.ossep, b'/')
         speclen = len(spec)
         fullpaths = opts[r'full']
         files, dirs = set(), set()
@@ -2217,7 +2257,7 @@
         for f, st in dirstate.iteritems():
             if f.startswith(spec) and st[0] in acceptable:
                 if fixpaths:
-                    f = f.replace('/', pycompat.ossep)
+                    f = f.replace(b'/', pycompat.ossep)
                 if fullpaths:
                     addfile(f)
                     continue
@@ -2228,31 +2268,31 @@
                     addfile(f)
         return files, dirs
 
-    acceptable = ''
+    acceptable = b''
     if opts[r'normal']:
-        acceptable += 'nm'
+        acceptable += b'nm'
     if opts[r'added']:
-        acceptable += 'a'
+        acceptable += b'a'
     if opts[r'removed']:
-        acceptable += 'r'
+        acceptable += b'r'
     cwd = repo.getcwd()
     if not specs:
-        specs = ['.']
+        specs = [b'.']
 
     files, dirs = set(), set()
     for spec in specs:
-        f, d = complete(spec, acceptable or 'nmar')
+        f, d = complete(spec, acceptable or b'nmar')
         files.update(f)
         dirs.update(d)
     files.update(dirs)
-    ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
-    ui.write('\n')
+    ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
+    ui.write(b'\n')
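# A minimal sketch of the separator fixup above, assuming dirstate paths are
# always stored with b'/' regardless of platform (hypothetical input):
import os

ossep = os.sep.encode('ascii')
spec = b'dir' + ossep + b'sub'
if ossep != b'/':
    spec = spec.replace(ossep, b'/')  # normalize for dirstate lookup
assert spec == b'dir/sub'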
 
 
 @command(
-    'debugpathcopies',
+    b'debugpathcopies',
     cmdutil.walkopts,
-    'hg debugpathcopies REV1 REV2 [FILE]',
+    b'hg debugpathcopies REV1 REV2 [FILE]',
     inferrepo=True,
 )
 def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
@@ -2261,16 +2301,16 @@
     ctx2 = scmutil.revsingle(repo, rev2)
     m = scmutil.match(ctx1, pats, opts)
     for dst, src in sorted(copies.pathcopies(ctx1, ctx2, m).items()):
-        ui.write('%s -> %s\n' % (src, dst))
-
-
-@command('debugpeer', [], _('PATH'), norepo=True)
+        ui.write(b'%s -> %s\n' % (src, dst))
+
+
+@command(b'debugpeer', [], _(b'PATH'), norepo=True)
 def debugpeer(ui, path):
     """establish a connection to a peer repository"""
     # Always enable peer request logging. Requires --debug to display
     # though.
     overrides = {
-        ('devel', 'debug.peer-request'): True,
+        (b'devel', b'debug.peer-request'): True,
     }
 
     with ui.configoverride(overrides):
@@ -2279,20 +2319,20 @@
         local = peer.local() is not None
         canpush = peer.canpush()
 
-        ui.write(_('url: %s\n') % peer.url())
-        ui.write(_('local: %s\n') % (_('yes') if local else _('no')))
-        ui.write(_('pushable: %s\n') % (_('yes') if canpush else _('no')))
+        ui.write(_(b'url: %s\n') % peer.url())
+        ui.write(_(b'local: %s\n') % (_(b'yes') if local else _(b'no')))
+        ui.write(_(b'pushable: %s\n') % (_(b'yes') if canpush else _(b'no')))
 
 
 @command(
-    'debugpickmergetool',
+    b'debugpickmergetool',
     [
-        ('r', 'rev', '', _('check for files in this revision'), _('REV')),
-        ('', 'changedelete', None, _('emulate merging change and delete')),
+        (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
+        (b'', b'changedelete', None, _(b'emulate merging change and delete')),
     ]
     + cmdutil.walkopts
     + cmdutil.mergetoolopts,
-    _('[PATTERN]...'),
+    _(b'[PATTERN]...'),
     inferrepo=True,
 )
 def debugpickmergetool(ui, repo, *pats, **opts):
@@ -2339,21 +2379,21 @@
     """
     opts = pycompat.byteskwargs(opts)
     overrides = {}
-    if opts['tool']:
-        overrides[('ui', 'forcemerge')] = opts['tool']
-        ui.note('with --tool %r\n' % (pycompat.bytestr(opts['tool'])))
-
-    with ui.configoverride(overrides, 'debugmergepatterns'):
-        hgmerge = encoding.environ.get("HGMERGE")
+    if opts[b'tool']:
+        overrides[(b'ui', b'forcemerge')] = opts[b'tool']
+        ui.note(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))
+
+    with ui.configoverride(overrides, b'debugmergepatterns'):
+        hgmerge = encoding.environ.get(b"HGMERGE")
         if hgmerge is not None:
-            ui.note('with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
-        uimerge = ui.config("ui", "merge")
+            ui.note(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
+        uimerge = ui.config(b"ui", b"merge")
         if uimerge:
-            ui.note('with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))
-
-        ctx = scmutil.revsingle(repo, opts.get('rev'))
+            ui.note(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))
+
+        ctx = scmutil.revsingle(repo, opts.get(b'rev'))
         m = scmutil.match(ctx, pats, opts)
-        changedelete = opts['changedelete']
+        changedelete = opts[b'changedelete']
         for path in ctx.walk(m):
             fctx = ctx[path]
             try:
@@ -2364,16 +2404,16 @@
                     ui,
                     path,
                     fctx.isbinary(),
-                    'l' in fctx.flags(),
+                    b'l' in fctx.flags(),
                     changedelete,
                 )
             finally:
                 if not ui.debugflag:
                     ui.popbuffer()
-            ui.write('%s = %s\n' % (path, tool))
-
-
-@command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
+            ui.write(b'%s = %s\n' % (path, tool))
+
+
+@command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
     '''access the pushkey key/value protocol
 
@@ -2388,38 +2428,43 @@
         key, old, new = keyinfo
         with target.commandexecutor() as e:
             r = e.callcommand(
-                'pushkey',
-                {'namespace': namespace, 'key': key, 'old': old, 'new': new,},
+                b'pushkey',
+                {
+                    b'namespace': namespace,
+                    b'key': key,
+                    b'old': old,
+                    b'new': new,
+                },
             ).result()
 
-        ui.status(pycompat.bytestr(r) + '\n')
+        ui.status(pycompat.bytestr(r) + b'\n')
         return not r
     else:
         for k, v in sorted(target.listkeys(namespace).iteritems()):
             ui.write(
-                "%s\t%s\n" % (stringutil.escapestr(k), stringutil.escapestr(v))
+                b"%s\t%s\n" % (stringutil.escapestr(k), stringutil.escapestr(v))
             )
 
 
-@command('debugpvec', [], _('A B'))
+@command(b'debugpvec', [], _(b'A B'))
 def debugpvec(ui, repo, a, b=None):
     ca = scmutil.revsingle(repo, a)
     cb = scmutil.revsingle(repo, b)
     pa = pvec.ctxpvec(ca)
     pb = pvec.ctxpvec(cb)
     if pa == pb:
-        rel = "="
+        rel = b"="
     elif pa > pb:
-        rel = ">"
+        rel = b">"
     elif pa < pb:
-        rel = "<"
+        rel = b"<"
     elif pa | pb:
-        rel = "|"
-    ui.write(_("a: %s\n") % pa)
-    ui.write(_("b: %s\n") % pb)
-    ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
+        rel = b"|"
+    ui.write(_(b"a: %s\n") % pa)
+    ui.write(_(b"b: %s\n") % pb)
+    ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
     ui.write(
-        _("delta: %d hdist: %d distance: %d relation: %s\n")
+        _(b"delta: %d hdist: %d distance: %d relation: %s\n")
         % (
             abs(pa._depth - pb._depth),
             pvec._hamming(pa._vec, pb._vec),
@@ -2430,20 +2475,20 @@
 
 
 @command(
-    'debugrebuilddirstate|debugrebuildstate',
+    b'debugrebuilddirstate|debugrebuildstate',
     [
-        ('r', 'rev', '', _('revision to rebuild to'), _('REV')),
+        (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
         (
-            '',
-            'minimal',
+            b'',
+            b'minimal',
             None,
             _(
-                'only rebuild files that are inconsistent with '
-                'the working copy parent'
+                b'only rebuild files that are inconsistent with '
+                b'the working copy parent'
             ),
         ),
     ],
-    _('[-r REV]'),
+    _(b'[-r REV]'),
 )
 def debugrebuilddirstate(ui, repo, rev, **opts):
     """rebuild the dirstate as it would look like for the given revision
@@ -2472,56 +2517,56 @@
             dirstatefiles = set(dirstate)
             manifestonly = manifestfiles - dirstatefiles
             dsonly = dirstatefiles - manifestfiles
-            dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
+            dsnotadded = set(f for f in dsonly if dirstate[f] != b'a')
             changedfiles = manifestonly | dsnotadded
 
         dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
 
 
-@command('debugrebuildfncache', [], '')
+@command(b'debugrebuildfncache', [], b'')
 def debugrebuildfncache(ui, repo):
     """rebuild the fncache file"""
     repair.rebuildfncache(ui, repo)
 
 
 @command(
-    'debugrename',
-    [('r', 'rev', '', _('revision to debug'), _('REV'))],
-    _('[-r REV] [FILE]...'),
+    b'debugrename',
+    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
+    _(b'[-r REV] [FILE]...'),
 )
 def debugrename(ui, repo, *pats, **opts):
     """dump rename information"""
 
     opts = pycompat.byteskwargs(opts)
-    ctx = scmutil.revsingle(repo, opts.get('rev'))
+    ctx = scmutil.revsingle(repo, opts.get(b'rev'))
     m = scmutil.match(ctx, pats, opts)
     for abs in ctx.walk(m):
         fctx = ctx[abs]
         o = fctx.filelog().renamed(fctx.filenode())
         rel = repo.pathto(abs)
         if o:
-            ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
+            ui.write(_(b"%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
         else:
-            ui.write(_("%s not renamed\n") % rel)
+            ui.write(_(b"%s not renamed\n") % rel)
 
 
 @command(
-    'debugrevlog',
-    cmdutil.debugrevlogopts + [('d', 'dump', False, _('dump index data'))],
-    _('-c|-m|FILE'),
+    b'debugrevlog',
+    cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
+    _(b'-c|-m|FILE'),
     optionalrepo=True,
 )
 def debugrevlog(ui, repo, file_=None, **opts):
     """show data and statistics about a revlog"""
     opts = pycompat.byteskwargs(opts)
-    r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
-
-    if opts.get("dump"):
+    r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)
+
+    if opts.get(b"dump"):
         numrevs = len(r)
         ui.write(
             (
-                "# rev p1rev p2rev start   end deltastart base   p1   p2"
-                " rawsize totalsize compression heads chainlen\n"
+                b"# rev p1rev p2rev start   end deltastart base   p1   p2"
+                b" rawsize totalsize compression heads chainlen\n"
             )
         )
         ts = 0
@@ -2543,8 +2588,8 @@
             except ZeroDivisionError:
                 compression = 0
             ui.write(
-                "%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
-                "%11d %5d %8d\n"
+                b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
+                b"%11d %5d %8d\n"
                 % (
                     rev,
                     p1,
@@ -2569,12 +2614,12 @@
     flags = []
     gdelta = False
     if v & revlog.FLAG_INLINE_DATA:
-        flags.append('inline')
+        flags.append(b'inline')
     if v & revlog.FLAG_GENERALDELTA:
         gdelta = True
-        flags.append('generaldelta')
+        flags.append(b'generaldelta')
     if not flags:
-        flags = ['(none)']
+        flags = [b'(none)']
 
     ### tracks merge vs single parent
     nummerges = 0
@@ -2676,14 +2721,14 @@
                     numother += 1
 
         # Obtain data on the raw chunks in the revlog.
-        if util.safehasattr(r, '_getsegmentforrevs'):
+        if util.safehasattr(r, b'_getsegmentforrevs'):
             segment = r._getsegmentforrevs(rev, rev)[1]
         else:
             segment = r._revlog._getsegmentforrevs(rev, rev)[1]
         if segment:
             chunktype = bytes(segment[0:1])
         else:
-            chunktype = 'empty'
+            chunktype = b'empty'
 
         if chunktype not in chunktypecounts:
             chunktypecounts[chunktype] = 0
@@ -2725,14 +2770,14 @@
     if totalsize:
         compratio = totalrawsize / totalsize
 
-    basedfmtstr = '%%%dd\n'
-    basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
+    basedfmtstr = b'%%%dd\n'
+    basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'
 
     def dfmtstr(max):
         return basedfmtstr % len(str(max))
 
     def pcfmtstr(max, padding=0):
-        return basepcfmtstr % (len(str(max)), ' ' * padding)
+        return basepcfmtstr % (len(str(max)), b' ' * padding)
 
     def pcfmt(value, total):
         if total:
@@ -2740,129 +2785,131 @@
         else:
             return value, 100.0
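# Worked example (illustrative): basedfmtstr doubles its percent signs, so
# interpolating a width yields a fresh format string sized to the widest
# value that will be printed.
assert (b'%%%dd\n' % len(str(12345))) == b'%5d\n'
assert (b'%5d\n' % 42) == b'   42\n'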
 
-    ui.write('format : %d\n' % format)
-    ui.write('flags  : %s\n' % ', '.join(flags))
-
-    ui.write('\n')
+    ui.write(b'format : %d\n' % format)
+    ui.write(b'flags  : %s\n' % b', '.join(flags))
+
+    ui.write(b'\n')
     fmt = pcfmtstr(totalsize)
     fmt2 = dfmtstr(totalsize)
-    ui.write('revisions     : ' + fmt2 % numrevs)
-    ui.write('    merges    : ' + fmt % pcfmt(nummerges, numrevs))
-    ui.write('    normal    : ' + fmt % pcfmt(numrevs - nummerges, numrevs))
-    ui.write('revisions     : ' + fmt2 % numrevs)
-    ui.write('    empty     : ' + fmt % pcfmt(numempty, numrevs))
+    ui.write(b'revisions     : ' + fmt2 % numrevs)
+    ui.write(b'    merges    : ' + fmt % pcfmt(nummerges, numrevs))
+    ui.write(b'    normal    : ' + fmt % pcfmt(numrevs - nummerges, numrevs))
+    ui.write(b'revisions     : ' + fmt2 % numrevs)
+    ui.write(b'    empty     : ' + fmt % pcfmt(numempty, numrevs))
     ui.write(
-        '                   text  : '
+        b'                   text  : '
         + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
     )
     ui.write(
-        '                   delta : '
+        b'                   delta : '
         + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
     )
-    ui.write('    snapshot  : ' + fmt % pcfmt(numfull + numsemi, numrevs))
+    ui.write(b'    snapshot  : ' + fmt % pcfmt(numfull + numsemi, numrevs))
     for depth in sorted(numsnapdepth):
         ui.write(
-            ('      lvl-%-3d :       ' % depth)
+            (b'      lvl-%-3d :       ' % depth)
             + fmt % pcfmt(numsnapdepth[depth], numrevs)
         )
-    ui.write('    deltas    : ' + fmt % pcfmt(numdeltas, numrevs))
-    ui.write('revision size : ' + fmt2 % totalsize)
-    ui.write('    snapshot  : ' + fmt % pcfmt(fulltotal + semitotal, totalsize))
+    ui.write(b'    deltas    : ' + fmt % pcfmt(numdeltas, numrevs))
+    ui.write(b'revision size : ' + fmt2 % totalsize)
+    ui.write(
+        b'    snapshot  : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
+    )
     for depth in sorted(numsnapdepth):
         ui.write(
-            ('      lvl-%-3d :       ' % depth)
+            (b'      lvl-%-3d :       ' % depth)
             + fmt % pcfmt(snaptotal[depth], totalsize)
         )
-    ui.write('    deltas    : ' + fmt % pcfmt(deltatotal, totalsize))
+    ui.write(b'    deltas    : ' + fmt % pcfmt(deltatotal, totalsize))
 
     def fmtchunktype(chunktype):
-        if chunktype == 'empty':
-            return '    %s     : ' % chunktype
+        if chunktype == b'empty':
+            return b'    %s     : ' % chunktype
         elif chunktype in pycompat.bytestr(string.ascii_letters):
-            return '    0x%s (%s)  : ' % (hex(chunktype), chunktype)
+            return b'    0x%s (%s)  : ' % (hex(chunktype), chunktype)
         else:
-            return '    0x%s      : ' % hex(chunktype)
-
-    ui.write('\n')
-    ui.write('chunks        : ' + fmt2 % numrevs)
+            return b'    0x%s      : ' % hex(chunktype)
+
+    ui.write(b'\n')
+    ui.write(b'chunks        : ' + fmt2 % numrevs)
     for chunktype in sorted(chunktypecounts):
         ui.write(fmtchunktype(chunktype))
         ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
-    ui.write('chunks size   : ' + fmt2 % totalsize)
+    ui.write(b'chunks size   : ' + fmt2 % totalsize)
     for chunktype in sorted(chunktypecounts):
         ui.write(fmtchunktype(chunktype))
         ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
 
-    ui.write('\n')
+    ui.write(b'\n')
     fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
-    ui.write('avg chain length  : ' + fmt % avgchainlen)
-    ui.write('max chain length  : ' + fmt % maxchainlen)
-    ui.write('max chain reach   : ' + fmt % maxchainspan)
-    ui.write('compression ratio : ' + fmt % compratio)
+    ui.write(b'avg chain length  : ' + fmt % avgchainlen)
+    ui.write(b'max chain length  : ' + fmt % maxchainlen)
+    ui.write(b'max chain reach   : ' + fmt % maxchainspan)
+    ui.write(b'compression ratio : ' + fmt % compratio)
 
     if format > 0:
-        ui.write('\n')
+        ui.write(b'\n')
         ui.write(
-            'uncompressed data size (min/max/avg) : %d / %d / %d\n'
+            b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
             % tuple(datasize)
         )
     ui.write(
-        'full revision size (min/max/avg)     : %d / %d / %d\n'
+        b'full revision size (min/max/avg)     : %d / %d / %d\n'
         % tuple(fullsize)
     )
     ui.write(
-        'inter-snapshot size (min/max/avg)    : %d / %d / %d\n'
+        b'inter-snapshot size (min/max/avg)    : %d / %d / %d\n'
         % tuple(semisize)
     )
     for depth in sorted(snapsizedepth):
         if depth == 0:
             continue
         ui.write(
-            '    level-%-3d (min/max/avg)          : %d / %d / %d\n'
+            b'    level-%-3d (min/max/avg)          : %d / %d / %d\n'
             % ((depth,) + tuple(snapsizedepth[depth]))
         )
     ui.write(
-        'delta size (min/max/avg)             : %d / %d / %d\n'
+        b'delta size (min/max/avg)             : %d / %d / %d\n'
         % tuple(deltasize)
     )
 
     if numdeltas > 0:
-        ui.write('\n')
+        ui.write(b'\n')
         fmt = pcfmtstr(numdeltas)
         fmt2 = pcfmtstr(numdeltas, 4)
-        ui.write('deltas against prev  : ' + fmt % pcfmt(numprev, numdeltas))
+        ui.write(b'deltas against prev  : ' + fmt % pcfmt(numprev, numdeltas))
         if numprev > 0:
             ui.write(
-                '    where prev = p1  : ' + fmt2 % pcfmt(nump1prev, numprev)
+                b'    where prev = p1  : ' + fmt2 % pcfmt(nump1prev, numprev)
             )
             ui.write(
-                '    where prev = p2  : ' + fmt2 % pcfmt(nump2prev, numprev)
+                b'    where prev = p2  : ' + fmt2 % pcfmt(nump2prev, numprev)
             )
             ui.write(
-                '    other            : ' + fmt2 % pcfmt(numoprev, numprev)
+                b'    other            : ' + fmt2 % pcfmt(numoprev, numprev)
             )
         if gdelta:
-            ui.write('deltas against p1    : ' + fmt % pcfmt(nump1, numdeltas))
-            ui.write('deltas against p2    : ' + fmt % pcfmt(nump2, numdeltas))
+            ui.write(b'deltas against p1    : ' + fmt % pcfmt(nump1, numdeltas))
+            ui.write(b'deltas against p2    : ' + fmt % pcfmt(nump2, numdeltas))
             ui.write(
-                'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
+                b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
             )
 
 
 @command(
-    'debugrevlogindex',
+    b'debugrevlogindex',
     cmdutil.debugrevlogopts
-    + [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
-    _('[-f FORMAT] -c|-m|FILE'),
+    + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
+    _(b'[-f FORMAT] -c|-m|FILE'),
     optionalrepo=True,
 )
 def debugrevlogindex(ui, repo, file_=None, **opts):
     """dump the contents of a revlog index"""
     opts = pycompat.byteskwargs(opts)
-    r = cmdutil.openrevlog(repo, 'debugrevlogindex', file_, opts)
-    format = opts.get('format', 0)
+    r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
+    format = opts.get(b'format', 0)
     if format not in (0, 1):
-        raise error.Abort(_("unknown format %d") % format)
+        raise error.Abort(_(b"unknown format %d") % format)
 
     if ui.debugflag:
         shortfn = hex
@@ -2878,27 +2925,27 @@
     if format == 0:
         if ui.verbose:
             ui.write(
-                ("   rev    offset  length linkrev" " %s %s p2\n")
-                % ("nodeid".ljust(idlen), "p1".ljust(idlen))
+                (b"   rev    offset  length linkrev" b" %s %s p2\n")
+                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
             )
         else:
             ui.write(
-                "   rev linkrev %s %s p2\n"
-                % ("nodeid".ljust(idlen), "p1".ljust(idlen))
+                b"   rev linkrev %s %s p2\n"
+                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
             )
     elif format == 1:
         if ui.verbose:
             ui.write(
                 (
-                    "   rev flag   offset   length     size   link     p1"
-                    "     p2 %s\n"
+                    b"   rev flag   offset   length     size   link     p1"
+                    b"     p2 %s\n"
                 )
-                % "nodeid".rjust(idlen)
+                % b"nodeid".rjust(idlen)
             )
         else:
             ui.write(
-                "   rev flag     size   link     p1     p2 %s\n"
-                % "nodeid".rjust(idlen)
+                b"   rev flag     size   link     p1     p2 %s\n"
+                % b"nodeid".rjust(idlen)
             )
 
     for i in r:
@@ -2910,7 +2957,7 @@
                 pp = [nullid, nullid]
             if ui.verbose:
                 ui.write(
-                    "% 6d % 9d % 7d % 7d %s %s %s\n"
+                    b"% 6d % 9d % 7d % 7d %s %s %s\n"
                     % (
                         i,
                         r.start(i),
@@ -2923,7 +2970,7 @@
                 )
             else:
                 ui.write(
-                    "% 6d % 7d %s %s %s\n"
+                    b"% 6d % 7d %s %s %s\n"
                     % (
                         i,
                         r.linkrev(i),
@@ -2936,7 +2983,7 @@
             pr = r.parentrevs(i)
             if ui.verbose:
                 ui.write(
-                    "% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
+                    b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
                     % (
                         i,
                         r.flags(i),
@@ -2951,7 +2998,7 @@
                 )
             else:
                 ui.write(
-                    "% 6d %04x % 8d % 6d % 6d % 6d %s\n"
+                    b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
                     % (
                         i,
                         r.flags(i),
@@ -2965,32 +3012,37 @@
 
 
 @command(
-    'debugrevspec',
+    b'debugrevspec',
     [
         (
-            '',
-            'optimize',
+            b'',
+            b'optimize',
             None,
-            _('print parsed tree after optimizing (DEPRECATED)'),
+            _(b'print parsed tree after optimizing (DEPRECATED)'),
         ),
-        ('', 'show-revs', True, _('print list of result revisions (default)')),
         (
-            's',
-            'show-set',
-            None,
-            _('print internal representation of result set'),
+            b'',
+            b'show-revs',
+            True,
+            _(b'print list of result revisions (default)'),
         ),
         (
-            'p',
-            'show-stage',
+            b's',
+            b'show-set',
+            None,
+            _(b'print internal representation of result set'),
+        ),
+        (
+            b'p',
+            b'show-stage',
             [],
-            _('print parsed tree at the given stage'),
-            _('NAME'),
+            _(b'print parsed tree at the given stage'),
+            _(b'NAME'),
         ),
-        ('', 'no-optimized', False, _('evaluate tree without optimization')),
-        ('', 'verify-optimized', False, _('verify optimized result')),
+        (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
+        (b'', b'verify-optimized', False, _(b'verify optimized result')),
     ],
-    'REVSPEC',
+    b'REVSPEC',
 )
 def debugrevspec(ui, repo, expr, **opts):
     """parse and apply a revision specification
@@ -3005,42 +3057,42 @@
     one. Returns 1 if the optimized result differs.
     """
     opts = pycompat.byteskwargs(opts)
-    aliases = ui.configitems('revsetalias')
+    aliases = ui.configitems(b'revsetalias')
     stages = [
-        ('parsed', lambda tree: tree),
+        (b'parsed', lambda tree: tree),
         (
-            'expanded',
+            b'expanded',
             lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
         ),
-        ('concatenated', revsetlang.foldconcat),
-        ('analyzed', revsetlang.analyze),
-        ('optimized', revsetlang.optimize),
+        (b'concatenated', revsetlang.foldconcat),
+        (b'analyzed', revsetlang.analyze),
+        (b'optimized', revsetlang.optimize),
     ]
-    if opts['no_optimized']:
+    if opts[b'no_optimized']:
         stages = stages[:-1]
-    if opts['verify_optimized'] and opts['no_optimized']:
+    if opts[b'verify_optimized'] and opts[b'no_optimized']:
         raise error.Abort(
-            _('cannot use --verify-optimized with ' '--no-optimized')
+            _(b'cannot use --verify-optimized with ' b'--no-optimized')
         )
     stagenames = set(n for n, f in stages)
 
     showalways = set()
     showchanged = set()
-    if ui.verbose and not opts['show_stage']:
+    if ui.verbose and not opts[b'show_stage']:
         # show parsed tree by --verbose (deprecated)
-        showalways.add('parsed')
-        showchanged.update(['expanded', 'concatenated'])
-        if opts['optimize']:
-            showalways.add('optimized')
-    if opts['show_stage'] and opts['optimize']:
-        raise error.Abort(_('cannot use --optimize with --show-stage'))
-    if opts['show_stage'] == ['all']:
+        showalways.add(b'parsed')
+        showchanged.update([b'expanded', b'concatenated'])
+        if opts[b'optimize']:
+            showalways.add(b'optimized')
+    if opts[b'show_stage'] and opts[b'optimize']:
+        raise error.Abort(_(b'cannot use --optimize with --show-stage'))
+    if opts[b'show_stage'] == [b'all']:
         showalways.update(stagenames)
     else:
-        for n in opts['show_stage']:
+        for n in opts[b'show_stage']:
             if n not in stagenames:
-                raise error.Abort(_('invalid stage name: %s') % n)
-        showalways.update(opts['show_stage'])
+                raise error.Abort(_(b'invalid stage name: %s') % n)
+        showalways.update(opts[b'show_stage'])
 
     treebystage = {}
     printedtree = None
@@ -3048,59 +3100,59 @@
     for n, f in stages:
         treebystage[n] = tree = f(tree)
         if n in showalways or (n in showchanged and tree != printedtree):
-            if opts['show_stage'] or n != 'parsed':
-                ui.write("* %s:\n" % n)
-            ui.write(revsetlang.prettyformat(tree), "\n")
+            if opts[b'show_stage'] or n != b'parsed':
+                ui.write(b"* %s:\n" % n)
+            ui.write(revsetlang.prettyformat(tree), b"\n")
             printedtree = tree
 
-    if opts['verify_optimized']:
-        arevs = revset.makematcher(treebystage['analyzed'])(repo)
-        brevs = revset.makematcher(treebystage['optimized'])(repo)
-        if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
-            ui.write("* analyzed set:\n", stringutil.prettyrepr(arevs), "\n")
-            ui.write("* optimized set:\n", stringutil.prettyrepr(brevs), "\n")
+    if opts[b'verify_optimized']:
+        arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
+        brevs = revset.makematcher(treebystage[b'optimized'])(repo)
+        if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
+            ui.write(b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n")
+            ui.write(b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n")
         arevs = list(arevs)
         brevs = list(brevs)
         if arevs == brevs:
             return 0
-        ui.write('--- analyzed\n', label='diff.file_a')
-        ui.write('+++ optimized\n', label='diff.file_b')
+        ui.write(b'--- analyzed\n', label=b'diff.file_a')
+        ui.write(b'+++ optimized\n', label=b'diff.file_b')
         sm = difflib.SequenceMatcher(None, arevs, brevs)
         for tag, alo, ahi, blo, bhi in sm.get_opcodes():
             if tag in (r'delete', r'replace'):
                 for c in arevs[alo:ahi]:
-                    ui.write('-%d\n' % c, label='diff.deleted')
+                    ui.write(b'-%d\n' % c, label=b'diff.deleted')
             if tag in (r'insert', r'replace'):
                 for c in brevs[blo:bhi]:
-                    ui.write('+%d\n' % c, label='diff.inserted')
+                    ui.write(b'+%d\n' % c, label=b'diff.inserted')
             if tag == r'equal':
                 for c in arevs[alo:ahi]:
-                    ui.write(' %d\n' % c)
+                    ui.write(b' %d\n' % c)
         return 1
 
     func = revset.makematcher(tree)
     revs = func(repo)
-    if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
-        ui.write("* set:\n", stringutil.prettyrepr(revs), "\n")
-    if not opts['show_revs']:
+    if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
+        ui.write(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
+    if not opts[b'show_revs']:
         return
     for c in revs:
-        ui.write("%d\n" % c)
+        ui.write(b"%d\n" % c)
 
 
 @command(
-    'debugserve',
+    b'debugserve',
     [
         (
-            '',
-            'sshstdio',
+            b'',
+            b'sshstdio',
             False,
-            _('run an SSH server bound to process handles'),
+            _(b'run an SSH server bound to process handles'),
         ),
-        ('', 'logiofd', '', _('file descriptor to log server I/O to')),
-        ('', 'logiofile', '', _('file to log server I/O to')),
+        (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
+        (b'', b'logiofile', b'', _(b'file to log server I/O to')),
     ],
-    '',
+    b'',
 )
 def debugserve(ui, repo, **opts):
     """run a server with advanced settings
@@ -3111,31 +3163,31 @@
     """
     opts = pycompat.byteskwargs(opts)
 
-    if not opts['sshstdio']:
-        raise error.Abort(_('only --sshstdio is currently supported'))
+    if not opts[b'sshstdio']:
+        raise error.Abort(_(b'only --sshstdio is currently supported'))
 
     logfh = None
 
-    if opts['logiofd'] and opts['logiofile']:
-        raise error.Abort(_('cannot use both --logiofd and --logiofile'))
-
-    if opts['logiofd']:
+    if opts[b'logiofd'] and opts[b'logiofile']:
+        raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))
+
+    if opts[b'logiofd']:
         # Line buffered because output is line based.
         try:
-            logfh = os.fdopen(int(opts['logiofd']), r'ab', 1)
+            logfh = os.fdopen(int(opts[b'logiofd']), r'ab', 1)
         except OSError as e:
             if e.errno != errno.ESPIPE:
                 raise
             # can't seek a pipe, so `ab` mode fails on py3
-            logfh = os.fdopen(int(opts['logiofd']), r'wb', 1)
-    elif opts['logiofile']:
-        logfh = open(opts['logiofile'], 'ab', 1)
+            logfh = os.fdopen(int(opts[b'logiofd']), r'wb', 1)
+    elif opts[b'logiofile']:
+        logfh = open(opts[b'logiofile'], r'ab', 1)
 
     s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
     s.serve_forever()
 
 
-@command('debugsetparents', [], _('REV1 [REV2]'))
+@command(b'debugsetparents', [], _(b'REV1 [REV2]'))
 def debugsetparents(ui, repo, rev1, rev2=None):
     """manually set the parents of the current working directory
 
@@ -3148,39 +3200,39 @@
     """
 
     node1 = scmutil.revsingle(repo, rev1).node()
-    node2 = scmutil.revsingle(repo, rev2, 'null').node()
+    node2 = scmutil.revsingle(repo, rev2, b'null').node()
 
     with repo.wlock():
         repo.setparents(node1, node2)
 
 
-@command('debugsidedata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
+@command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
 def debugsidedata(ui, repo, file_, rev=None, **opts):
     """dump the side data for a cl/manifest/file revision"""
     opts = pycompat.byteskwargs(opts)
-    if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
+    if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
         if rev is not None:
-            raise error.CommandError('debugdata', _('invalid arguments'))
+            raise error.CommandError(
+                b'debugsidedata', _(b'invalid arguments')
+            )
         file_, rev = None, file_
     elif rev is None:
-        raise error.CommandError('debugdata', _('invalid arguments'))
-    r = cmdutil.openstorage(repo, 'debugdata', file_, opts)
+        raise error.CommandError(
+            b'debugsidedata', _(b'invalid arguments')
+        )
+    r = cmdutil.openstorage(repo, b'debugsidedata', file_, opts)
     r = getattr(r, '_revlog', r)
     try:
         sidedata = r.sidedata(r.lookup(rev))
     except KeyError:
-        raise error.Abort(_('invalid revision identifier %s') % rev)
+        raise error.Abort(_(b'invalid revision identifier %s') % rev)
     if sidedata:
         sidedata = list(sidedata.items())
         sidedata.sort()
-        ui.write(('%d sidedata entries\n' % len(sidedata)))
+        ui.write((b'%d sidedata entries\n' % len(sidedata)))
         for key, value in sidedata:
-            ui.write((' entry-%04o size %d\n' % (key, len(value))))
+            ui.write((b' entry-%04o size %d\n' % (key, len(value))))
             if ui.verbose:
-                ui.write(('  %s\n' % stringutil.pprint(value)))
-
-
-@command('debugssl', [], '[SOURCE]', optionalrepo=True)
+                ui.write((b'  %s\n' % stringutil.pprint(value)))
+
+
+@command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
 def debugssl(ui, repo, source=None, **opts):
     '''test a secure connection to a server
 
@@ -3196,30 +3248,30 @@
     '''
     if not pycompat.iswindows:
         raise error.Abort(
-            _('certificate chain building is only possible on ' 'Windows')
+            _(b'certificate chain building is only possible on ' b'Windows')
         )
 
     if not source:
         if not repo:
             raise error.Abort(
                 _(
-                    "there is no Mercurial repository here, and no "
-                    "server specified"
+                    b"there is no Mercurial repository here, and no "
+                    b"server specified"
                 )
             )
-        source = "default"
+        source = b"default"
 
     source, branches = hg.parseurl(ui.expandpath(source))
     url = util.url(source)
 
-    defaultport = {'https': 443, 'ssh': 22}
+    defaultport = {b'https': 443, b'ssh': 22}
     if url.scheme in defaultport:
         try:
             addr = (url.host, int(url.port or defaultport[url.scheme]))
         except ValueError:
-            raise error.Abort(_("malformed port number in URL"))
+            raise error.Abort(_(b"malformed port number in URL"))
     else:
-        raise error.Abort(_("only https and ssh connections are supported"))
+        raise error.Abort(_(b"only https and ssh connections are supported"))
 
     from . import win32
 
@@ -3234,40 +3286,40 @@
         s.connect(addr)
         cert = s.getpeercert(True)
 
-        ui.status(_('checking the certificate chain for %s\n') % url.host)
+        ui.status(_(b'checking the certificate chain for %s\n') % url.host)
 
         complete = win32.checkcertificatechain(cert, build=False)
 
         if not complete:
-            ui.status(_('certificate chain is incomplete, updating... '))
+            ui.status(_(b'certificate chain is incomplete, updating... '))
 
             if not win32.checkcertificatechain(cert):
-                ui.status(_('failed.\n'))
+                ui.status(_(b'failed.\n'))
             else:
-                ui.status(_('done.\n'))
+                ui.status(_(b'done.\n'))
         else:
-            ui.status(_('full certificate chain is available\n'))
+            ui.status(_(b'full certificate chain is available\n'))
     finally:
         s.close()
 
 
 @command(
-    'debugsub',
-    [('r', 'rev', '', _('revision to check'), _('REV'))],
-    _('[-r REV] [REV]'),
+    b'debugsub',
+    [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
+    _(b'[-r REV] [REV]'),
 )
 def debugsub(ui, repo, rev=None):
     ctx = scmutil.revsingle(repo, rev, None)
     for k, v in sorted(ctx.substate.items()):
-        ui.write('path %s\n' % k)
-        ui.write(' source   %s\n' % v[0])
-        ui.write(' revision %s\n' % v[1])
+        ui.write(b'path %s\n' % k)
+        ui.write(b' source   %s\n' % v[0])
+        ui.write(b' revision %s\n' % v[1])
 
 
 @command(
-    'debugsuccessorssets',
-    [('', 'closest', False, _('return closest successors sets only'))],
-    _('[REV]'),
+    b'debugsuccessorssets',
+    [(b'', b'closest', False, _(b'return closest successors sets only'))],
+    _(b'[REV]'),
 )
 def debugsuccessorssets(ui, repo, *revs, **opts):
     """show set of successors for revision
@@ -3307,26 +3359,26 @@
     node2str = short
     for rev in scmutil.revrange(repo, revs):
         ctx = repo[rev]
-        ui.write('%s\n' % ctx2str(ctx))
+        ui.write(b'%s\n' % ctx2str(ctx))
         for succsset in obsutil.successorssets(
             repo, ctx.node(), closest=opts[r'closest'], cache=cache
         ):
             if succsset:
-                ui.write('    ')
+                ui.write(b'    ')
                 ui.write(node2str(succsset[0]))
                 for node in succsset[1:]:
-                    ui.write(' ')
+                    ui.write(b' ')
                     ui.write(node2str(node))
-            ui.write('\n')
+            ui.write(b'\n')
 
 
 @command(
-    'debugtemplate',
+    b'debugtemplate',
     [
-        ('r', 'rev', [], _('apply template on changesets'), _('REV')),
-        ('D', 'define', [], _('define template keyword'), _('KEY=VALUE')),
+        (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
+        (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
     ],
-    _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
+    _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
     optionalrepo=True,
 )
 def debugtemplate(ui, repo, tmpl, **opts):
@@ -3342,72 +3394,72 @@
     if opts[r'rev']:
         if repo is None:
             raise error.RepoError(
-                _('there is no Mercurial repository here ' '(.hg not found)')
+                _(b'there is no Mercurial repository here ' b'(.hg not found)')
             )
         revs = scmutil.revrange(repo, opts[r'rev'])
 
     props = {}
     for d in opts[r'define']:
         try:
-            k, v = (e.strip() for e in d.split('=', 1))
-            if not k or k == 'ui':
+            k, v = (e.strip() for e in d.split(b'=', 1))
+            if not k or k == b'ui':
                 raise ValueError
             props[k] = v
         except ValueError:
-            raise error.Abort(_('malformed keyword definition: %s') % d)
+            raise error.Abort(_(b'malformed keyword definition: %s') % d)
 
     if ui.verbose:
-        aliases = ui.configitems('templatealias')
+        aliases = ui.configitems(b'templatealias')
         tree = templater.parse(tmpl)
-        ui.note(templater.prettyformat(tree), '\n')
+        ui.note(templater.prettyformat(tree), b'\n')
         newtree = templater.expandaliases(tree, aliases)
         if newtree != tree:
-            ui.note("* expanded:\n", templater.prettyformat(newtree), '\n')
+            ui.note(b"* expanded:\n", templater.prettyformat(newtree), b'\n')
 
     if revs is None:
         tres = formatter.templateresources(ui, repo)
         t = formatter.maketemplater(ui, tmpl, resources=tres)
         if ui.verbose:
             kwds, funcs = t.symbolsuseddefault()
-            ui.write("* keywords: %s\n" % ', '.join(sorted(kwds)))
-            ui.write("* functions: %s\n" % ', '.join(sorted(funcs)))
+            ui.write(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
+            ui.write(b"* functions: %s\n" % b', '.join(sorted(funcs)))
         ui.write(t.renderdefault(props))
     else:
         displayer = logcmdutil.maketemplater(ui, repo, tmpl)
         if ui.verbose:
             kwds, funcs = displayer.t.symbolsuseddefault()
-            ui.write("* keywords: %s\n" % ', '.join(sorted(kwds)))
-            ui.write("* functions: %s\n" % ', '.join(sorted(funcs)))
+            ui.write(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
+            ui.write(b"* functions: %s\n" % b', '.join(sorted(funcs)))
         for r in revs:
             displayer.show(repo[r], **pycompat.strkwargs(props))
         displayer.close()
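# Worked example of the -D KEY=VALUE parsing above (hypothetical input):
# split once on b'=' and strip whitespace from both halves.
d = b' user = alice '
k, v = (e.strip() for e in d.split(b'=', 1))
assert (k, v) == (b'user', b'alice')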
 
 
 @command(
-    'debuguigetpass',
-    [('p', 'prompt', '', _('prompt text'), _('TEXT')),],
-    _('[-p TEXT]'),
+    b'debuguigetpass',
+    [(b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),],
+    _(b'[-p TEXT]'),
     norepo=True,
 )
-def debuguigetpass(ui, prompt=''):
+def debuguigetpass(ui, prompt=b''):
     """show prompt to type password"""
     r = ui.getpass(prompt)
-    ui.write('respose: %s\n' % r)
+    ui.write(b'response: %s\n' % r)
 
 
 @command(
-    'debuguiprompt',
-    [('p', 'prompt', '', _('prompt text'), _('TEXT')),],
-    _('[-p TEXT]'),
+    b'debuguiprompt',
+    [(b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),],
+    _(b'[-p TEXT]'),
     norepo=True,
 )
-def debuguiprompt(ui, prompt=''):
+def debuguiprompt(ui, prompt=b''):
     """show plain prompt"""
     r = ui.prompt(prompt)
-    ui.write('response: %s\n' % r)
-
-
-@command('debugupdatecaches', [])
+    ui.write(b'response: %s\n' % r)
+
+
+@command(b'debugupdatecaches', [])
 def debugupdatecaches(ui, repo, *pats, **opts):
     """warm all known caches in the repository"""
     with repo.wlock(), repo.lock():
@@ -3415,13 +3467,19 @@
 
 
 @command(
-    'debugupgraderepo',
+    b'debugupgraderepo',
     [
-        ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
-        ('', 'run', False, _('performs an upgrade')),
-        ('', 'backup', True, _('keep the old repository content around')),
-        ('', 'changelog', None, _('select the changelog for upgrade')),
-        ('', 'manifest', None, _('select the manifest for upgrade')),
+        (
+            b'o',
+            b'optimize',
+            [],
+            _(b'extra optimization to perform'),
+            _(b'NAME'),
+        ),
+        (b'', b'run', False, _(b'performs an upgrade')),
+        (b'', b'backup', True, _(b'keep the old repository content around')),
+        (b'', b'changelog', None, _(b'select the changelog for upgrade')),
+        (b'', b'manifest', None, _(b'select the manifest for upgrade')),
     ],
 )
 def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
@@ -3457,57 +3515,61 @@
 
 
 @command(
-    'debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'), inferrepo=True
+    b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
 )
 def debugwalk(ui, repo, *pats, **opts):
     """show how files match on given patterns"""
     opts = pycompat.byteskwargs(opts)
     m = scmutil.match(repo[None], pats, opts)
     if ui.verbose:
-        ui.write('* matcher:\n', stringutil.prettyrepr(m), '\n')
+        ui.write(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
     items = list(repo[None].walk(m))
     if not items:
         return
     f = lambda fn: fn
-    if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
+    if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
         f = lambda fn: util.normpath(fn)
-    fmt = 'f  %%-%ds  %%-%ds  %%s' % (
+    fmt = b'f  %%-%ds  %%-%ds  %%s' % (
         max([len(abs) for abs in items]),
         max([len(repo.pathto(abs)) for abs in items]),
     )
     for abs in items:
-        line = fmt % (abs, f(repo.pathto(abs)), m.exact(abs) and 'exact' or '')
-        ui.write("%s\n" % line.rstrip())
-
-
-@command('debugwhyunstable', [], _('REV'))
+        line = fmt % (
+            abs,
+            f(repo.pathto(abs)),
+            m.exact(abs) and b'exact' or b'',
+        )
+        ui.write(b"%s\n" % line.rstrip())
+
+
+@command(b'debugwhyunstable', [], _(b'REV'))
 def debugwhyunstable(ui, repo, rev):
     """explain instabilities of a changeset"""
     for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
-        dnodes = ''
-        if entry.get('divergentnodes'):
+        dnodes = b''
+        if entry.get(b'divergentnodes'):
             dnodes = (
-                ' '.join(
-                    '%s (%s)' % (ctx.hex(), ctx.phasestr())
-                    for ctx in entry['divergentnodes']
+                b' '.join(
+                    b'%s (%s)' % (ctx.hex(), ctx.phasestr())
+                    for ctx in entry[b'divergentnodes']
                 )
-                + ' '
+                + b' '
             )
         ui.write(
-            '%s: %s%s %s\n'
-            % (entry['instability'], dnodes, entry['reason'], entry['node'])
+            b'%s: %s%s %s\n'
+            % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
         )
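
For reference, the loop above consumes each entry from obsutil.whyunstable as
a mapping keyed by the four names accessed in the code; a minimal sketch of
the assumed shape, with hypothetical values:

    entry = {
        b'instability': b'orphan',        # hypothetical instability name
        b'reason': b'obsolete parent',    # hypothetical human-readable reason
        b'node': b'c0ffee...',            # identifier printed last
        b'divergentnodes': [],            # optional list of changectx objects
    }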
 
 
 @command(
-    'debugwireargs',
+    b'debugwireargs',
     [
-        ('', 'three', '', 'three'),
-        ('', 'four', '', 'four'),
-        ('', 'five', '', 'five'),
+        (b'', b'three', b'', b'three'),
+        (b'', b'four', b'', b'four'),
+        (b'', b'five', b'', b'five'),
     ]
     + cmdutil.remoteopts,
-    _('REPO [OPTIONS]... [ONE [TWO]]'),
+    _(b'REPO [OPTIONS]... [ONE [TWO]]'),
     norepo=True,
 )
 def debugwireargs(ui, repopath, *vals, **opts):
@@ -3523,9 +3585,9 @@
     # run twice to check that we don't mess up the stream for the next command
     res1 = repo.debugwireargs(*vals, **args)
     res2 = repo.debugwireargs(*vals, **args)
-    ui.write("%s\n" % res1)
+    ui.write(b"%s\n" % res1)
     if res1 != res2:
-        ui.warn("%s\n" % res2)
+        ui.warn(b"%s\n" % res2)
 
 
 def _parsewirelangblocks(fh):
@@ -3554,7 +3616,7 @@
         # Else we start with an indent.
 
         if not activeaction:
-            raise error.Abort(_('indented line outside of block'))
+            raise error.Abort(_(b'indented line outside of block'))
 
         indent = len(line) - len(line.lstrip())
 
@@ -3571,20 +3633,25 @@
 
 
 @command(
-    'debugwireproto',
+    b'debugwireproto',
     [
-        ('', 'localssh', False, _('start an SSH server for this repo')),
-        ('', 'peer', '', _('construct a specific version of the peer')),
-        ('', 'noreadstderr', False, _('do not read from stderr of the remote')),
+        (b'', b'localssh', False, _(b'start an SSH server for this repo')),
+        (b'', b'peer', b'', _(b'construct a specific version of the peer')),
         (
-            '',
-            'nologhandshake',
+            b'',
+            b'noreadstderr',
             False,
-            _('do not log I/O related to the peer handshake'),
+            _(b'do not read from stderr of the remote'),
+        ),
+        (
+            b'',
+            b'nologhandshake',
+            False,
+            _(b'do not log I/O related to the peer handshake'),
         ),
     ]
     + cmdutil.remoteopts,
-    _('[PATH]'),
+    _(b'[PATH]'),
     optionalrepo=True,
 )
 def debugwireproto(ui, repo, path=None, **opts):
@@ -3770,22 +3837,27 @@
     """
     opts = pycompat.byteskwargs(opts)
 
-    if opts['localssh'] and not repo:
-        raise error.Abort(_('--localssh requires a repository'))
-
-    if opts['peer'] and opts['peer'] not in ('raw', 'http2', 'ssh1', 'ssh2'):
+    if opts[b'localssh'] and not repo:
+        raise error.Abort(_(b'--localssh requires a repository'))
+
+    if opts[b'peer'] and opts[b'peer'] not in (
+        b'raw',
+        b'http2',
+        b'ssh1',
+        b'ssh2',
+    ):
         raise error.Abort(
-            _('invalid value for --peer'),
-            hint=_('valid values are "raw", "ssh1", and "ssh2"'),
+            _(b'invalid value for --peer'),
+            hint=_(b'valid values are "raw", "http2", "ssh1", and "ssh2"'),
         )
 
-    if path and opts['localssh']:
+    if path and opts[b'localssh']:
         raise error.Abort(
-            _('cannot specify --localssh with an explicit ' 'path')
+            _(b'cannot specify --localssh with an explicit ' b'path')
         )
 
     if ui.interactive():
-        ui.write(_('(waiting for commands on stdin)\n'))
+        ui.write(_(b'(waiting for commands on stdin)\n'))
 
     blocks = list(_parsewirelangblocks(ui.fin))
 
@@ -3795,15 +3867,15 @@
     stderr = None
     opener = None
 
-    if opts['localssh']:
+    if opts[b'localssh']:
         # We start the SSH server in its own process so there is process
         # separation. This prevents a whole class of potential bugs around
         # shared state from interfering with server operation.
         args = procutil.hgcmd() + [
-            '-R',
+            b'-R',
             repo.root,
-            'debugserve',
-            '--sshstdio',
+            b'debugserve',
+            b'--sshstdio',
         ]
         proc = subprocess.Popen(
             pycompat.rapply(procutil.tonativestr, args),
@@ -3818,7 +3890,7 @@
         stderr = proc.stderr
 
         # We turn the pipes into observers so we can log I/O.
-        if ui.verbose or opts['peer'] == 'raw':
+        if ui.verbose or opts[b'peer'] == b'raw':
             stdin = util.makeloggingfileobject(
                 ui, proc.stdin, b'i', logdata=True
             )
@@ -3831,11 +3903,11 @@
 
         # --localssh also implies the peer connection settings.
 
-        url = 'ssh://localserver'
-        autoreadstderr = not opts['noreadstderr']
-
-        if opts['peer'] == 'ssh1':
-            ui.write(_('creating ssh peer for wire protocol version 1\n'))
+        url = b'ssh://localserver'
+        autoreadstderr = not opts[b'noreadstderr']
+
+        if opts[b'peer'] == b'ssh1':
+            ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
             peer = sshpeer.sshv1peer(
                 ui,
                 url,
@@ -3846,8 +3918,8 @@
                 None,
                 autoreadstderr=autoreadstderr,
             )
-        elif opts['peer'] == 'ssh2':
-            ui.write(_('creating ssh peer for wire protocol version 2\n'))
+        elif opts[b'peer'] == b'ssh2':
+            ui.write(_(b'creating ssh peer for wire protocol version 2\n'))
             peer = sshpeer.sshv2peer(
                 ui,
                 url,
@@ -3858,11 +3930,11 @@
                 None,
                 autoreadstderr=autoreadstderr,
             )
-        elif opts['peer'] == 'raw':
-            ui.write(_('using raw connection to peer\n'))
+        elif opts[b'peer'] == b'raw':
+            ui.write(_(b'using raw connection to peer\n'))
             peer = None
         else:
-            ui.write(_('creating ssh peer from handshake results\n'))
+            ui.write(_(b'creating ssh peer from handshake results\n'))
             peer = sshpeer.makepeer(
                 ui,
                 url,
@@ -3878,8 +3950,8 @@
         # TODO consider not doing this because we skip
         # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
         u = util.url(path)
-        if u.scheme != 'http':
-            raise error.Abort(_('only http:// paths are currently supported'))
+        if u.scheme != b'http':
+            raise error.Abort(_(b'only http:// paths are currently supported'))
 
         url, authinfo = u.authinfo()
         openerargs = {
@@ -3902,94 +3974,94 @@
         # Don't send default headers when in raw mode. This allows us to
         # bypass most of the behavior of our URL handling code so we can
         # have near complete control over what's sent on the wire.
-        if opts['peer'] == 'raw':
+        if opts[b'peer'] == b'raw':
             openerargs[r'sendaccept'] = False
 
         opener = urlmod.opener(ui, authinfo, **openerargs)
 
-        if opts['peer'] == 'http2':
-            ui.write(_('creating http peer for wire protocol version 2\n'))
+        if opts[b'peer'] == b'http2':
+            ui.write(_(b'creating http peer for wire protocol version 2\n'))
             # We go through makepeer() because we need an API descriptor for
             # the peer instance to be useful.
             with ui.configoverride(
-                {('experimental', 'httppeer.advertise-v2'): True}
+                {(b'experimental', b'httppeer.advertise-v2'): True}
             ):
-                if opts['nologhandshake']:
+                if opts[b'nologhandshake']:
                     ui.pushbuffer()
 
                 peer = httppeer.makepeer(ui, path, opener=opener)
 
-                if opts['nologhandshake']:
+                if opts[b'nologhandshake']:
                     ui.popbuffer()
 
             if not isinstance(peer, httppeer.httpv2peer):
                 raise error.Abort(
                     _(
-                        'could not instantiate HTTP peer for '
-                        'wire protocol version 2'
+                        b'could not instantiate HTTP peer for '
+                        b'wire protocol version 2'
                     ),
                     hint=_(
-                        'the server may not have the feature '
-                        'enabled or is not allowing this '
-                        'client version'
+                        b'the server may not have the feature '
+                        b'enabled or is not allowing this '
+                        b'client version'
                     ),
                 )
 
-        elif opts['peer'] == 'raw':
-            ui.write(_('using raw connection to peer\n'))
+        elif opts[b'peer'] == b'raw':
+            ui.write(_(b'using raw connection to peer\n'))
             peer = None
-        elif opts['peer']:
+        elif opts[b'peer']:
             raise error.Abort(
-                _('--peer %s not supported with HTTP peers') % opts['peer']
+                _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
             )
         else:
             peer = httppeer.makepeer(ui, path, opener=opener)
 
         # We /could/ populate stdin/stdout with sock.makefile()...
     else:
-        raise error.Abort(_('unsupported connection configuration'))
+        raise error.Abort(_(b'unsupported connection configuration'))
 
     batchedcommands = None
 
     # Now perform actions based on the parsed wire language instructions.
     for action, lines in blocks:
-        if action in ('raw', 'raw+'):
+        if action in (b'raw', b'raw+'):
             if not stdin:
-                raise error.Abort(_('cannot call raw/raw+ on this peer'))
+                raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
 
             # Concatenate the data together.
-            data = ''.join(l.lstrip() for l in lines)
+            data = b''.join(l.lstrip() for l in lines)
             data = stringutil.unescapestr(data)
             stdin.write(data)
 
-            if action == 'raw+':
+            if action == b'raw+':
                 stdin.flush()
-        elif action == 'flush':
+        elif action == b'flush':
             if not stdin:
-                raise error.Abort(_('cannot call flush on this peer'))
+                raise error.Abort(_(b'cannot call flush on this peer'))
             stdin.flush()
-        elif action.startswith('command'):
+        elif action.startswith(b'command'):
             if not peer:
                 raise error.Abort(
                     _(
-                        'cannot send commands unless peer instance '
-                        'is available'
+                        b'cannot send commands unless peer instance '
+                        b'is available'
                     )
                 )
 
-            command = action.split(' ', 1)[1]
+            command = action.split(b' ', 1)[1]
 
             args = {}
             for line in lines:
                 # We need to allow empty values.
-                fields = line.lstrip().split(' ', 1)
+                fields = line.lstrip().split(b' ', 1)
                 if len(fields) == 1:
                     key = fields[0]
-                    value = ''
+                    value = b''
                 else:
                     key, value = fields
 
-                if value.startswith('eval:'):
+                if value.startswith(b'eval:'):
                     value = stringutil.evalpythonliteral(value[5:])
                 else:
                     value = stringutil.unescapestr(value)
@@ -4000,17 +4072,17 @@
                 batchedcommands.append((command, args))
                 continue
 
-            ui.status(_('sending %s command\n') % command)
-
-            if 'PUSHFILE' in args:
-                with open(args['PUSHFILE'], r'rb') as fh:
-                    del args['PUSHFILE']
+            ui.status(_(b'sending %s command\n') % command)
+
+            if b'PUSHFILE' in args:
+                with open(args[b'PUSHFILE'], r'rb') as fh:
+                    del args[b'PUSHFILE']
                     res, output = peer._callpush(
                         command, fh, **pycompat.strkwargs(args)
                     )
-                    ui.status(_('result: %s\n') % stringutil.escapestr(res))
+                    ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
                     ui.status(
-                        _('remote output: %s\n') % stringutil.escapestr(output)
+                        _(b'remote output: %s\n') % stringutil.escapestr(output)
                     )
             else:
                 with peer.commandexecutor() as e:
@@ -4019,46 +4091,47 @@
                 if isinstance(res, wireprotov2peer.commandresponse):
                     val = res.objects()
                     ui.status(
-                        _('response: %s\n')
+                        _(b'response: %s\n')
                         % stringutil.pprint(val, bprefix=True, indent=2)
                     )
                 else:
                     ui.status(
-                        _('response: %s\n')
+                        _(b'response: %s\n')
                         % stringutil.pprint(res, bprefix=True, indent=2)
                     )
 
-        elif action == 'batchbegin':
+        elif action == b'batchbegin':
             if batchedcommands is not None:
-                raise error.Abort(_('nested batchbegin not allowed'))
+                raise error.Abort(_(b'nested batchbegin not allowed'))
 
             batchedcommands = []
-        elif action == 'batchsubmit':
+        elif action == b'batchsubmit':
             # There is a batching API we could go through. But it would be
             # difficult to normalize requests into function calls. It is easier
             # to bypass this layer and normalize to commands + args.
             ui.status(
-                _('sending batch with %d sub-commands\n') % len(batchedcommands)
+                _(b'sending batch with %d sub-commands\n')
+                % len(batchedcommands)
             )
             for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
                 ui.status(
-                    _('response #%d: %s\n') % (i, stringutil.escapestr(chunk))
+                    _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
                 )
 
             batchedcommands = None
 
-        elif action.startswith('httprequest '):
+        elif action.startswith(b'httprequest '):
             if not opener:
                 raise error.Abort(
-                    _('cannot use httprequest without an HTTP ' 'peer')
+                    _(b'cannot use httprequest without an HTTP ' b'peer')
                 )
 
-            request = action.split(' ', 2)
+            request = action.split(b' ', 2)
             if len(request) != 3:
                 raise error.Abort(
                     _(
-                        'invalid httprequest: expected format is '
-                        '"httprequest <method> <path>'
+                        b'invalid httprequest: expected format is '
+                        b'"httprequest <method> <path>'
                     )
                 )
 
@@ -4077,7 +4150,7 @@
                     continue
 
                 if line.startswith(b'BODYFILE '):
-                    with open(line.split(b' ', 1), 'rb') as fh:
+                    with open(line.split(b' ', 1)[1], r'rb') as fh:
                         body = fh.read()
                 elif line.startswith(b'frame '):
                     frame = wireprotoframing.makeframefromhumanstring(
@@ -4087,7 +4160,7 @@
                     frames.append(frame)
                 else:
                     raise error.Abort(
-                        _('unknown argument to httprequest: %s') % line
+                        _(b'unknown argument to httprequest: %s') % line
                     )
 
             url = path + httppath
@@ -4113,45 +4186,47 @@
             ct = res.headers.get(r'Content-Type')
             if ct == r'application/mercurial-cbor':
                 ui.write(
-                    _('cbor> %s\n')
+                    _(b'cbor> %s\n')
                     % stringutil.pprint(
                         cborutil.decodeall(body), bprefix=True, indent=2
                     )
                 )
 
-        elif action == 'close':
+        elif action == b'close':
             peer.close()
-        elif action == 'readavailable':
+        elif action == b'readavailable':
             if not stdout or not stderr:
-                raise error.Abort(_('readavailable not available on this peer'))
+                raise error.Abort(
+                    _(b'readavailable not available on this peer')
+                )
 
             stdin.close()
             stdout.read()
             stderr.read()
 
-        elif action == 'readline':
+        elif action == b'readline':
             if not stdout:
-                raise error.Abort(_('readline not available on this peer'))
+                raise error.Abort(_(b'readline not available on this peer'))
             stdout.readline()
-        elif action == 'ereadline':
+        elif action == b'ereadline':
             if not stderr:
-                raise error.Abort(_('ereadline not available on this peer'))
+                raise error.Abort(_(b'ereadline not available on this peer'))
             stderr.readline()
-        elif action.startswith('read '):
-            count = int(action.split(' ', 1)[1])
+        elif action.startswith(b'read '):
+            count = int(action.split(b' ', 1)[1])
             if not stdout:
-                raise error.Abort(_('read not available on this peer'))
+                raise error.Abort(_(b'read not available on this peer'))
             stdout.read(count)
-        elif action.startswith('eread '):
-            count = int(action.split(' ', 1)[1])
+        elif action.startswith(b'eread '):
+            count = int(action.split(b' ', 1)[1])
             if not stderr:
-                raise error.Abort(_('eread not available on this peer'))
+                raise error.Abort(_(b'eread not available on this peer'))
             stderr.read(count)
         else:
-            raise error.Abort(_('unknown action: %s') % action)
+            raise error.Abort(_(b'unknown action: %s') % action)
 
     if batchedcommands is not None:
-        raise error.Abort(_('unclosed "batchbegin" request'))
+        raise error.Abort(_(b'unclosed "batchbegin" request'))
 
     if peer:
         peer.close()
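
For orientation: the scripts consumed above (parsed by _parsewirelangblocks)
are blocks made of an unindented action line followed by indented argument
lines. A hedged sketch of a session script, with illustrative command names
and keys:

    command heads

    batchbegin
    command known
        nodes eval:[]
    command listkeys
        namespace bookmarks
    batchsubmit

    close
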
--- a/mercurial/destutil.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/destutil.py	Sun Oct 06 09:48:39 2019 -0400
@@ -76,9 +76,9 @@
 
             # get the max revision for the given successors set,
             # i.e. the 'tip' of a set
-            node = repo.revs('max(%ln)', successors).first()
+            node = repo.revs(b'max(%ln)', successors).first()
             if bookmarks.isactivewdirparent(repo):
-                movemark = repo['.'].node()
+                movemark = repo[b'.'].node()
     return node, movemark, None
 
 
@@ -102,20 +102,20 @@
     currentbranch = wc.branch()
 
     if clean:
-        currentbranch = repo['.'].branch()
+        currentbranch = repo[b'.'].branch()
 
     if currentbranch in repo.branchmap():
         heads = repo.branchheads(currentbranch)
         if heads:
-            node = repo.revs('max(.::(%ln))', heads).first()
+            node = repo.revs(b'max(.::(%ln))', heads).first()
         if bookmarks.isactivewdirparent(repo):
-            movemark = repo['.'].node()
-    elif currentbranch == 'default' and not wc.p1():
+            movemark = repo[b'.'].node()
+    elif currentbranch == b'default' and not wc.p1():
         # "null" parent belongs to "default" branch, but it doesn't exist, so
         # update to the tipmost non-closed branch head
-        node = repo.revs('max(head() and not closed())').first()
+        node = repo.revs(b'max(head() and not closed())').first()
     else:
-        node = repo['.'].node()
+        node = repo[b'.'].node()
     return node, movemark, None
 
 
@@ -127,29 +127,29 @@
     if currentbranch in repo.branchmap():
         # here, all descendant branch heads are closed
         heads = repo.branchheads(currentbranch, closed=True)
-        assert heads, "any branch has at least one head"
-        node = repo.revs('max(.::(%ln))', heads).first()
+        assert heads, b"any branch has at least one head"
+        node = repo.revs(b'max(.::(%ln))', heads).first()
         assert node is not None, (
-            "any revision has at least " "one descendant branch head"
+            b"any revision has at least " b"one descendant branch head"
         )
         if bookmarks.isactivewdirparent(repo):
-            movemark = repo['.'].node()
+            movemark = repo[b'.'].node()
     else:
         # here, no "default" branch, and all branches are closed
-        node = repo.lookup('tip')
-        assert node is not None, "'tip' exists even in empty repository"
+        node = repo.lookup(b'tip')
+        assert node is not None, b"'tip' exists even in empty repository"
     return node, movemark, None
 
 
 # order in which each step should be evaluated
 # steps are run until one finds a destination
-destupdatesteps = ['evolution', 'bookmark', 'branch', 'branchfallback']
+destupdatesteps = [b'evolution', b'bookmark', b'branch', b'branchfallback']
 # mapping so that extensions can easily override individual steps.
 destupdatestepmap = {
-    'evolution': _destupdateobs,
-    'bookmark': _destupdatebook,
-    'branch': _destupdatebranch,
-    'branchfallback': _destupdatebranchfallback,
+    b'evolution': _destupdateobs,
+    b'bookmark': _destupdatebook,
+    b'branch': _destupdatebranch,
+    b'branchfallback': _destupdatebranchfallback,
 }
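
The driver that walks these two tables is elided between hunks; per the
comments above, steps run in order until one finds a destination, roughly
like this sketch (exact signature assumed):

    for step in destupdatesteps:
        current = destupdatestepmap[step](repo, clean)
        if current is not None:
            node, movemark, activemark = current
            break  # the first step to return a destination wins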
 
 
@@ -176,108 +176,108 @@
 
 msgdestmerge = {
     # too many matching divergent bookmarks
-    'toomanybookmarks': {
-        'merge': (
+    b'toomanybookmarks': {
+        b'merge': (
             _(
-                "multiple matching bookmarks to merge -"
-                " please merge with an explicit rev or bookmark"
+                b"multiple matching bookmarks to merge -"
+                b" please merge with an explicit rev or bookmark"
             ),
-            _("run 'hg heads' to see all heads"),
+            _(b"run 'hg heads' to see all heads"),
         ),
-        'rebase': (
+        b'rebase': (
             _(
-                "multiple matching bookmarks to rebase -"
-                " please rebase to an explicit rev or bookmark"
+                b"multiple matching bookmarks to rebase -"
+                b" please rebase to an explicit rev or bookmark"
             ),
-            _("run 'hg heads' to see all heads"),
+            _(b"run 'hg heads' to see all heads"),
         ),
     },
     # no other matching divergent bookmark
-    'nootherbookmarks': {
-        'merge': (
+    b'nootherbookmarks': {
+        b'merge': (
             _(
-                "no matching bookmark to merge - "
-                "please merge with an explicit rev or bookmark"
+                b"no matching bookmark to merge - "
+                b"please merge with an explicit rev or bookmark"
             ),
-            _("run 'hg heads' to see all heads"),
+            _(b"run 'hg heads' to see all heads"),
         ),
-        'rebase': (
+        b'rebase': (
             _(
-                "no matching bookmark to rebase - "
-                "please rebase to an explicit rev or bookmark"
+                b"no matching bookmark to rebase - "
+                b"please rebase to an explicit rev or bookmark"
             ),
-            _("run 'hg heads' to see all heads"),
+            _(b"run 'hg heads' to see all heads"),
         ),
     },
     # branch has too many unbookmarked heads, no obvious destination
-    'toomanyheads': {
-        'merge': (
-            _("branch '%s' has %d heads - please merge with an explicit rev"),
-            _("run 'hg heads .' to see heads"),
+    b'toomanyheads': {
+        b'merge': (
+            _(b"branch '%s' has %d heads - please merge with an explicit rev"),
+            _(b"run 'hg heads .' to see heads"),
         ),
-        'rebase': (
-            _("branch '%s' has %d heads - please rebase to an explicit rev"),
-            _("run 'hg heads .' to see heads"),
+        b'rebase': (
+            _(b"branch '%s' has %d heads - please rebase to an explicit rev"),
+            _(b"run 'hg heads .' to see heads"),
         ),
     },
     # branch has no other unbookmarked heads
-    'bookmarkedheads': {
-        'merge': (
-            _("heads are bookmarked - please merge with an explicit rev"),
-            _("run 'hg heads' to see all heads"),
+    b'bookmarkedheads': {
+        b'merge': (
+            _(b"heads are bookmarked - please merge with an explicit rev"),
+            _(b"run 'hg heads' to see all heads"),
         ),
-        'rebase': (
-            _("heads are bookmarked - please rebase to an explicit rev"),
-            _("run 'hg heads' to see all heads"),
+        b'rebase': (
+            _(b"heads are bookmarked - please rebase to an explicit rev"),
+            _(b"run 'hg heads' to see all heads"),
         ),
     },
     # branch has just a single head, but there are other branches
-    'nootherbranchheads': {
-        'merge': (
-            _("branch '%s' has one head - please merge with an explicit rev"),
-            _("run 'hg heads' to see all heads"),
+    b'nootherbranchheads': {
+        b'merge': (
+            _(b"branch '%s' has one head - please merge with an explicit rev"),
+            _(b"run 'hg heads' to see all heads"),
         ),
-        'rebase': (
-            _("branch '%s' has one head - please rebase to an explicit rev"),
-            _("run 'hg heads' to see all heads"),
+        b'rebase': (
+            _(b"branch '%s' has one head - please rebase to an explicit rev"),
+            _(b"run 'hg heads' to see all heads"),
         ),
     },
     # repository has a single head
-    'nootherheads': {
-        'merge': (_('nothing to merge'), None),
-        'rebase': (_('nothing to rebase'), None),
+    b'nootherheads': {
+        b'merge': (_(b'nothing to merge'), None),
+        b'rebase': (_(b'nothing to rebase'), None),
     },
     # repository has a single head and we are not on it
-    'nootherheadsbehind': {
-        'merge': (_('nothing to merge'), _("use 'hg update' instead")),
-        'rebase': (_('nothing to rebase'), _("use 'hg update' instead")),
+    b'nootherheadsbehind': {
+        b'merge': (_(b'nothing to merge'), _(b"use 'hg update' instead")),
+        b'rebase': (_(b'nothing to rebase'), _(b"use 'hg update' instead")),
     },
     # We are not on a head
-    'notatheads': {
-        'merge': (
-            _('working directory not at a head revision'),
-            _("use 'hg update' or merge with an explicit revision"),
+    b'notatheads': {
+        b'merge': (
+            _(b'working directory not at a head revision'),
+            _(b"use 'hg update' or merge with an explicit revision"),
         ),
-        'rebase': (
-            _('working directory not at a head revision'),
-            _("use 'hg update' or rebase to an explicit revision"),
+        b'rebase': (
+            _(b'working directory not at a head revision'),
+            _(b"use 'hg update' or rebase to an explicit revision"),
         ),
     },
-    'emptysourceset': {
-        'merge': (_('source set is empty'), None),
-        'rebase': (_('source set is empty'), None),
+    b'emptysourceset': {
+        b'merge': (_(b'source set is empty'), None),
+        b'rebase': (_(b'source set is empty'), None),
     },
-    'multiplebranchessourceset': {
-        'merge': (_('source set is rooted in multiple branches'), None),
-        'rebase': (
-            _('rebaseset is rooted in multiple named branches'),
-            _('specify an explicit destination with --dest'),
+    b'multiplebranchessourceset': {
+        b'merge': (_(b'source set is rooted in multiple branches'), None),
+        b'rebase': (
+            _(b'rebaseset is rooted in multiple named branches'),
+            _(b'specify an explicit destination with --dest'),
         ),
     },
 }
 
 
-def _destmergebook(repo, action='merge', sourceset=None, destspace=None):
+def _destmergebook(repo, action=b'merge', sourceset=None, destspace=None):
     """find merge destination in the active bookmark case"""
     node = None
     bmheads = bookmarks.headsforactive(repo)
@@ -288,17 +288,17 @@
         else:
             node = bmheads[0]
     elif len(bmheads) > 2:
-        msg, hint = msgdestmerge['toomanybookmarks'][action]
+        msg, hint = msgdestmerge[b'toomanybookmarks'][action]
         raise error.ManyMergeDestAbort(msg, hint=hint)
     elif len(bmheads) <= 1:
-        msg, hint = msgdestmerge['nootherbookmarks'][action]
+        msg, hint = msgdestmerge[b'nootherbookmarks'][action]
         raise error.NoMergeDestAbort(msg, hint=hint)
     assert node is not None
     return node
 
 
 def _destmergebranch(
-    repo, action='merge', sourceset=None, onheadcheck=True, destspace=None
+    repo, action=b'merge', sourceset=None, onheadcheck=True, destspace=None
 ):
     """find merge destination based on branch heads"""
     node = None
@@ -307,36 +307,36 @@
         sourceset = [repo[repo.dirstate.p1()].rev()]
         branch = repo.dirstate.branch()
     elif not sourceset:
-        msg, hint = msgdestmerge['emptysourceset'][action]
+        msg, hint = msgdestmerge[b'emptysourceset'][action]
         raise error.NoMergeDestAbort(msg, hint=hint)
     else:
         branch = None
-        for ctx in repo.set('roots(%ld::%ld)', sourceset, sourceset):
+        for ctx in repo.set(b'roots(%ld::%ld)', sourceset, sourceset):
             if branch is not None and ctx.branch() != branch:
-                msg, hint = msgdestmerge['multiplebranchessourceset'][action]
+                msg, hint = msgdestmerge[b'multiplebranchessourceset'][action]
                 raise error.ManyMergeDestAbort(msg, hint=hint)
             branch = ctx.branch()
 
     bheads = repo.branchheads(branch)
-    onhead = repo.revs('%ld and %ln', sourceset, bheads)
+    onhead = repo.revs(b'%ld and %ln', sourceset, bheads)
     if onheadcheck and not onhead:
         # Case A: working copy is not on a head. (merge only)
         #
         # This is probably a user mistake. We bail out, pointing at
         # 'hg update'.
         if len(repo.heads()) <= 1:
-            msg, hint = msgdestmerge['nootherheadsbehind'][action]
+            msg, hint = msgdestmerge[b'nootherheadsbehind'][action]
         else:
-            msg, hint = msgdestmerge['notatheads'][action]
+            msg, hint = msgdestmerge[b'notatheads'][action]
         raise error.Abort(msg, hint=hint)
     # remove heads that are descendants of the source from the set
-    bheads = list(repo.revs('%ln - (%ld::)', bheads, sourceset))
+    bheads = list(repo.revs(b'%ln - (%ld::)', bheads, sourceset))
     # filters out bookmarked heads
-    nbhs = list(repo.revs('%ld - bookmark()', bheads))
+    nbhs = list(repo.revs(b'%ld - bookmark()', bheads))
 
     if destspace is not None:
         # restrict search space
         # used in the 'hg pull --rebase' case, see issue 5214.
-        nbhs = list(repo.revs('%ld and %ld', destspace, nbhs))
+        nbhs = list(repo.revs(b'%ld and %ld', destspace, nbhs))
 
     if len(nbhs) > 1:
         # Case B: There is more than 1 other anonymous heads
@@ -344,7 +344,7 @@
         # This means that there will be more than 1 candidate. This is
         # ambiguous. We abort, asking the user to pick an explicit destination
         # instead.
-        msg, hint = msgdestmerge['toomanyheads'][action]
+        msg, hint = msgdestmerge[b'toomanyheads'][action]
         msg %= (branch, len(bheads) + 1)
         raise error.ManyMergeDestAbort(msg, hint=hint)
     elif not nbhs:
@@ -353,16 +353,16 @@
         # This means that there is no natural candidate to merge with.
         # We abort, with various messages for various cases.
         if bheads:
-            msg, hint = msgdestmerge['bookmarkedheads'][action]
+            msg, hint = msgdestmerge[b'bookmarkedheads'][action]
         elif len(repo.heads()) > 1:
-            msg, hint = msgdestmerge['nootherbranchheads'][action]
+            msg, hint = msgdestmerge[b'nootherbranchheads'][action]
             msg %= branch
         elif not onhead:
             # if 'onheadcheck == False' (rebase case),
             # this was not caught in Case A.
-            msg, hint = msgdestmerge['nootherheadsbehind'][action]
+            msg, hint = msgdestmerge[b'nootherheadsbehind'][action]
         else:
-            msg, hint = msgdestmerge['nootherheads'][action]
+            msg, hint = msgdestmerge[b'nootherheads'][action]
         raise error.NoMergeDestAbort(msg, hint=hint)
     else:
         node = nbhs[0]
@@ -371,7 +371,7 @@
 
 
 def destmerge(
-    repo, action='merge', sourceset=None, onheadcheck=True, destspace=None
+    repo, action=b'merge', sourceset=None, onheadcheck=True, destspace=None
 ):
     """return the default destination for a merge
 
@@ -398,14 +398,16 @@
 
 def desthistedit(ui, repo):
     """Default base revision to edit for `hg histedit`."""
-    default = ui.config('histedit', 'defaultrev')
+    default = ui.config(b'histedit', b'defaultrev')
 
     if default is None:
         revs = stack.getstack(repo)
     elif default:
         revs = scmutil.revrange(repo, [default])
     else:
-        raise error.Abort(_("config option histedit.defaultrev can't be empty"))
+        raise error.Abort(
+            _(b"config option histedit.defaultrev can't be empty")
+        )
 
     if revs:
         # Take the first revision of the revset as the root
@@ -422,11 +424,11 @@
 def _statusotherbook(ui, repo):
     bmheads = bookmarks.headsforactive(repo)
     curhead = repo._bookmarks[repo._activebookmark]
-    if repo.revs('%n and parents()', curhead):
+    if repo.revs(b'%n and parents()', curhead):
         # we are on the active bookmark
         bmheads = [b for b in bmheads if curhead != b]
         if bmheads:
-            msg = _('%i other divergent bookmarks for "%s"\n')
+            msg = _(b'%i other divergent bookmarks for "%s"\n')
             ui.status(msg % (len(bmheads), repo._activebookmark))
 
 
@@ -434,7 +436,7 @@
     currentbranch = repo.dirstate.branch()
     allheads = repo.branchheads(currentbranch, closed=True)
     heads = repo.branchheads(currentbranch)
-    if repo.revs('%ln and parents()', allheads):
+    if repo.revs(b'%ln and parents()', allheads):
         # we are on a head, even though it might be closed
         #
         #  on closed otherheads
@@ -444,35 +446,35 @@
         #      x        0       there is only one non-closed branch head
         #               N       there are some non-closed branch heads
         #  ========= ==========
-        otherheads = repo.revs('%ln - parents()', heads)
-        if repo['.'].closesbranch():
+        otherheads = repo.revs(b'%ln - parents()', heads)
+        if repo[b'.'].closesbranch():
             ui.warn(
                 _(
-                    'no open descendant heads on branch "%s", '
-                    'updating to a closed head\n'
+                    b'no open descendant heads on branch "%s", '
+                    b'updating to a closed head\n'
                 )
                 % currentbranch
             )
             if otherheads:
                 ui.warn(
                     _(
-                        "(committing will reopen the head, "
-                        "use 'hg heads .' to see %i other heads)\n"
+                        b"(committing will reopen the head, "
+                        b"use 'hg heads .' to see %i other heads)\n"
                     )
                     % (len(otherheads))
                 )
             else:
                 ui.warn(
-                    _('(committing will reopen branch "%s")\n') % currentbranch
+                    _(b'(committing will reopen branch "%s")\n') % currentbranch
                 )
         elif otherheads:
-            curhead = repo['.']
+            curhead = repo[b'.']
             ui.status(
-                _('updated to "%s: %s"\n')
-                % (curhead, curhead.description().split('\n')[0])
+                _(b'updated to "%s: %s"\n')
+                % (curhead, curhead.description().split(b'\n')[0])
             )
             ui.status(
-                _('%i other heads for branch "%s"\n')
+                _(b'%i other heads for branch "%s"\n')
                 % (len(otherheads), currentbranch)
             )
 
--- a/mercurial/diffhelper.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/diffhelper.py	Sun Oct 06 09:48:39 2019 -0400
@@ -31,18 +31,18 @@
         for i in pycompat.xrange(num):
             s = fp.readline()
             if not s:
-                raise error.ParseError(_('incomplete hunk'))
-            if s == "\\ No newline at end of file\n":
+                raise error.ParseError(_(b'incomplete hunk'))
+            if s == b"\\ No newline at end of file\n":
                 fixnewline(hunk, a, b)
                 continue
-            if s == '\n' or s == '\r\n':
+            if s == b'\n' or s == b'\r\n':
                 # Some patches may be missing the control char
                 # on empty lines. Supply a leading space.
-                s = ' ' + s
+                s = b' ' + s
             hunk.append(s)
-            if s.startswith('+'):
+            if s.startswith(b'+'):
                 b.append(s[1:])
-            elif s.startswith('-'):
+            elif s.startswith(b'-'):
                 a.append(s)
             else:
                 b.append(s[1:])
@@ -53,14 +53,14 @@
     """Fix up the last lines of a and b when the patch has no newline at EOF"""
     l = hunk[-1]
     # tolerate CRLF in last line
-    if l.endswith('\r\n'):
+    if l.endswith(b'\r\n'):
         hline = l[:-2]
     else:
         hline = l[:-1]
 
-    if hline.startswith((' ', '+')):
+    if hline.startswith((b' ', b'+')):
         b[-1] = hline[1:]
-    if hline.startswith((' ', '-')):
+    if hline.startswith((b' ', b'-')):
         a[-1] = hline
     hunk[-1] = hline
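
A worked example of fixnewline, using hypothetical hunk data:

    # the final added line has no newline in the target file, so the diff
    # carried a "\ No newline at end of file" marker right after it
    hunk = [b'-old\n', b'+new\n']
    a = [b'-old\n']
    b = [b'new\n']
    fixnewline(hunk, a, b)
    # hunk[-1] == b'+new' and b[-1] == b'new'; a is untouched because the
    # last hunk line starts with b'+', not b' ' or b'-'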
 
--- a/mercurial/diffutil.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/diffutil.py	Sun Oct 06 09:48:39 2019 -0400
@@ -18,7 +18,7 @@
 
 
 def diffallopts(
-    ui, opts=None, untrusted=False, section='diff', configprefix=''
+    ui, opts=None, untrusted=False, section=b'diff', configprefix=b''
 ):
     '''return diffopts with all features supported and parsed'''
     return difffeatureopts(
@@ -37,11 +37,11 @@
     ui,
     opts=None,
     untrusted=False,
-    section='diff',
+    section=b'diff',
     git=False,
     whitespace=False,
     formatchanging=False,
-    configprefix='',
+    configprefix=b'',
 ):
     '''return diffopts with only opted-in features parsed
 
@@ -72,24 +72,24 @@
 
     # core options, expected to be understood by every diff parser
     buildopts = {
-        'nodates': get('nodates'),
-        'showfunc': get('show_function', 'showfunc'),
-        'context': get('unified', getter=ui.config),
+        b'nodates': get(b'nodates'),
+        b'showfunc': get(b'show_function', b'showfunc'),
+        b'context': get(b'unified', getter=ui.config),
     }
-    buildopts['xdiff'] = ui.configbool('experimental', 'xdiff')
+    buildopts[b'xdiff'] = ui.configbool(b'experimental', b'xdiff')
 
     if git:
-        buildopts['git'] = get('git')
+        buildopts[b'git'] = get(b'git')
 
         # since this is in the experimental section, we need to call
         # ui.configbool directly
-        buildopts['showsimilarity'] = ui.configbool(
-            'experimental', 'extendedheader.similarity'
+        buildopts[b'showsimilarity'] = ui.configbool(
+            b'experimental', b'extendedheader.similarity'
         )
 
         # need to inspect the ui object instead of using get() since we want to
         # test for an int
-        hconf = ui.config('experimental', 'extendedheader.index')
+        hconf = ui.config(b'experimental', b'extendedheader.index')
         if hconf is not None:
             hlen = None
             try:
@@ -97,38 +97,40 @@
                 # word (e.g. short, full, none)
                 hlen = int(hconf)
                 if hlen < 0 or hlen > 40:
-                    msg = _("invalid length for extendedheader.index: '%d'\n")
+                    msg = _(b"invalid length for extendedheader.index: '%d'\n")
                     ui.warn(msg % hlen)
             except ValueError:
                 # default value
-                if hconf == 'short' or hconf == '':
+                if hconf == b'short' or hconf == b'':
                     hlen = 12
-                elif hconf == 'full':
+                elif hconf == b'full':
                     hlen = 40
-                elif hconf != 'none':
-                    msg = _("invalid value for extendedheader.index: '%s'\n")
+                elif hconf != b'none':
+                    msg = _(b"invalid value for extendedheader.index: '%s'\n")
                     ui.warn(msg % hconf)
             finally:
-                buildopts['index'] = hlen
+                buildopts[b'index'] = hlen
 
     if whitespace:
-        buildopts['ignorews'] = get('ignore_all_space', 'ignorews')
-        buildopts['ignorewsamount'] = get(
-            'ignore_space_change', 'ignorewsamount'
+        buildopts[b'ignorews'] = get(b'ignore_all_space', b'ignorews')
+        buildopts[b'ignorewsamount'] = get(
+            b'ignore_space_change', b'ignorewsamount'
         )
-        buildopts['ignoreblanklines'] = get(
-            'ignore_blank_lines', 'ignoreblanklines'
+        buildopts[b'ignoreblanklines'] = get(
+            b'ignore_blank_lines', b'ignoreblanklines'
         )
-        buildopts['ignorewseol'] = get('ignore_space_at_eol', 'ignorewseol')
+        buildopts[b'ignorewseol'] = get(b'ignore_space_at_eol', b'ignorewseol')
     if formatchanging:
-        buildopts['text'] = opts and opts.get('text')
-        binary = None if opts is None else opts.get('binary')
-        buildopts['nobinary'] = (
+        buildopts[b'text'] = opts and opts.get(b'text')
+        binary = None if opts is None else opts.get(b'binary')
+        buildopts[b'nobinary'] = (
             not binary
             if binary is not None
-            else get('nobinary', forceplain=False)
+            else get(b'nobinary', forceplain=False)
         )
-        buildopts['noprefix'] = get('noprefix', forceplain=False)
-        buildopts['worddiff'] = get('word_diff', 'word-diff', forceplain=False)
+        buildopts[b'noprefix'] = get(b'noprefix', forceplain=False)
+        buildopts[b'worddiff'] = get(
+            b'word_diff', b'word-diff', forceplain=False
+        )
 
     return mdiff.diffopts(**pycompat.strkwargs(buildopts))
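
Usage sketch for the two entry points above, assuming a configured ui object:

    opts = diffallopts(ui)  # every feature group parsed
    # or opt into feature groups explicitly:
    opts = difffeatureopts(ui, git=True, whitespace=True, formatchanging=True)
    # both return an mdiff.diffopts instance built from buildopts
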
--- a/mercurial/dirstate.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/dirstate.py	Sun Oct 06 09:48:39 2019 -0400
@@ -87,8 +87,8 @@
         self._ui = ui
         self._filecache = {}
         self._parentwriters = 0
-        self._filename = 'dirstate'
-        self._pendingfilename = '%s.pending' % self._filename
+        self._filename = b'dirstate'
+        self._pendingfilename = b'%s.pending' % self._filename
         self._plchangecallbacks = {}
         self._origpl = None
         self._updatedfiles = set()
@@ -140,14 +140,14 @@
         # cache to keep the lookup fast.)
         return self._sparsematchfn()
 
-    @repocache('branch')
+    @repocache(b'branch')
     def _branch(self):
         try:
-            return self._opener.read("branch").strip() or "default"
+            return self._opener.read(b"branch").strip() or b"default"
         except IOError as inst:
             if inst.errno != errno.ENOENT:
                 raise
-            return "default"
+            return b"default"
 
     @property
     def _pl(self):
@@ -156,18 +156,18 @@
     def hasdir(self, d):
         return self._map.hastrackeddir(d)
 
-    @rootcache('.hgignore')
+    @rootcache(b'.hgignore')
     def _ignore(self):
         files = self._ignorefiles()
         if not files:
             return matchmod.never()
 
-        pats = ['include:%s' % f for f in files]
-        return matchmod.match(self._root, '', [], pats, warn=self._ui.warn)
+        pats = [b'include:%s' % f for f in files]
+        return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
 
     @propertycache
     def _slash(self):
-        return self._ui.configbool('ui', 'slash') and pycompat.ossep != '/'
+        return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'
 
     @propertycache
     def _checklink(self):
@@ -179,7 +179,7 @@
 
     @propertycache
     def _checkcase(self):
-        return not util.fscasesensitive(self._join('.hg'))
+        return not util.fscasesensitive(self._join(b'.hg'))
 
     def _join(self, f):
         # much faster than os.path.join()
@@ -193,12 +193,12 @@
                 try:
                     st = os.lstat(self._join(x))
                     if util.statislink(st):
-                        return 'l'
+                        return b'l'
                     if util.statisexec(st):
-                        return 'x'
+                        return b'x'
                 except OSError:
                     pass
-                return ''
+                return b''
 
             return f
 
@@ -207,20 +207,20 @@
 
             def f(x):
                 if os.path.islink(self._join(x)):
-                    return 'l'
-                if 'x' in fallback(x):
-                    return 'x'
-                return ''
+                    return b'l'
+                if b'x' in fallback(x):
+                    return b'x'
+                return b''
 
             return f
         if self._checkexec:
 
             def f(x):
-                if 'l' in fallback(x):
-                    return 'l'
+                if b'l' in fallback(x):
+                    return b'l'
                 if util.isexec(self._join(x)):
-                    return 'x'
-                return ''
+                    return b'x'
+                return b''
 
             return f
         else:
@@ -229,7 +229,7 @@
     @propertycache
     def _cwd(self):
         # internal config: ui.forcecwd
-        forcecwd = self._ui.config('ui', 'forcecwd')
+        forcecwd = self._ui.config(b'ui', b'forcecwd')
         if forcecwd:
             return forcecwd
         return encoding.getcwd()
@@ -243,7 +243,7 @@
         '''
         cwd = self._cwd
         if cwd == self._root:
-            return ''
+            return b''
         # self._root ends with a path separator if self._root is '/' or 'C:\'
         rootsep = self._root
         if not util.endswithsep(rootsep):
@@ -272,7 +272,7 @@
           a  marked for addition
           ?  not tracked
         '''
-        return self._map.get(key, ("?",))[0]
+        return self._map.get(key, (b"?",))[0]
 
     def __contains__(self, key):
         return key in self._map
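
Sketch of the lookup contract implemented above: __getitem__ never raises,
untracked paths fall back to b'?', and the single-character states used
throughout this file are b'n' (normal), b'a' (added), b'r' (removed) and
b'm' (merge). The path below is illustrative:

    state = dirstate[b'some/file']
    if state == b'?':
        pass  # not tracked
    elif state == b'a':
        pass  # marked for addition
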
@@ -308,8 +308,8 @@
         """
         if self._parentwriters == 0:
             raise ValueError(
-                "cannot set dirstate parent outside of "
-                "dirstate.parentchange context manager"
+                b"cannot set dirstate parent outside of "
+                b"dirstate.parentchange context manager"
             )
 
         self._dirty = True
@@ -328,13 +328,13 @@
                     continue
 
                 # Discard 'm' markers when moving away from a merge state
-                if s[0] == 'm':
+                if s[0] == b'm':
                     source = self._map.copymap.get(f)
                     if source:
                         copies[f] = source
                     self.normallookup(f)
                 # Also fix up otherparent markers
-                elif s[0] == 'n' and s[2] == -2:
+                elif s[0] == b'n' and s[2] == -2:
                     source = self._map.copymap.get(f)
                     if source:
                         copies[f] = source
@@ -343,14 +343,14 @@
 
     def setbranch(self, branch):
         self.__class__._branch.set(self, encoding.fromlocal(branch))
-        f = self._opener('branch', 'w', atomictemp=True, checkambig=True)
+        f = self._opener(b'branch', b'w', atomictemp=True, checkambig=True)
         try:
-            f.write(self._branch + '\n')
+            f.write(self._branch + b'\n')
             f.close()
 
             # make sure filecache has the correct stat info for _branch after
             # replacing the underlying file
-            ce = self._filecache['_branch']
+            ce = self._filecache[b'_branch']
             if ce:
                 ce.refresh()
         except:  # re-raises
@@ -393,20 +393,20 @@
 
     def _addpath(self, f, state, mode, size, mtime):
         oldstate = self[f]
-        if state == 'a' or oldstate == 'r':
+        if state == b'a' or oldstate == b'r':
             scmutil.checkfilename(f)
             if self._map.hastrackeddir(f):
                 raise error.Abort(
-                    _('directory %r already in dirstate') % pycompat.bytestr(f)
+                    _(b'directory %r already in dirstate') % pycompat.bytestr(f)
                 )
             # shadows
             for d in util.finddirs(f):
                 if self._map.hastrackeddir(d):
                     break
                 entry = self._map.get(d)
-                if entry is not None and entry[0] != 'r':
+                if entry is not None and entry[0] != b'r':
                     raise error.Abort(
-                        _('file %r in dirstate clashes with %r')
+                        _(b'file %r in dirstate clashes with %r')
                         % (pycompat.bytestr(d), pycompat.bytestr(f))
                     )
         self._dirty = True
@@ -430,7 +430,7 @@
             mode = s.st_mode
             size = s.st_size
             mtime = s[stat.ST_MTIME]
-        self._addpath(f, 'n', mode, size & _rangemask, mtime & _rangemask)
+        self._addpath(f, b'n', mode, size & _rangemask, mtime & _rangemask)
         self._map.copymap.pop(f, None)
         if f in self._map.nonnormalset:
             self._map.nonnormalset.remove(f)
@@ -448,7 +448,7 @@
             # being removed, restore that state.
             entry = self._map.get(f)
             if entry is not None:
-                if entry[0] == 'r' and entry[2] in (-1, -2):
+                if entry[0] == b'r' and entry[2] in (-1, -2):
                     source = self._map.copymap.get(f)
                     if entry[2] == -1:
                         self.merge(f)
@@ -457,28 +457,28 @@
                     if source:
                         self.copy(source, f)
                     return
-                if entry[0] == 'm' or entry[0] == 'n' and entry[2] == -2:
+                if entry[0] == b'm' or entry[0] == b'n' and entry[2] == -2:
                     return
-        self._addpath(f, 'n', 0, -1, -1)
+        self._addpath(f, b'n', 0, -1, -1)
         self._map.copymap.pop(f, None)
 
     def otherparent(self, f):
         '''Mark as coming from the other parent, always dirty.'''
         if self._pl[1] == nullid:
             raise error.Abort(
-                _("setting %r to other parent " "only allowed in merges") % f
+                _(b"setting %r to other parent " b"only allowed in merges") % f
             )
-        if f in self and self[f] == 'n':
+        if f in self and self[f] == b'n':
             # merge-like
-            self._addpath(f, 'm', 0, -2, -1)
+            self._addpath(f, b'm', 0, -2, -1)
         else:
             # add-like
-            self._addpath(f, 'n', 0, -2, -1)
+            self._addpath(f, b'n', 0, -2, -1)
         self._map.copymap.pop(f, None)
 
     def add(self, f):
         '''Mark a file added.'''
-        self._addpath(f, 'a', 0, -1, -1)
+        self._addpath(f, b'a', 0, -1, -1)
         self._map.copymap.pop(f, None)
 
     def remove(self, f):
@@ -490,9 +490,9 @@
             entry = self._map.get(f)
             if entry is not None:
                 # backup the previous state
-                if entry[0] == 'm':  # merge
+                if entry[0] == b'm':  # merge
                     size = -1
-                elif entry[0] == 'n' and entry[2] == -2:  # other parent
+                elif entry[0] == b'n' and entry[2] == -2:  # other parent
                     size = -2
                     self._map.otherparentset.add(f)
         self._updatedfiles.add(f)
@@ -519,21 +519,21 @@
             exists = os.path.lexists(os.path.join(self._root, path))
         if not exists:
             # Maybe a path component exists
-            if not ignoremissing and '/' in path:
-                d, f = path.rsplit('/', 1)
+            if not ignoremissing and b'/' in path:
+                d, f = path.rsplit(b'/', 1)
                 d = self._normalize(d, False, ignoremissing, None)
-                folded = d + "/" + f
+                folded = d + b"/" + f
             else:
                 # No path components, preserve original case
                 folded = path
         else:
             # recursively normalize leading directory components
             # against dirstate
-            if '/' in normed:
-                d, f = normed.rsplit('/', 1)
+            if b'/' in normed:
+                d, f = normed.rsplit(b'/', 1)
                 d = self._normalize(d, False, ignoremissing, True)
-                r = self._root + "/" + d
-                folded = d + "/" + util.fspath(f, r)
+                r = self._root + b"/" + d
+                folded = d + b"/" + util.fspath(f, r)
             else:
                 folded = util.fspath(normed, self._root)
             storemap[normed] = folded
@@ -645,14 +645,14 @@
 
             # delay writing in-memory changes out
             tr.addfilegenerator(
-                'dirstate',
+                b'dirstate',
                 (self._filename,),
                 self._writedirstate,
-                location='plain',
+                location=b'plain',
             )
             return
 
-        st = self._opener(filename, "w", atomictemp=True, checkambig=True)
+        st = self._opener(filename, b"w", atomictemp=True, checkambig=True)
         self._writedirstate(st)
 
     def addparentchangecallback(self, category, callback):
@@ -678,12 +678,12 @@
 
         # a large enough 'delaywrite' prevents 'pack_dirstate' from dropping
         # the timestamp of each entry in dirstate, because of 'now > mtime'
-        delaywrite = self._ui.configint('debug', 'dirstate.delaywrite')
+        delaywrite = self._ui.configint(b'debug', b'dirstate.delaywrite')
         if delaywrite > 0:
             # do we have any files to delay for?
             items = self._map.iteritems()
             for f, e in items:
-                if e[0] == 'n' and e[3] == now:
+                if e[0] == b'n' and e[3] == now:
                     import time  # to avoid useless import
 
                     # rather than sleep n seconds, sleep until the next
@@ -715,10 +715,10 @@
 
     def _ignorefiles(self):
         files = []
-        if os.path.exists(self._join('.hgignore')):
-            files.append(self._join('.hgignore'))
-        for name, path in self._ui.configitems("ui"):
-            if name == 'ignore' or name.startswith('ignore.'):
+        if os.path.exists(self._join(b'.hgignore')):
+            files.append(self._join(b'.hgignore'))
+        for name, path in self._ui.configitems(b"ui"):
+            if name == b'ignore' or name.startswith(b'ignore.'):
                 # we need to use os.path.join here rather than self._join
                 # because path is arbitrary and user-specified
                 files.append(os.path.join(self._rootdir, util.expandpath(path)))
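
For context, _ignorefiles collects the per-repository .hgignore plus any
ui.ignore / ui.ignore.* entries from the configuration; an illustrative hgrc
fragment with hypothetical paths:

    [ui]
    ignore = ~/.hgignore-global
    ignore.work = /etc/hg/ignore-work
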
@@ -733,18 +733,18 @@
                 i, self._ui.warn, sourceinfo=True
             )
             for pattern, lineno, line in patterns:
-                kind, p = matchmod._patsplit(pattern, 'glob')
-                if kind == "subinclude":
+                kind, p = matchmod._patsplit(pattern, b'glob')
+                if kind == b"subinclude":
                     if p not in visited:
                         files.append(p)
                     continue
                 m = matchmod.match(
-                    self._root, '', [], [pattern], warn=self._ui.warn
+                    self._root, b'', [], [pattern], warn=self._ui.warn
                 )
                 if m(f):
                     return (i, lineno, line)
             visited.add(i)
-        return (None, -1, "")
+        return (None, -1, b"")
 
     def _walkexplicit(self, match, subrepos):
         '''Get stat data about the files explicitly specified by match.
@@ -757,18 +757,18 @@
           directories and that were not found.'''
 
         def badtype(mode):
-            kind = _('unknown')
+            kind = _(b'unknown')
             if stat.S_ISCHR(mode):
-                kind = _('character device')
+                kind = _(b'character device')
             elif stat.S_ISBLK(mode):
-                kind = _('block device')
+                kind = _(b'block device')
             elif stat.S_ISFIFO(mode):
-                kind = _('fifo')
+                kind = _(b'fifo')
             elif stat.S_ISSOCK(mode):
-                kind = _('socket')
+                kind = _(b'socket')
             elif stat.S_ISDIR(mode):
-                kind = _('directory')
-            return _('unsupported file type (type is %s)') % kind
+                kind = _(b'directory')
+            return _(b'unsupported file type (type is %s)') % kind
 
         matchedir = match.explicitdir
         badfn = match.bad
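
badtype above only has to name the kinds the dirstate cannot track;
regular files and symlinks are dealt with by the surrounding walk. A
self-contained sketch of the same stat-module classification, where the
function name and the two supported cases are illustrative additions:

    import os
    import stat

    def filekind(path):
        # classify a path the way badtype() does, from the st_mode bits
        mode = os.lstat(path).st_mode
        if stat.S_ISREG(mode):
            return 'regular file'
        if stat.S_ISLNK(mode):
            return 'symlink'
        if stat.S_ISCHR(mode):
            return 'character device'
        if stat.S_ISBLK(mode):
            return 'block device'
        if stat.S_ISFIFO(mode):
            return 'fifo'
        if stat.S_ISSOCK(mode):
            return 'socket'
        if stat.S_ISDIR(mode):
            return 'directory'
        return 'unknown'
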
@@ -793,7 +793,7 @@
         subrepos.sort()
         i, j = 0, 0
         while i < len(files) and j < len(subrepos):
-            subpath = subrepos[j] + "/"
+            subpath = subrepos[j] + b"/"
             if files[i] < subpath:
                 i += 1
                 continue
@@ -801,13 +801,13 @@
                 del files[i]
             j += 1
 
-        if not files or '' in files:
-            files = ['']
+        if not files or b'' in files:
+            files = [b'']
             # constructing the foldmap is expensive, so don't do it for the
             # common case where files is ['']
             normalize = None
         results = dict.fromkeys(subrepos)
-        results['.hg'] = None
+        results[b'.hg'] = None
 
         for ff in files:
             if normalize:
@@ -849,7 +849,7 @@
         # aren't filtered here because they will be tested later.
         if match.anypats():
             for f in list(results):
-                if f == '.hg' or f in subrepos:
+                if f == b'.hg' or f in subrepos:
                     # keep sentinel to disable further out-of-repo walks
                     continue
                 if not match(f):
@@ -952,11 +952,11 @@
                 visitentries = match.visitchildrenset(nd)
                 if not visitentries:
                     continue
-                if visitentries == 'this' or visitentries == 'all':
+                if visitentries == b'this' or visitentries == b'all':
                     visitentries = None
                 skip = None
-                if nd != '':
-                    skip = '.hg'
+                if nd != b'':
+                    skip = b'.hg'
                 try:
                     entries = listdir(join(nd), stat=True, skip=skip)
                 except OSError as inst:
@@ -982,10 +982,10 @@
                         # interested in comparing it to files currently in the
                         # dmap -- therefore normalizefile is enough
                         nf = normalizefile(
-                            nd and (nd + "/" + f) or f, True, True
+                            nd and (nd + b"/" + f) or f, True, True
                         )
                     else:
-                        nf = nd and (nd + "/" + f) or f
+                        nf = nd and (nd + b"/" + f) or f
                     if nf not in results:
                         if kind == dirkind:
                             if not ignore(nf):
@@ -1016,7 +1016,7 @@
 
         for s in subrepos:
             del results[s]
-        del results['.hg']
+        del results[b'.hg']
 
         # step 3: visit remaining files from dmap
         if not skipstep3 and not exact:
@@ -1134,9 +1134,9 @@
             size = t[2]
             time = t[3]
 
-            if not st and state in "nma":
+            if not st and state in b"nma":
                 dadd(fn)
-            elif state == 'n':
+            elif state == b'n':
                 if (
                     size >= 0
                     and (
@@ -1160,11 +1160,11 @@
                     ladd(fn)
                 elif listclean:
                     cadd(fn)
-            elif state == 'm':
+            elif state == b'm':
                 madd(fn)
-            elif state == 'a':
+            elif state == b'a':
                 aadd(fn)
-            elif state == 'r':
+            elif state == b'r':
                 radd(fn)
 
         return (
@@ -1208,7 +1208,7 @@
         # output file will be used to create backup of dirstate at this point.
         if self._dirty or not self._opener.exists(filename):
             self._writedirstate(
-                self._opener(filename, "w", atomictemp=True, checkambig=True)
+                self._opener(filename, b"w", atomictemp=True, checkambig=True)
             )
 
         if tr:
@@ -1216,16 +1216,16 @@
             # changes written out above, even if dirstate is never
             # changed after this
             tr.addfilegenerator(
-                'dirstate',
+                b'dirstate',
                 (self._filename,),
                 self._writedirstate,
-                location='plain',
+                location=b'plain',
             )
 
             # ensure that pending file written above is unlinked at
             # failure, even if tr.writepending isn't invoked until the
             # end of this transaction
-            tr.registertmp(filename, location='plain')
+            tr.registertmp(filename, location=b'plain')
 
         self._opener.tryunlink(backupname)
         # hardlink backup is okay because _writedirstate is always called
@@ -1291,7 +1291,7 @@
         self._ui = ui
         self._opener = opener
         self._root = root
-        self._filename = 'dirstate'
+        self._filename = b'dirstate'
 
         self._parents = None
         self._dirtyparents = False
@@ -1315,12 +1315,12 @@
         self._map.clear()
         self.copymap.clear()
         self.setparents(nullid, nullid)
-        util.clearcachedproperty(self, "_dirs")
-        util.clearcachedproperty(self, "_alldirs")
-        util.clearcachedproperty(self, "filefoldmap")
-        util.clearcachedproperty(self, "dirfoldmap")
-        util.clearcachedproperty(self, "nonnormalset")
-        util.clearcachedproperty(self, "otherparentset")
+        util.clearcachedproperty(self, b"_dirs")
+        util.clearcachedproperty(self, b"_alldirs")
+        util.clearcachedproperty(self, b"filefoldmap")
+        util.clearcachedproperty(self, b"dirfoldmap")
+        util.clearcachedproperty(self, b"nonnormalset")
+        util.clearcachedproperty(self, b"otherparentset")
 
     def items(self):
         return self._map.iteritems()
@@ -1352,12 +1352,12 @@
 
     def addfile(self, f, oldstate, state, mode, size, mtime):
         """Add a tracked file to the dirstate."""
-        if oldstate in "?r" and r"_dirs" in self.__dict__:
+        if oldstate in b"?r" and r"_dirs" in self.__dict__:
             self._dirs.addpath(f)
-        if oldstate == "?" and r"_alldirs" in self.__dict__:
+        if oldstate == b"?" and r"_alldirs" in self.__dict__:
             self._alldirs.addpath(f)
         self._map[f] = dirstatetuple(state, mode, size, mtime)
-        if state != 'n' or mtime == -1:
+        if state != b'n' or mtime == -1:
             self.nonnormalset.add(f)
         if size == -2:
             self.otherparentset.add(f)
@@ -1370,14 +1370,14 @@
         the file's previous state.  In the future, we should refactor this
         to be more explicit about what that state is.
         """
-        if oldstate not in "?r" and r"_dirs" in self.__dict__:
+        if oldstate not in b"?r" and r"_dirs" in self.__dict__:
             self._dirs.delpath(f)
-        if oldstate == "?" and r"_alldirs" in self.__dict__:
+        if oldstate == b"?" and r"_alldirs" in self.__dict__:
             self._alldirs.addpath(f)
         if r"filefoldmap" in self.__dict__:
             normed = util.normcase(f)
             self.filefoldmap.pop(normed, None)
-        self._map[f] = dirstatetuple('r', 0, size, 0)
+        self._map[f] = dirstatetuple(b'r', 0, size, 0)
         self.nonnormalset.add(f)
 
     def dropfile(self, f, oldstate):
@@ -1387,7 +1387,7 @@
         """
         exists = self._map.pop(f, None) is not None
         if exists:
-            if oldstate != "r" and r"_dirs" in self.__dict__:
+            if oldstate != b"r" and r"_dirs" in self.__dict__:
                 self._dirs.delpath(f)
             if r"_alldirs" in self.__dict__:
                 self._alldirs.delpath(f)
@@ -1400,7 +1400,7 @@
     def clearambiguoustimes(self, files, now):
         for f in files:
             e = self.get(f)
-            if e is not None and e[0] == 'n' and e[3] == now:
+            if e is not None and e[0] == b'n' and e[3] == now:
                 self._map[f] = dirstatetuple(e[0], e[1], e[2], -1)
                 self.nonnormalset.add(f)
 
@@ -1412,9 +1412,9 @@
             nonnorm = set()
             otherparent = set()
             for fname, e in self._map.iteritems():
-                if e[0] != 'n' or e[3] == -1:
+                if e[0] != b'n' or e[3] == -1:
                     nonnorm.add(fname)
-                if e[0] == 'n' and e[2] == -2:
+                if e[0] == b'n' and e[2] == -2:
                     otherparent.add(fname)
             return nonnorm, otherparent
 
@@ -1435,9 +1435,9 @@
         f = {}
         normcase = util.normcase
         for name, s in self._map.iteritems():
-            if s[0] != 'r':
+            if s[0] != b'r':
                 f[normcase(name)] = name
-        f['.'] = '.'  # prevents useless util.fspath() invocation
+        f[b'.'] = b'.'  # prevents useless util.fspath() invocation
         return f
 
     def hastrackeddir(self, d):
@@ -1456,7 +1456,7 @@
 
     @propertycache
     def _dirs(self):
-        return util.dirs(self._map, 'r')
+        return util.dirs(self._map, b'r')
 
     @propertycache
     def _alldirs(self):
@@ -1467,7 +1467,7 @@
         if self._pendingmode is not None and self._pendingmode != mode:
             fp.close()
             raise error.Abort(
-                _('working directory state may be ' 'changed parallelly')
+                _(b'working directory state may be changed parallelly')
             )
         self._pendingmode = mode
         return fp
@@ -1482,7 +1482,7 @@
                 if err.errno != errno.ENOENT:
                     raise
                 # File doesn't exist, so the current state is empty
-                st = ''
+                st = b''
 
             l = len(st)
             if l == 40:
@@ -1491,7 +1491,7 @@
                 self._parents = (nullid, nullid)
             else:
                 raise error.Abort(
-                    _('working directory state appears ' 'damaged!')
+                    _(b'working directory state appears damaged!')
                 )
 
         return self._parents
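
The '_parents' property above relies on the dirstate file beginning with
the two 20-byte parent node ids, which is what the 'l == 40' test checks.
A minimal sketch of that header parse, with a plain exception standing in
for error.Abort:

    NULLID = b'\x00' * 20

    def parse_dirstate_parents(st):
        # st: the first 40 bytes (at most) of the dirstate file
        if len(st) == 40:
            return st[:20], st[20:40]
        if not st:  # no dirstate yet: both parents are null
            return NULLID, NULLID
        raise ValueError('working directory state appears damaged!')
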
@@ -1519,7 +1519,7 @@
         if not st:
             return
 
-        if util.safehasattr(parsers, 'dict_new_presized'):
+        if util.safehasattr(parsers, b'dict_new_presized'):
             # Make an estimate of the number of files in the dirstate based on
             # its size. From a linear regression on a set of real-world repos,
             # all over 10,000 files, the size of a dirstate entry is 85
@@ -1595,7 +1595,7 @@
             self._ui = ui
             self._opener = opener
             self._root = root
-            self._filename = 'dirstate'
+            self._filename = b'dirstate'
             self._parents = None
             self._dirtyparents = False
 
@@ -1636,9 +1636,9 @@
         def clear(self):
             self._rustmap.clear()
             self.setparents(nullid, nullid)
-            util.clearcachedproperty(self, "_dirs")
-            util.clearcachedproperty(self, "_alldirs")
-            util.clearcachedproperty(self, "dirfoldmap")
+            util.clearcachedproperty(self, b"_dirs")
+            util.clearcachedproperty(self, b"_alldirs")
+            util.clearcachedproperty(self, b"dirfoldmap")
 
         def items(self):
             return self._rustmap.items()
@@ -1668,7 +1668,7 @@
             if self._pendingmode is not None and self._pendingmode != mode:
                 fp.close()
                 raise error.Abort(
-                    _('working directory state may be ' 'changed parallelly')
+                    _(b'working directory state may be changed parallelly')
                 )
             self._pendingmode = mode
             return fp
@@ -1688,13 +1688,13 @@
                     if err.errno != errno.ENOENT:
                         raise
                     # File doesn't exist, so the current state is empty
-                    st = ''
+                    st = b''
 
                 try:
                     self._parents = self._rustmap.parents(st)
                 except ValueError:
                     raise error.Abort(
-                        _('working directory state appears ' 'damaged!')
+                        _(b'working directory state appears damaged!')
                     )
 
             return self._parents
--- a/mercurial/dirstateguard.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/dirstateguard.py	Sun Oct 06 09:48:39 2019 -0400
@@ -34,8 +34,8 @@
         self._repo = repo
         self._active = False
         self._closed = False
-        self._backupname = 'dirstate.backup.%s.%d' % (name, id(self))
-        self._narrowspecbackupname = 'narrowspec.backup.%s.%d' % (
+        self._backupname = b'dirstate.backup.%s.%d' % (name, id(self))
+        self._narrowspecbackupname = b'narrowspec.backup.%s.%d' % (
             name,
             id(self),
         )
@@ -54,7 +54,7 @@
     def close(self):
         if not self._active:  # already inactivated
             msg = (
-                _("can't close already inactivated backup: %s")
+                _(b"can't close already inactivated backup: %s")
                 % self._backupname
             )
             raise error.Abort(msg)
@@ -77,7 +77,7 @@
         if not self._closed:
             if not self._active:  # already inactivated
                 msg = (
-                    _("can't release already inactivated backup: %s")
+                    _(b"can't release already inactivated backup: %s")
                     % self._backupname
                 )
                 raise error.Abort(msg)
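
The guard above follows a simple backup/restore protocol: snapshot the
dirstate (and narrowspec) under a unique name on construction, discard
the snapshot on close(), and restore it on release() if the guard was
never closed. A minimal standalone sketch of the same pattern for a
single file, using plain os/shutil calls instead of the repository vfs:

    import os
    import shutil

    class filebackupguard(object):
        """Back up a file; drop the backup on close(), else restore it."""

        def __init__(self, path, name):
            self._path = path
            self._backupname = '%s.backup.%s.%d' % (path, name, id(self))
            shutil.copyfile(path, self._backupname)
            self._active = True

        def close(self):
            # the changes are wanted: throw the backup away
            if not self._active:
                raise RuntimeError("can't close already inactivated backup")
            os.unlink(self._backupname)
            self._active = False

        def release(self):
            # abort path: put the original content back
            if self._active:
                shutil.move(self._backupname, self._path)
                self._active = False
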
--- a/mercurial/discovery.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/discovery.py	Sun Oct 06 09:48:39 2019 -0400
@@ -50,7 +50,7 @@
     extensions a good hook into outgoing.
     """
 
-    if not remote.capable('getbundle'):
+    if not remote.capable(b'getbundle'):
         return treediscovery.findcommonincoming(repo, remote, heads, force)
 
     if heads:
@@ -162,8 +162,8 @@
         og.missingheads = onlyheads or repo.heads()
     elif onlyheads is None:
         # use visible heads as it should be cached
-        og.missingheads = repo.filtered("served").heads()
-        og.excluded = [ctx.node() for ctx in repo.set('secret() or extinct()')]
+        og.missingheads = repo.filtered(b"served").heads()
+        og.excluded = [ctx.node() for ctx in repo.set(b'secret() or extinct()')]
     else:
         # compute common, missing and exclude secret stuff
         sets = repo.changelog.findcommonmissing(og.commonheads, onlyheads)
@@ -222,7 +222,7 @@
         branches.add(ctx.branch())
 
     with remote.commandexecutor() as e:
-        remotemap = e.callcommand('branchmap', {}).result()
+        remotemap = e.callcommand(b'branchmap', {}).result()
 
     knownnode = cl.hasnode  # do not use nodemap until it is filtered
     # A. register remote heads of branches which are in outgoing set
@@ -291,7 +291,7 @@
     # - another element of outgoing.missing
     # - nullrev
     # This explains why the new heads are very simple to compute.
-    r = repo.set('heads(%ln + %ln)', oldheads, outgoing.missing)
+    r = repo.set(b'heads(%ln + %ln)', oldheads, outgoing.missing)
     newheads = sorted(c.node() for c in r)
     # set some unsynced head to issue the "unsynced changes" warning
     if inc:
@@ -309,7 +309,7 @@
 
     with remote.commandexecutor() as e:
         remotebookmarks = e.callcommand(
-            'listkeys', {'namespace': 'bookmarks',}
+            b'listkeys', {b'namespace': b'bookmarks',}
         ).result()
 
     bookmarkedheads = set()
@@ -317,7 +317,7 @@
     # internal config: bookmarks.pushing
     newbookmarks = [
         localbookmarks.expandname(b)
-        for b in pushop.ui.configlist('bookmarks', 'pushing')
+        for b in pushop.ui.configlist(b'bookmarks', b'pushing')
     ]
 
     for bm in localbookmarks:
@@ -356,7 +356,7 @@
         # remote is empty, nothing to check.
         return
 
-    if remote.capable('branchmap'):
+    if remote.capable(b'branchmap'):
         headssum = _headssummary(pushop)
     else:
         headssum = _oldheadssummary(repo, remoteheads, outgoing, inc)
@@ -366,7 +366,7 @@
     ]
     # 1. Check for new branches on the remote.
     if newbranches and not newbranch:  # new branch requires --new-branch
-        branchnames = ', '.join(sorted(newbranches))
+        branchnames = b', '.join(sorted(newbranches))
         # Calculate how many of the new branches are closed branches
         closedbranches = set()
         for tag, heads, tip, isclosed in repo.branchmap().iterbranches():
@@ -374,13 +374,13 @@
                 closedbranches.add(tag)
         closedbranches = closedbranches & set(newbranches)
         if closedbranches:
-            errmsg = _("push creates new remote branches: %s (%d closed)!") % (
+            errmsg = _(b"push creates new remote branches: %s (%d closed)!") % (
                 branchnames,
                 len(closedbranches),
             )
         else:
-            errmsg = _("push creates new remote branches: %s!") % branchnames
-        hint = _("use 'hg push --new-branch' to create new remote branches")
+            errmsg = _(b"push creates new remote branches: %s!") % branchnames
+        hint = _(b"use 'hg push --new-branch' to create new remote branches")
         raise error.Abort(errmsg, hint=hint)
 
     # 2. Find heads that we need not warn about
@@ -409,18 +409,18 @@
                 heads = scmutil.nodesummaries(repo, unsyncedheads)
             if heads is None:
                 repo.ui.status(
-                    _("remote has heads that are " "not known locally\n")
+                    _(b"remote has heads that are " b"not known locally\n")
                 )
             elif branch is None:
                 repo.ui.status(
-                    _("remote has heads that are " "not known locally: %s\n")
+                    _(b"remote has heads that are " b"not known locally: %s\n")
                     % heads
                 )
             else:
                 repo.ui.status(
                     _(
-                        "remote has heads on branch '%s' that are "
-                        "not known locally: %s\n"
+                        b"remote has heads on branch '%s' that are "
+                        b"not known locally: %s\n"
                     )
                     % (branch, heads)
                 )
@@ -429,49 +429,50 @@
                 dhs = list(newhs)
                 if errormsg is None:
                     errormsg = (
-                        _("push creates new branch '%s' with multiple heads")
+                        _(b"push creates new branch '%s' with multiple heads")
                         % branch
                     )
                     hint = _(
-                        "merge or"
-                        " see 'hg help push' for details about"
-                        " pushing new heads"
+                        b"merge or"
+                        b" see 'hg help push' for details about"
+                        b" pushing new heads"
                     )
         elif len(newhs) > len(oldhs):
             # remove bookmarked or existing remote heads from the new heads list
             dhs = sorted(newhs - nowarnheads - oldhs)
         if dhs:
             if errormsg is None:
-                if branch not in ('default', None):
+                if branch not in (b'default', None):
                     errormsg = _(
-                        "push creates new remote head %s " "on branch '%s'!"
+                        b"push creates new remote head %s " b"on branch '%s'!"
                     ) % (short(dhs[0]), branch)
                 elif repo[dhs[0]].bookmarks():
                     errormsg = _(
-                        "push creates new remote head %s " "with bookmark '%s'!"
+                        b"push creates new remote head %s "
+                        b"with bookmark '%s'!"
                     ) % (short(dhs[0]), repo[dhs[0]].bookmarks()[0])
                 else:
-                    errormsg = _("push creates new remote head %s!") % short(
+                    errormsg = _(b"push creates new remote head %s!") % short(
                         dhs[0]
                     )
                 if unsyncedheads:
                     hint = _(
-                        "pull and merge or"
-                        " see 'hg help push' for details about"
-                        " pushing new heads"
+                        b"pull and merge or"
+                        b" see 'hg help push' for details about"
+                        b" pushing new heads"
                     )
                 else:
                     hint = _(
-                        "merge or"
-                        " see 'hg help push' for details about"
-                        " pushing new heads"
+                        b"merge or"
+                        b" see 'hg help push' for details about"
+                        b" pushing new heads"
                     )
             if branch is None:
-                repo.ui.note(_("new remote heads:\n"))
+                repo.ui.note(_(b"new remote heads:\n"))
             else:
-                repo.ui.note(_("new remote heads on branch '%s':\n") % branch)
+                repo.ui.note(_(b"new remote heads on branch '%s':\n") % branch)
             for h in dhs:
-                repo.ui.note(" %s\n" % short(h))
+                repo.ui.note(b" %s\n" % short(h))
     if errormsg:
         raise error.Abort(errormsg, hint=hint)
 
@@ -513,8 +514,8 @@
         else:
             if successorsmarkers.get(h) is not None:
                 msg = (
-                    'checkheads: remote head unknown locally has'
-                    ' local marker: %s\n'
+                    b'checkheads: remote head unknown locally has'
+                    b' local marker: %s\n'
                 )
                 repo.ui.debug(msg % hex(h))
             unknownheads.add(h)
@@ -533,7 +534,9 @@
 
         # Get all revs/nodes on the branch exclusive to this head
         # (already filtered heads are "ignored")
-        branchrevs = unfi.revs('only(%n, (%ln+%ln))', nh, localcandidate, newhs)
+        branchrevs = unfi.revs(
+            b'only(%n, (%ln+%ln))', nh, localcandidate, newhs
+        )
         branchnodes = [tonode(r) for r in branchrevs]
 
         # The branch won't be hidden on the remote if
--- a/mercurial/dispatch.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/dispatch.py	Sun Oct 06 09:48:39 2019 -0400
@@ -93,7 +93,7 @@
                 except:  # re-raises below
                     if exc is None:
                         exc = sys.exc_info()[1]
-                    self.ui.warn('error in exit handlers:\n')
+                    self.ui.warn(b'error in exit handlers:\n')
                     self.ui.traceback(force=True)
         finally:
             if exc is not None:
@@ -101,9 +101,9 @@
 
 
 def run():
-    "run the command in sys.argv"
+    b"run the command in sys.argv"
     initstdio()
-    with tracing.log('parse args into request'):
+    with tracing.log(b'parse args into request'):
         req = request(pycompat.sysargv[1:])
     err = None
     try:
@@ -113,18 +113,18 @@
         status = -1
 
     # In all cases we try to flush stdio streams.
-    if util.safehasattr(req.ui, 'fout'):
+    if util.safehasattr(req.ui, b'fout'):
         try:
             req.ui.fout.flush()
         except IOError as e:
             err = e
             status = -1
 
-    if util.safehasattr(req.ui, 'ferr'):
+    if util.safehasattr(req.ui, b'ferr'):
         try:
             if err is not None and err.errno != errno.EPIPE:
                 req.ui.ferr.write(
-                    'abort: %s\n' % encoding.strtolocal(err.strerror)
+                    b'abort: %s\n' % encoding.strtolocal(err.strerror)
                 )
             req.ui.ferr.flush()
         # There's not much we can do about an I/O error here. So (possibly)
@@ -178,10 +178,10 @@
 
 def _reportsimilar(write, similar):
     if len(similar) == 1:
-        write(_("(did you mean %s?)\n") % similar[0])
+        write(_(b"(did you mean %s?)\n") % similar[0])
     elif similar:
-        ss = ", ".join(sorted(similar))
-        write(_("(did you mean one of %s?)\n") % ss)
+        ss = b", ".join(sorted(similar))
+        write(_(b"(did you mean one of %s?)\n") % ss)
 
 
 def _formatparse(write, inst):
@@ -191,25 +191,25 @@
         similar = _getsimilar(inst.symbols, inst.function)
     if len(inst.args) > 1:
         write(
-            _("hg: parse error at %s: %s\n")
+            _(b"hg: parse error at %s: %s\n")
             % (pycompat.bytestr(inst.args[1]), inst.args[0])
         )
-        if inst.args[0].startswith(' '):
-            write(_("unexpected leading whitespace\n"))
+        if inst.args[0].startswith(b' '):
+            write(_(b"unexpected leading whitespace\n"))
     else:
-        write(_("hg: parse error: %s\n") % inst.args[0])
+        write(_(b"hg: parse error: %s\n") % inst.args[0])
         _reportsimilar(write, similar)
     if inst.hint:
-        write(_("(%s)\n") % inst.hint)
+        write(_(b"(%s)\n") % inst.hint)
 
 
 def _formatargs(args):
-    return ' '.join(procutil.shellquote(a) for a in args)
+    return b' '.join(procutil.shellquote(a) for a in args)
 
 
 def dispatch(req):
     """run the command specified in req.args; returns an integer status code"""
-    with tracing.log('dispatch.dispatch'):
+    with tracing.log(b'dispatch.dispatch'):
         if req.ferr:
             ferr = req.ferr
         elif req.ui:
@@ -221,8 +221,8 @@
             if not req.ui:
                 req.ui = uimod.ui.load()
             req.earlyoptions.update(_earlyparseopts(req.ui, req.args))
-            if req.earlyoptions['traceback']:
-                req.ui.setconfig('ui', 'traceback', 'on', '--traceback')
+            if req.earlyoptions[b'traceback']:
+                req.ui.setconfig(b'ui', b'traceback', b'on', b'--traceback')
 
             # set ui streams from the request
             if req.fin:
@@ -234,9 +234,9 @@
             if req.fmsg:
                 req.ui.fmsg = req.fmsg
         except error.Abort as inst:
-            ferr.write(_("abort: %s\n") % inst)
+            ferr.write(_(b"abort: %s\n") % inst)
             if inst.hint:
-                ferr.write(_("(%s)\n") % inst.hint)
+                ferr.write(_(b"(%s)\n") % inst.hint)
             return -1
         except error.ParseError as inst:
             _formatparse(ferr.write, inst)
@@ -248,16 +248,16 @@
         try:
             ret = _runcatch(req) or 0
         except error.ProgrammingError as inst:
-            req.ui.error(_('** ProgrammingError: %s\n') % inst)
+            req.ui.error(_(b'** ProgrammingError: %s\n') % inst)
             if inst.hint:
-                req.ui.error(_('** (%s)\n') % inst.hint)
+                req.ui.error(_(b'** (%s)\n') % inst.hint)
             raise
         except KeyboardInterrupt as inst:
             try:
                 if isinstance(inst, error.SignalInterrupt):
-                    msg = _("killed!\n")
+                    msg = _(b"killed!\n")
                 else:
-                    msg = _("interrupted!\n")
+                    msg = _(b"interrupted!\n")
                 req.ui.error(msg)
             except error.SignalInterrupt:
                 # maybe pager would quit without consuming all the output, and
@@ -271,16 +271,16 @@
             duration = util.timer() - starttime
             req.ui.flush()
             if req.ui.logblockedtimes:
-                req.ui._blockedtimes['command_duration'] = duration * 1000
+                req.ui._blockedtimes[b'command_duration'] = duration * 1000
                 req.ui.log(
-                    'uiblocked',
-                    'ui blocked ms\n',
+                    b'uiblocked',
+                    b'ui blocked ms\n',
                     **pycompat.strkwargs(req.ui._blockedtimes)
                 )
             return_code = ret & 255
             req.ui.log(
-                "commandfinish",
-                "%s exited %d after %0.2f seconds\n",
+                b"commandfinish",
+                b"%s exited %d after %0.2f seconds\n",
                 msg,
                 return_code,
                 duration,
@@ -296,14 +296,14 @@
 
 
 def _runcatch(req):
-    with tracing.log('dispatch._runcatch'):
+    with tracing.log(b'dispatch._runcatch'):
 
         def catchterm(*args):
             raise error.SignalInterrupt
 
         ui = req.ui
         try:
-            for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
+            for name in b'SIGBREAK', b'SIGHUP', b'SIGTERM':
                 num = getattr(signal, name, None)
                 if num:
                     signal.signal(num, catchterm)
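
catchterm is registered above through getattr(signal, name, None) so
that names which do not exist on the current platform, such as SIGBREAK
outside Windows, are skipped silently. The same portable-registration
idiom in isolation, with KeyboardInterrupt standing in for
error.SignalInterrupt:

    import signal

    def _catchterm(signum, frame):
        raise KeyboardInterrupt

    # SIGBREAK exists only on Windows; getattr() skips absent names
    for name in ('SIGBREAK', 'SIGHUP', 'SIGTERM'):
        num = getattr(signal, name, None)
        if num:
            signal.signal(num, _catchterm)
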
@@ -330,7 +330,7 @@
                 # it's not a command that server operators expect to
                 # be safe to offer to users in a sandbox.
                 pass
-            if realcmd == 'serve' and '--stdio' in cmdargs:
+            if realcmd == b'serve' and b'--stdio' in cmdargs:
                 # We want to constrain 'hg serve --stdio' instances pretty
                 # closely, as many shared-ssh access tools want to grant
                 # access to run *only* 'hg -R $repo serve --stdio'. We
@@ -341,38 +341,40 @@
                 # name. This used to actually run the debugger.
                 if (
                     len(req.args) != 4
-                    or req.args[0] != '-R'
-                    or req.args[1].startswith('--')
-                    or req.args[2] != 'serve'
-                    or req.args[3] != '--stdio'
+                    or req.args[0] != b'-R'
+                    or req.args[1].startswith(b'--')
+                    or req.args[2] != b'serve'
+                    or req.args[3] != b'--stdio'
                 ):
                     raise error.Abort(
-                        _('potentially unsafe serve --stdio invocation: %s')
+                        _(b'potentially unsafe serve --stdio invocation: %s')
                         % (stringutil.pprint(req.args),)
                     )
 
             try:
-                debugger = 'pdb'
-                debugtrace = {'pdb': pdb.set_trace}
-                debugmortem = {'pdb': pdb.post_mortem}
+                debugger = b'pdb'
+                debugtrace = {b'pdb': pdb.set_trace}
+                debugmortem = {b'pdb': pdb.post_mortem}
 
                 # read --config before doing anything else
                 # (e.g. to change trust settings for reading .hg/hgrc)
-                cfgs = _parseconfig(req.ui, req.earlyoptions['config'])
+                cfgs = _parseconfig(req.ui, req.earlyoptions[b'config'])
 
                 if req.repo:
                     # copy configs that were passed on the cmdline (--config) to
                     # the repo ui
                     for sec, name, val in cfgs:
-                        req.repo.ui.setconfig(sec, name, val, source='--config')
+                        req.repo.ui.setconfig(
+                            sec, name, val, source=b'--config'
+                        )
 
                 # developer config: ui.debugger
-                debugger = ui.config("ui", "debugger")
+                debugger = ui.config(b"ui", b"debugger")
                 debugmod = pdb
                 if not debugger or ui.plain():
                     # if we are in HGPLAIN mode, then disable custom debugging
-                    debugger = 'pdb'
-                elif req.earlyoptions['debugger']:
+                    debugger = b'pdb'
+                elif req.earlyoptions[b'debugger']:
                     # This import can be slow for fancy debuggers, so only
                     # do it when absolutely necessary, i.e. when actual
                     # debugging has been requested
@@ -386,22 +388,22 @@
                 debugmortem[debugger] = debugmod.post_mortem
 
                 # enter the debugger before command execution
-                if req.earlyoptions['debugger']:
+                if req.earlyoptions[b'debugger']:
                     ui.warn(
                         _(
-                            "entering debugger - "
-                            "type c to continue starting hg or h for help\n"
+                            b"entering debugger - "
+                            b"type c to continue starting hg or h for help\n"
                         )
                     )
 
                     if (
-                        debugger != 'pdb'
-                        and debugtrace[debugger] == debugtrace['pdb']
+                        debugger != b'pdb'
+                        and debugtrace[debugger] == debugtrace[b'pdb']
                     ):
                         ui.warn(
                             _(
-                                "%s debugger specified "
-                                "but its module was not found\n"
+                                b"%s debugger specified "
+                                b"but its module was not found\n"
                             )
                             % debugger
                         )
@@ -413,7 +415,7 @@
                     ui.flush()
             except:  # re-raises
                 # enter the debugger when we hit an exception
-                if req.earlyoptions['debugger']:
+                if req.earlyoptions[b'debugger']:
                     traceback.print_exc()
                     debugmortem[debugger](sys.exc_info()[2])
                 raise
@@ -430,23 +432,23 @@
         return scmutil.callcatch(ui, func)
     except error.AmbiguousCommand as inst:
         ui.warn(
-            _("hg: command '%s' is ambiguous:\n    %s\n")
-            % (inst.args[0], " ".join(inst.args[1]))
+            _(b"hg: command '%s' is ambiguous:\n    %s\n")
+            % (inst.args[0], b" ".join(inst.args[1]))
         )
     except error.CommandError as inst:
         if inst.args[0]:
-            ui.pager('help')
+            ui.pager(b'help')
             msgbytes = pycompat.bytestr(inst.args[1])
-            ui.warn(_("hg %s: %s\n") % (inst.args[0], msgbytes))
+            ui.warn(_(b"hg %s: %s\n") % (inst.args[0], msgbytes))
             commands.help_(ui, inst.args[0], full=False, command=True)
         else:
-            ui.warn(_("hg: %s\n") % inst.args[1])
-            ui.warn(_("(use 'hg help -v' for a list of global options)\n"))
+            ui.warn(_(b"hg: %s\n") % inst.args[1])
+            ui.warn(_(b"(use 'hg help -v' for a list of global options)\n"))
     except error.ParseError as inst:
         _formatparse(ui.warn, inst)
         return -1
     except error.UnknownCommand as inst:
-        nocmdmsg = _("hg: unknown command '%s'\n") % inst.args[0]
+        nocmdmsg = _(b"hg: unknown command '%s'\n") % inst.args[0]
         try:
             # check if the command is in a disabled extension
             # (but don't check for extensions themselves)
@@ -465,7 +467,7 @@
                     suggested = True
             if not suggested:
                 ui.warn(nocmdmsg)
-                ui.warn(_("(use 'hg help' for a list of commands)\n"))
+                ui.warn(_(b"(use 'hg help' for a list of commands)\n"))
     except IOError:
         raise
     except KeyboardInterrupt:
@@ -480,10 +482,10 @@
 def aliasargs(fn, givenargs):
     args = []
     # only care about alias 'args', ignore 'args' set by extensions.wrapfunction
-    if not util.safehasattr(fn, '_origfunc'):
+    if not util.safehasattr(fn, b'_origfunc'):
         args = getattr(fn, 'args', args)
     if args:
-        cmd = ' '.join(map(procutil.shellquote, args))
+        cmd = b' '.join(map(procutil.shellquote, args))
 
         nums = []
 
@@ -492,7 +494,7 @@
             nums.append(num)
             if num < len(givenargs):
                 return givenargs[num]
-            raise error.Abort(_('too few arguments for command alias'))
+            raise error.Abort(_(b'too few arguments for command alias'))
 
         cmd = re.sub(br'\$(\d+|\$)', replacer, cmd)
         givenargs = [x for i, x in enumerate(givenargs) if i not in nums]
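
replacer above rewrites $1..$n in the alias's stored arguments using the
arguments actually supplied, then drops the consumed positionals from
givenargs. A trimmed-down sketch of that substitution, with a plain
exception in place of error.Abort and an explicit guard for the '$$'
case:

    import re

    def expandpositionals(cmd, givenargs):
        nums = []

        def replacer(m):
            if m.group(1) == b'$':  # leave '$$' alone
                return m.group()
            num = int(m.group(1)) - 1
            nums.append(num)
            if num < len(givenargs):
                return givenargs[num]
            raise ValueError('too few arguments for command alias')

        cmd = re.sub(br'\$(\d+|\$)', replacer, cmd)
        rest = [x for i, x in enumerate(givenargs) if i not in nums]
        return cmd, rest

    # expandpositionals(b'log -r $1', [b'tip', b'-v'])
    # -> (b'log -r tip', [b'-v'])
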
@@ -507,17 +509,17 @@
     '''
     # util.interpolate can't deal with "$@" (with quotes) because it's only
     # built to match prefix + patterns.
-    replacemap = dict(('$%d' % (i + 1), arg) for i, arg in enumerate(args))
-    replacemap['$0'] = name
-    replacemap['$$'] = '$'
-    replacemap['$@'] = ' '.join(args)
+    replacemap = dict((b'$%d' % (i + 1), arg) for i, arg in enumerate(args))
+    replacemap[b'$0'] = name
+    replacemap[b'$$'] = b'$'
+    replacemap[b'$@'] = b' '.join(args)
     # Typical Unix shells interpolate "$@" (with quotes) as all the positional
     # parameters, separated out into words. Emulate the same behavior here by
     # quoting the arguments individually. POSIX shells will then typically
     # tokenize each argument into exactly one word.
-    replacemap['"$@"'] = ' '.join(procutil.shellquote(arg) for arg in args)
+    replacemap[b'"$@"'] = b' '.join(procutil.shellquote(arg) for arg in args)
     # escape '\$' for regex
-    regex = '|'.join(replacemap.keys()).replace('$', br'\$')
+    regex = b'|'.join(replacemap.keys()).replace(b'$', br'\$')
     r = re.compile(regex)
     return r.sub(lambda x: replacemap[x.group()], cmd)
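
Given the table built above, the whole interpolation is a single regex
pass over the alias definition. A condensed re-implementation to show
the effect; the naive single-quote lambda stands in for
procutil.shellquote:

    import re

    def interpolate(name, args, cmd):
        quote = lambda a: b"'" + a + b"'"
        replacemap = {b'$%d' % (i + 1): a for i, a in enumerate(args)}
        replacemap[b'$0'] = name
        replacemap[b'$$'] = b'$'
        replacemap[b'$@'] = b' '.join(args)
        replacemap[b'"$@"'] = b' '.join(quote(a) for a in args)
        regex = b'|'.join(re.escape(k) for k in replacemap)
        return re.compile(regex).sub(lambda m: replacemap[m.group()], cmd)

    # interpolate(b'greet', [b'a b', b'c'], b'echo $0: "$@"')
    # -> b"echo greet: 'a b' 'c'"
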
 
@@ -525,12 +527,12 @@
 class cmdalias(object):
     def __init__(self, ui, name, definition, cmdtable, source):
         self.name = self.cmd = name
-        self.cmdname = ''
+        self.cmdname = b''
         self.definition = definition
         self.fn = None
         self.givenargs = []
         self.opts = []
-        self.help = ''
+        self.help = b''
         self.badalias = None
         self.unknowncmd = False
         self.source = source
@@ -546,33 +548,33 @@
             self.shadows = False
 
         if not self.definition:
-            self.badalias = _("no definition for alias '%s'") % self.name
+            self.badalias = _(b"no definition for alias '%s'") % self.name
             return
 
-        if self.definition.startswith('!'):
+        if self.definition.startswith(b'!'):
             shdef = self.definition[1:]
             self.shell = True
 
             def fn(ui, *args):
-                env = {'HG_ARGS': ' '.join((self.name,) + args)}
+                env = {b'HG_ARGS': b' '.join((self.name,) + args)}
 
                 def _checkvar(m):
-                    if m.groups()[0] == '$':
+                    if m.groups()[0] == b'$':
                         return m.group()
                     elif int(m.groups()[0]) <= len(args):
                         return m.group()
                     else:
                         ui.debug(
-                            "No argument found for substitution "
-                            "of %i variable in alias '%s' definition.\n"
+                            b"No argument found for substitution "
+                            b"of %i variable in alias '%s' definition.\n"
                             % (int(m.groups()[0]), self.name)
                         )
-                        return ''
+                        return b''
 
                 cmd = re.sub(br'\$(\d+|\$)', _checkvar, shdef)
                 cmd = aliasinterpolate(self.name, args, cmd)
                 return ui.system(
-                    cmd, environ=env, blockedtag='alias_%s' % self.name
+                    cmd, environ=env, blockedtag=b'alias_%s' % self.name
                 )
 
             self.fn = fn
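
The '!'-prefixed definitions handled above come from the [alias]
configuration section. A hypothetical shell alias showing the pieces
involved (the command itself is invented); $1 is substituted as
implemented by _checkvar, and "$@" and HG_ARGS behave as described
above:

    [alias]
    ; everything after '!' runs in the shell; unmatched $2, $3, ...
    ; are replaced by the empty string with a debug message
    latest = !hg log --limit $1 --template '{rev}: {desc|firstline}\n'
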
@@ -583,7 +585,7 @@
         try:
             args = pycompat.shlexsplit(self.definition)
         except ValueError as inst:
-            self.badalias = _("error in definition for alias '%s': %s") % (
+            self.badalias = _(b"error in definition for alias '%s': %s") % (
                 self.name,
                 stringutil.forcebytestr(inst),
             )
@@ -591,9 +593,9 @@
         earlyopts, args = _earlysplitopts(args)
         if earlyopts:
             self.badalias = _(
-                "error in definition for alias '%s': %s may "
-                "only be given on the command line"
-            ) % (self.name, '/'.join(pycompat.ziplist(*earlyopts)[0]))
+                b"error in definition for alias '%s': %s may "
+                b"only be given on the command line"
+            ) % (self.name, b'/'.join(pycompat.ziplist(*earlyopts)[0]))
             return
         self.cmdname = cmd = args.pop(0)
         self.givenargs = args
@@ -610,42 +612,43 @@
             self._populatehelp(ui, name, cmd, self.fn, cmdhelp)
 
         except error.UnknownCommand:
-            self.badalias = _("alias '%s' resolves to unknown command '%s'") % (
-                self.name,
-                cmd,
-            )
+            self.badalias = _(
+                b"alias '%s' resolves to unknown command '%s'"
+            ) % (self.name, cmd,)
             self.unknowncmd = True
         except error.AmbiguousCommand:
             self.badalias = _(
-                "alias '%s' resolves to ambiguous command '%s'"
+                b"alias '%s' resolves to ambiguous command '%s'"
             ) % (self.name, cmd)
 
     def _populatehelp(self, ui, name, cmd, fn, defaulthelp=None):
         # confine strings to be passed to i18n.gettext()
         cfg = {}
-        for k in ('doc', 'help', 'category'):
-            v = ui.config('alias', '%s:%s' % (name, k), None)
+        for k in (b'doc', b'help', b'category'):
+            v = ui.config(b'alias', b'%s:%s' % (name, k), None)
             if v is None:
                 continue
             if not encoding.isasciistr(v):
                 self.badalias = _(
-                    "non-ASCII character in alias definition " "'%s:%s'"
+                    b"non-ASCII character in alias definition " b"'%s:%s'"
                 ) % (name, k)
                 return
             cfg[k] = v
 
-        self.help = cfg.get('help', defaulthelp or '')
-        if self.help and self.help.startswith("hg " + cmd):
+        self.help = cfg.get(b'help', defaulthelp or b'')
+        if self.help and self.help.startswith(b"hg " + cmd):
             # drop prefix in old-style help lines so hg shows the alias
             self.help = self.help[4 + len(cmd) :]
 
-        self.owndoc = 'doc' in cfg
-        doc = cfg.get('doc', pycompat.getdoc(fn))
+        self.owndoc = b'doc' in cfg
+        doc = cfg.get(b'doc', pycompat.getdoc(fn))
         if doc is not None:
             doc = pycompat.sysstr(doc)
         self.__doc__ = doc
 
-        self.helpcategory = cfg.get('category', registrar.command.CATEGORY_NONE)
+        self.helpcategory = cfg.get(
+            b'category', registrar.command.CATEGORY_NONE
+        )
 
     @property
     def args(self):
@@ -661,7 +664,7 @@
         }
         if name not in adefaults:
             raise AttributeError(name)
-        if self.badalias or util.safehasattr(self, 'shell'):
+        if self.badalias or util.safehasattr(self, b'shell'):
             return adefaults[name]
         return getattr(self.fn, name)
 
@@ -672,29 +675,29 @@
                 try:
                     # check if the command is in a disabled extension
                     cmd, ext = extensions.disabledcmd(ui, self.cmdname)[:2]
-                    hint = _("'%s' is provided by '%s' extension") % (cmd, ext)
+                    hint = _(b"'%s' is provided by '%s' extension") % (cmd, ext)
                 except error.UnknownCommand:
                     pass
             raise error.Abort(self.badalias, hint=hint)
         if self.shadows:
             ui.debug(
-                "alias '%s' shadows command '%s'\n" % (self.name, self.cmdname)
+                b"alias '%s' shadows command '%s'\n" % (self.name, self.cmdname)
             )
 
         ui.log(
-            'commandalias',
-            "alias '%s' expands to '%s'\n",
+            b'commandalias',
+            b"alias '%s' expands to '%s'\n",
             self.name,
             self.definition,
         )
-        if util.safehasattr(self, 'shell'):
+        if util.safehasattr(self, b'shell'):
             return self.fn(ui, *args, **opts)
         else:
             try:
                 return util.checksignature(self.fn)(ui, *args, **opts)
             except error.SignatureError:
-                args = ' '.join([self.cmdname] + self.args)
-                ui.debug("alias '%s' expands to '%s'\n" % (self.name, args))
+                args = b' '.join([self.cmdname] + self.args)
+                ui.debug(b"alias '%s' expands to '%s'\n" % (self.name, args))
                 raise
 
 
@@ -738,7 +741,7 @@
     # aliases are processed after extensions have been loaded, so they
     # may use extension commands. Aliases can also use other alias definitions,
     # but only if they have been defined prior to the current definition.
-    for alias, definition in ui.configitems('alias', ignoresub=True):
+    for alias, definition in ui.configitems(b'alias', ignoresub=True):
         try:
             if cmdtable[alias].definition == definition:
                 continue
@@ -746,7 +749,7 @@
             # definition might not exist or it might not be a cmdalias
             pass
 
-        source = ui.configsource('alias', alias)
+        source = ui.configsource(b'alias', alias)
         entry = lazyaliasentry(ui, alias, definition, cmdtable, source)
         cmdtable[alias] = entry
 
@@ -763,11 +766,11 @@
     if args:
         cmd, args = args[0], args[1:]
         aliases, entry = cmdutil.findcmd(
-            cmd, commands.table, ui.configbool("ui", "strict")
+            cmd, commands.table, ui.configbool(b"ui", b"strict")
         )
         cmd = aliases[0]
         args = aliasargs(entry[0], args)
-        defaults = ui.config("defaults", cmd)
+        defaults = ui.config(b"defaults", cmd)
         if defaults:
             args = (
                 pycompat.maplist(util.expandpath, pycompat.shlexsplit(defaults))
@@ -802,17 +805,17 @@
 
     for cfg in config:
         try:
-            name, value = [cfgelem.strip() for cfgelem in cfg.split('=', 1)]
-            section, name = name.split('.', 1)
+            name, value = [cfgelem.strip() for cfgelem in cfg.split(b'=', 1)]
+            section, name = name.split(b'.', 1)
             if not section or not name:
                 raise IndexError
-            ui.setconfig(section, name, value, '--config')
+            ui.setconfig(section, name, value, b'--config')
             configs.append((section, name, value))
         except (IndexError, ValueError):
             raise error.Abort(
                 _(
-                    'malformed --config option: %r '
-                    '(use --config section.name=value)'
+                    b'malformed --config option: %r '
+                    b'(use --config section.name=value)'
                 )
                 % pycompat.bytestr(cfg)
             )
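
The parsing above accepts only the exact section.name=value shape; both
a missing '=' and a missing '.' funnel into the same error message. A
condensed sketch of the split-and-validate step, raising a plain
exception instead of error.Abort:

    def parseoneconfig(cfg):
        # split b'section.name=value' into its three components
        try:
            name, value = [e.strip() for e in cfg.split(b'=', 1)]
            section, name = name.split(b'.', 1)
            if not section or not name:
                raise IndexError
            return section, name, value
        except (IndexError, ValueError):
            raise ValueError('malformed --config option: %r' % cfg)

    # parseoneconfig(b'ui.username=alice')
    # -> (b'ui', b'username', b'alice')
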
@@ -826,18 +829,18 @@
         args,
         commands.globalopts,
         options,
-        gnu=not ui.plain('strictflags'),
+        gnu=not ui.plain(b'strictflags'),
         early=True,
-        optaliases={'repository': ['repo']},
+        optaliases={b'repository': [b'repo']},
     )
     return options
 
 
 def _earlysplitopts(args):
     """Split args into a list of possible early options and remainder args"""
-    shortoptions = 'R:'
+    shortoptions = b'R:'
     # TODO: perhaps 'debugger' should be included
-    longoptions = ['cwd=', 'repository=', 'repo=', 'config=']
+    longoptions = [b'cwd=', b'repository=', b'repo=', b'config=']
     return fancyopts.earlygetopt(
         args, shortoptions, longoptions, gnu=True, keepsep=True
     )
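
fancyopts.earlygetopt pulls the listed early options out of the argument
vector before the full command line is parsed. A deliberately simplified
stand-in that only peels leading occurrences; the real helper, invoked
with gnu=True and keepsep=True above, handles more spellings and
placements:

    def splitearly(args):
        longopts = {b'--cwd', b'--repository', b'--repo', b'--config'}
        early, rest = [], list(args)
        while rest:
            arg = rest[0]
            if (arg == b'-R' or arg in longopts) and len(rest) > 1:
                early.append((rest[0], rest[1]))
                del rest[:2]
            elif arg.split(b'=', 1)[0] in longopts:
                early.append(tuple(arg.split(b'=', 1)))
                del rest[:1]
            else:
                break
        return early, rest

    # splitearly([b'-R', b'foo', b'log', b'-v'])
    # -> ([(b'-R', b'foo')], [b'log', b'-v'])
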
@@ -848,9 +851,9 @@
     hook.hook(
         lui,
         repo,
-        "pre-%s" % cmd,
+        b"pre-%s" % cmd,
         True,
-        args=" ".join(fullargs),
+        args=b" ".join(fullargs),
         pats=cmdpats,
         opts=cmdoptions,
     )
@@ -860,9 +863,9 @@
         hook.hook(
             lui,
             repo,
-            "post-%s" % cmd,
+            b"post-%s" % cmd,
             False,
-            args=" ".join(fullargs),
+            args=b" ".join(fullargs),
             result=ret,
             pats=cmdpats,
             opts=cmdoptions,
@@ -872,9 +875,9 @@
         hook.hook(
             lui,
             repo,
-            "fail-%s" % cmd,
+            b"fail-%s" % cmd,
             False,
-            args=" ".join(fullargs),
+            args=b" ".join(fullargs),
             pats=cmdpats,
             opts=cmdoptions,
         )
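
Together these three call sites give every command optional
pre-<command>, post-<command> and fail-<command> hooks, with the fully
expanded command line passed through as HG_ARGS. A hypothetical
configuration hooking 'commit' (the scripts are invented):

    [hooks]
    ; runs before 'hg commit'; a non-zero exit aborts the command
    pre-commit = /usr/local/bin/check-style
    ; runs after a successful 'hg commit'
    post-commit = /usr/local/bin/notify-team
    ; runs if 'hg commit' raised an exception
    fail-commit = /usr/local/bin/log-failure
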
@@ -892,20 +895,20 @@
             wd = encoding.getcwd()
         except OSError as e:
             raise error.Abort(
-                _("error getting current working directory: %s")
+                _(b"error getting current working directory: %s")
                 % encoding.strtolocal(e.strerror)
             )
-    path = cmdutil.findrepo(wd) or ""
+    path = cmdutil.findrepo(wd) or b""
     if not path:
         lui = ui
     else:
         lui = ui.copy()
-        lui.readconfig(os.path.join(path, ".hg", "hgrc"), path)
+        lui.readconfig(os.path.join(path, b".hg", b"hgrc"), path)
 
     if rpath:
         path = lui.expandpath(rpath)
         lui = ui.copy()
-        lui.readconfig(os.path.join(path, ".hg", "hgrc"), path)
+        lui.readconfig(os.path.join(path, b".hg", b"hgrc"), path)
 
     return path, lui
 
@@ -926,7 +929,7 @@
 
     cmd = args[0]
     try:
-        strict = ui.configbool("ui", "strict")
+        strict = ui.configbool(b"ui", b"strict")
         aliases, entry = cmdutil.findcmd(cmd, cmdtable, strict)
     except (error.AmbiguousCommand, error.UnknownCommand):
         return
@@ -934,7 +937,7 @@
     cmd = aliases[0]
     fn = entry[0]
 
-    if cmd and util.safehasattr(fn, 'shell'):
+    if cmd and util.safehasattr(fn, b'shell'):
         # shell alias shouldn't receive early options which are consumed by hg
         _earlyopts, args = _earlysplitopts(args)
         d = lambda: fn(ui, *args[1:])
@@ -948,11 +951,11 @@
     ui = req.ui
 
     # check for cwd
-    cwd = req.earlyoptions['cwd']
+    cwd = req.earlyoptions[b'cwd']
     if cwd:
         os.chdir(cwd)
 
-    rpath = req.earlyoptions['repository']
+    rpath = req.earlyoptions[b'repository']
     path, lui = _getlocal(ui, rpath)
 
     uis = {ui, lui}
@@ -961,20 +964,20 @@
         uis.add(req.repo.ui)
 
     if (
-        req.earlyoptions['verbose']
-        or req.earlyoptions['debug']
-        or req.earlyoptions['quiet']
+        req.earlyoptions[b'verbose']
+        or req.earlyoptions[b'debug']
+        or req.earlyoptions[b'quiet']
     ):
-        for opt in ('verbose', 'debug', 'quiet'):
+        for opt in (b'verbose', b'debug', b'quiet'):
             val = pycompat.bytestr(bool(req.earlyoptions[opt]))
             for ui_ in uis:
-                ui_.setconfig('ui', opt, val, '--' + opt)
+                ui_.setconfig(b'ui', opt, val, b'--' + opt)
 
-    if req.earlyoptions['profile']:
+    if req.earlyoptions[b'profile']:
         for ui_ in uis:
-            ui_.setconfig('profiling', 'enabled', 'true', '--profile')
+            ui_.setconfig(b'profiling', b'enabled', b'true', b'--profile')
 
-    profile = lui.configbool('profiling', 'enabled')
+    profile = lui.configbool(b'profiling', b'enabled')
     with profiling.profile(lui, enabled=profile) as profiler:
         # Configure extensions in phases: uisetup, extsetup, cmdtable, and
         # reposetup
@@ -998,7 +1001,7 @@
             return shellaliasfn()
 
         # check for fallback encoding
-        fallback = lui.config('ui', 'fallbackencoding')
+        fallback = lui.config(b'ui', b'fallbackencoding')
         if fallback:
             encoding.fallbackencoding = fallback
 
@@ -1008,26 +1011,26 @@
         # store the canonical command name in request object for later access
         req.canonical_command = cmd
 
-        if options["config"] != req.earlyoptions["config"]:
-            raise error.Abort(_("option --config may not be abbreviated!"))
-        if options["cwd"] != req.earlyoptions["cwd"]:
-            raise error.Abort(_("option --cwd may not be abbreviated!"))
-        if options["repository"] != req.earlyoptions["repository"]:
+        if options[b"config"] != req.earlyoptions[b"config"]:
+            raise error.Abort(_(b"option --config may not be abbreviated!"))
+        if options[b"cwd"] != req.earlyoptions[b"cwd"]:
+            raise error.Abort(_(b"option --cwd may not be abbreviated!"))
+        if options[b"repository"] != req.earlyoptions[b"repository"]:
             raise error.Abort(
                 _(
-                    "option -R has to be separated from other options (e.g. not "
-                    "-qR) and --repository may only be abbreviated as --repo!"
+                    b"option -R has to be separated from other options (e.g. not "
+                    b"-qR) and --repository may only be abbreviated as --repo!"
                 )
             )
-        if options["debugger"] != req.earlyoptions["debugger"]:
-            raise error.Abort(_("option --debugger may not be abbreviated!"))
+        if options[b"debugger"] != req.earlyoptions[b"debugger"]:
+            raise error.Abort(_(b"option --debugger may not be abbreviated!"))
         # don't validate --profile/--traceback; they can be enabled from now on
 
-        if options["encoding"]:
-            encoding.encoding = options["encoding"]
-        if options["encodingmode"]:
-            encoding.encodingmode = options["encodingmode"]
-        if options["time"]:
+        if options[b"encoding"]:
+            encoding.encoding = options[b"encoding"]
+        if options[b"encodingmode"]:
+            encoding.encodingmode = options[b"encodingmode"]
+        if options[b"time"]:
 
             def get_times():
                 t = os.times()
@@ -1041,7 +1044,7 @@
             def print_time():
                 t = get_times()
                 ui.warn(
-                    _("time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n")
+                    _(b"time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n")
                     % (
                         t[4] - s[4],
                         t[0] - s[0],
@@ -1052,42 +1055,42 @@
                 )
 
             ui.atexit(print_time)
-        if options["profile"]:
+        if options[b"profile"]:
             profiler.start()
 
         # if abbreviated versions of these were used, take them into account now
-        if options['verbose'] or options['debug'] or options['quiet']:
-            for opt in ('verbose', 'debug', 'quiet'):
+        if options[b'verbose'] or options[b'debug'] or options[b'quiet']:
+            for opt in (b'verbose', b'debug', b'quiet'):
                 if options[opt] == req.earlyoptions[opt]:
                     continue
                 val = pycompat.bytestr(bool(options[opt]))
                 for ui_ in uis:
-                    ui_.setconfig('ui', opt, val, '--' + opt)
+                    ui_.setconfig(b'ui', opt, val, b'--' + opt)
 
-        if options['traceback']:
+        if options[b'traceback']:
             for ui_ in uis:
-                ui_.setconfig('ui', 'traceback', 'on', '--traceback')
+                ui_.setconfig(b'ui', b'traceback', b'on', b'--traceback')
 
-        if options['noninteractive']:
+        if options[b'noninteractive']:
             for ui_ in uis:
-                ui_.setconfig('ui', 'interactive', 'off', '-y')
+                ui_.setconfig(b'ui', b'interactive', b'off', b'-y')
 
-        if cmdoptions.get('insecure', False):
+        if cmdoptions.get(b'insecure', False):
             for ui_ in uis:
                 ui_.insecureconnections = True
 
         # setup color handling before pager, because setting up pager
         # might cause incorrect console information
-        coloropt = options['color']
+        coloropt = options[b'color']
         for ui_ in uis:
             if coloropt:
-                ui_.setconfig('ui', 'color', coloropt, '--color')
+                ui_.setconfig(b'ui', b'color', coloropt, b'--color')
             color.setup(ui_)
 
-        if stringutil.parsebool(options['pager']):
+        if stringutil.parsebool(options[b'pager']):
             # ui.pager() expects 'internal-always-' prefix in this case
-            ui.pager('internal-always-' + cmd)
-        elif options['pager'] != 'auto':
+            ui.pager(b'internal-always-' + cmd)
+        elif options[b'pager'] != b'auto':
             for ui_ in uis:
                 ui_.disablepager()
 
@@ -1095,12 +1098,12 @@
         for ui_ in uis:
             extensions.populateui(ui_)
 
-        if options['version']:
+        if options[b'version']:
             return commands.version_(ui)
-        if options['help']:
+        if options[b'help']:
             return commands.help_(ui, cmd, command=cmd is not None)
         elif not cmd:
-            return commands.help_(ui, 'shortlist')
+            return commands.help_(ui, b'shortlist')
 
         repo = None
         cmdpats = args[:]
@@ -1125,10 +1128,10 @@
                     )
                     if not repo.local():
                         raise error.Abort(
-                            _("repository '%s' is not local") % path
+                            _(b"repository '%s' is not local") % path
                         )
                     repo.ui.setconfig(
-                        "bundle", "mainreporoot", repo.root, 'repo'
+                        b"bundle", b"mainreporoot", repo.root, b'repo'
                     )
                 except error.RequirementError:
                     raise
@@ -1141,28 +1144,28 @@
                             repos = pycompat.maplist(cmdutil.findrepo, args)
                             guess = repos[0]
                             if guess and repos.count(guess) == len(repos):
-                                req.args = ['--repository', guess] + fullargs
-                                req.earlyoptions['repository'] = guess
+                                req.args = [b'--repository', guess] + fullargs
+                                req.earlyoptions[b'repository'] = guess
                                 return _dispatch(req)
                         if not path:
                             raise error.RepoError(
                                 _(
-                                    "no repository found in"
-                                    " '%s' (.hg not found)"
+                                    b"no repository found in"
+                                    b" '%s' (.hg not found)"
                                 )
                                 % encoding.getcwd()
                             )
                         raise
             if repo:
                 ui = repo.ui
-                if options['hidden']:
+                if options[b'hidden']:
                     repo = repo.unfiltered()
             args.insert(0, repo)
         elif rpath:
-            ui.warn(_("warning: --repository ignored\n"))
+            ui.warn(_(b"warning: --repository ignored\n"))
 
         msg = _formatargs(fullargs)
-        ui.log("command", '%s\n', msg)
+        ui.log(b"command", b'%s\n', msg)
         strcmdopt = pycompat.strkwargs(cmdoptions)
         d = lambda: util.checksignature(func)(ui, *args, **strcmdopt)
         try:
@@ -1177,10 +1180,10 @@
 def _runcommand(ui, options, cmd, cmdfunc):
     """Run a command function, possibly with profiling enabled."""
     try:
-        with tracing.log("Running %s command" % cmd):
+        with tracing.log(b"Running %s command" % cmd):
             return cmdfunc()
     except error.SignatureError:
-        raise error.CommandError(cmd, _('invalid arguments'))
+        raise error.CommandError(cmd, _(b'invalid arguments'))
 
 
 def _exceptionwarning(ui):
@@ -1194,16 +1197,18 @@
     # of date) will be clueful enough to notice the implausible
     # version number and try updating.
     ct = util.versiontuple(n=2)
-    worst = None, ct, ''
-    if ui.config('ui', 'supportcontact') is None:
+    worst = None, ct, b''
+    if ui.config(b'ui', b'supportcontact') is None:
         for name, mod in extensions.extensions():
             # 'testedwith' should be bytes, but not all extensions are ported
             # to py3 and we don't want UnicodeException because of that.
-            testedwith = stringutil.forcebytestr(getattr(mod, 'testedwith', ''))
-            report = getattr(mod, 'buglink', _('the extension author.'))
+            testedwith = stringutil.forcebytestr(
+                getattr(mod, 'testedwith', b'')
+            )
+            report = getattr(mod, 'buglink', _(b'the extension author.'))
             if not testedwith.strip():
                 # We found an untested extension. It's likely the culprit.
-                worst = name, 'unknown', report
+                worst = name, b'unknown', report
                 break
 
             # Never blame on extensions bundled with Mercurial.
@@ -1221,35 +1226,35 @@
     if worst[0] is not None:
         name, testedwith, report = worst
         if not isinstance(testedwith, (bytes, str)):
-            testedwith = '.'.join(
+            testedwith = b'.'.join(
                 [stringutil.forcebytestr(c) for c in testedwith]
             )
         warning = _(
-            '** Unknown exception encountered with '
-            'possibly-broken third-party extension %s\n'
-            '** which supports versions %s of Mercurial.\n'
-            '** Please disable %s and try your action again.\n'
-            '** If that fixes the bug please report it to %s\n'
+            b'** Unknown exception encountered with '
+            b'possibly-broken third-party extension %s\n'
+            b'** which supports versions %s of Mercurial.\n'
+            b'** Please disable %s and try your action again.\n'
+            b'** If that fixes the bug please report it to %s\n'
         ) % (name, testedwith, name, stringutil.forcebytestr(report))
     else:
-        bugtracker = ui.config('ui', 'supportcontact')
+        bugtracker = ui.config(b'ui', b'supportcontact')
         if bugtracker is None:
-            bugtracker = _("https://mercurial-scm.org/wiki/BugTracker")
+            bugtracker = _(b"https://mercurial-scm.org/wiki/BugTracker")
         warning = (
             _(
-                "** unknown exception encountered, "
-                "please report by visiting\n** "
+                b"** unknown exception encountered, "
+                b"please report by visiting\n** "
             )
             + bugtracker
-            + '\n'
+            + b'\n'
         )
-    sysversion = pycompat.sysbytes(sys.version).replace('\n', '')
+    sysversion = pycompat.sysbytes(sys.version).replace(b'\n', b'')
     warning += (
-        (_("** Python %s\n") % sysversion)
-        + (_("** Mercurial Distributed SCM (version %s)\n") % util.version())
+        (_(b"** Python %s\n") % sysversion)
+        + (_(b"** Mercurial Distributed SCM (version %s)\n") % util.version())
         + (
-            _("** Extensions loaded: %s\n")
-            % ", ".join([x[0] for x in extensions.extensions()])
+            _(b"** Extensions loaded: %s\n")
+            % b", ".join([x[0] for x in extensions.extensions()])
         )
     )
     return warning
@@ -1263,8 +1268,8 @@
     """
     warning = _exceptionwarning(ui)
     ui.log(
-        "commandexception",
-        "%s\n%s\n",
+        b"commandexception",
+        b"%s\n%s\n",
         warning,
         pycompat.sysbytes(traceback.format_exc()),
     )
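
A minimal sketch (not part of the patch; the dict is hypothetical) of why every lookup above gains a b prefix: on Python 3, str and bytes keys are distinct, so a dict populated with bytes keys silently misses when indexed with a str literal.

    options = {b'verbose': True}
    assert options.get(b'verbose') is True  # bytes key: found
    assert options.get('verbose') is None   # str key misses on Python 3
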
--- a/mercurial/encoding.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/encoding.py	Sun Oct 06 09:48:39 2019 -0400
@@ -36,11 +36,11 @@
 # sanity.
 _ignore = [
     unichr(int(x, 16)).encode("utf-8")
-    for x in "200c 200d 200e 200f 202a 202b 202c 202d 202e "
-    "206a 206b 206c 206d 206e 206f feff".split()
+    for x in b"200c 200d 200e 200f 202a 202b 202c 202d 202e "
+    b"206a 206b 206c 206d 206e 206f feff".split()
 ]
 # verify the next function will work
-assert all(i.startswith(("\xe2", "\xef")) for i in _ignore)
+assert all(i.startswith((b"\xe2", b"\xef")) for i in _ignore)
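
Hedged illustration of why this assertion holds: the ignorable code points sit in U+2000-U+206F or are U+FEFF, and their UTF-8 encodings begin with 0xE2 and 0xEF respectively.

    assert u'\u200c'.encode('utf-8') == b'\xe2\x80\x8c'  # ZWNJ
    assert u'\ufeff'.encode('utf-8') == b'\xef\xbb\xbf'  # BOM
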
 
 
 def hfsignoreclean(s):
@@ -51,9 +51,9 @@
     >>> hfsignoreclean(u'.h\ufeffg'.encode('utf-8'))
     '.hg'
     """
-    if "\xe2" in s or "\xef" in s:
+    if b"\xe2" in s or b"\xef" in s:
         for c in _ignore:
-            s = s.replace(c, '')
+            s = s.replace(c, b'')
     return s
 
 
@@ -73,24 +73,24 @@
     )
 
 _encodingrewrites = {
-    '646': 'ascii',
-    'ANSI_X3.4-1968': 'ascii',
+    b'646': b'ascii',
+    b'ANSI_X3.4-1968': b'ascii',
 }
 # cp65001 is a Windows variant of utf-8, which isn't supported on Python 2.
 # No idea if it should be rewritten to the canonical name 'utf-8' on Python 3.
 # https://bugs.python.org/issue13216
 if pycompat.iswindows and not pycompat.ispy3:
-    _encodingrewrites['cp65001'] = 'utf-8'
+    _encodingrewrites[b'cp65001'] = b'utf-8'
 
 try:
-    encoding = environ.get("HGENCODING")
+    encoding = environ.get(b"HGENCODING")
     if not encoding:
-        encoding = locale.getpreferredencoding().encode('ascii') or 'ascii'
+        encoding = locale.getpreferredencoding().encode('ascii') or b'ascii'
         encoding = _encodingrewrites.get(encoding, encoding)
 except locale.Error:
-    encoding = 'ascii'
-encodingmode = environ.get("HGENCODINGMODE", "strict")
-fallbackencoding = 'ISO-8859-1'
+    encoding = b'ascii'
+encodingmode = environ.get(b"HGENCODINGMODE", b"strict")
+fallbackencoding = b'ISO-8859-1'
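
A sketch of the resolution order the lines above implement (the empty environ is hypothetical): HGENCODING wins, then the locale's preferred encoding normalized through the rewrite table, then b'ascii' as the last resort.

    import locale
    env = {}  # hypothetical stand-in for encoding.environ
    enc = env.get(b'HGENCODING')
    if not enc:
        enc = locale.getpreferredencoding().encode('ascii') or b'ascii'
        enc = {b'646': b'ascii', b'ANSI_X3.4-1968': b'ascii'}.get(enc, enc)
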
 
 
 class localstr(bytes):
@@ -158,7 +158,7 @@
         try:
             # make sure string is actually stored in UTF-8
             u = s.decode('UTF-8')
-            if encoding == 'UTF-8':
+            if encoding == b'UTF-8':
                 # fast path
                 return s
             r = u.encode(_sysstr(encoding), r"replace")
@@ -180,7 +180,7 @@
                 # can't round-trip
                 return u.encode(_sysstr(encoding), r"replace")
     except LookupError as k:
-        raise error.Abort(k, hint="please check your locale settings")
+        raise error.Abort(k, hint=b"please check your locale settings")
 
 
 def fromlocal(s):
@@ -206,10 +206,10 @@
     except UnicodeDecodeError as inst:
         sub = s[max(0, inst.start - 10) : inst.start + 10]
         raise error.Abort(
-            "decoding near '%s': %s!" % (sub, pycompat.bytestr(inst))
+            b"decoding near '%s': %s!" % (sub, pycompat.bytestr(inst))
         )
     except LookupError as k:
-        raise error.Abort(k, hint="please check your locale settings")
+        raise error.Abort(k, hint=b"please check your locale settings")
 
 
 def unitolocal(u):
@@ -266,17 +266,19 @@
 
 # How to treat ambiguous-width characters. Set to 'wide' to treat as wide.
 _wide = _sysstr(
-    environ.get("HGENCODINGAMBIGUOUS", "narrow") == "wide" and "WFA" or "WF"
+    environ.get(b"HGENCODINGAMBIGUOUS", b"narrow") == b"wide"
+    and b"WFA"
+    or b"WF"
 )
 
 
 def colwidth(s):
-    "Find the column width of a string for display in the local encoding"
+    b"Find the column width of a string for display in the local encoding"
     return ucolwidth(s.decode(_sysstr(encoding), r'replace'))
 
 
 def ucolwidth(d):
-    "Find the column width of a Unicode string for display"
+    b"Find the column width of a Unicode string for display"
     eaw = getattr(unicodedata, 'east_asian_width', None)
     if eaw is not None:
         return sum([eaw(c) in _wide and 2 or 1 for c in d])
@@ -292,7 +294,7 @@
             return t
 
 
-def trim(s, width, ellipsis='', leftside=False):
+def trim(s, width, ellipsis=b'', leftside=False):
     """Trim string 's' to at most 'width' columns (including 'ellipsis').
 
     If 'leftside' is True, left side of string 's' is trimmed.
@@ -390,7 +392,7 @@
 
 
 def lower(s):
-    "best-effort encoding-aware case-folding of local string s"
+    b"best-effort encoding-aware case-folding of local string s"
     try:
         return asciilower(s)
     except UnicodeDecodeError:
@@ -408,11 +410,11 @@
     except UnicodeError:
         return s.lower()  # we don't know how to fold this except in ASCII
     except LookupError as k:
-        raise error.Abort(k, hint="please check your locale settings")
+        raise error.Abort(k, hint=b"please check your locale settings")
 
 
 def upper(s):
-    "best-effort encoding-aware case-folding of local string s"
+    b"best-effort encoding-aware case-folding of local string s"
     try:
         return asciiupper(s)
     except UnicodeDecodeError:
@@ -433,7 +435,7 @@
     except UnicodeError:
         return s.upper()  # we don't know how to fold this except in ASCII
     except LookupError as k:
-        raise error.Abort(k, hint="please check your locale settings")
+        raise error.Abort(k, hint=b"please check your locale settings")
 
 
 class normcasespecs(object):
@@ -575,7 +577,7 @@
         return fromlocal(s)
     elif isasciistr(s):
         return s
-    if "\xed" not in s:
+    if b"\xed" not in s:
         try:
             s.decode('utf-8', _utf8strict)
             return s
@@ -583,13 +585,13 @@
             pass
 
     s = pycompat.bytestr(s)
-    r = ""
+    r = b""
     pos = 0
     l = len(s)
     while pos < l:
         try:
             c = getutf8char(s, pos)
-            if "\xed\xb0\x80" <= c <= "\xed\xb3\xbf":
+            if b"\xed\xb0\x80" <= c <= b"\xed\xb3\xbf":
                 # have to re-escape existing U+DCxx characters
                 c = unichr(0xDC00 + ord(s[pos])).encode('utf-8', _utf8strict)
                 pos += 1
@@ -628,7 +630,7 @@
     if isasciistr(s):
         return s
     # fast path - look for uDxxx prefixes in s
-    if "\xed" not in s:
+    if b"\xed" not in s:
         return s
 
     # We could do this with the unicode type but some Python builds
@@ -637,14 +639,14 @@
     # helper again to walk the string without "decoding" it.
 
     s = pycompat.bytestr(s)
-    r = ""
+    r = b""
     pos = 0
     l = len(s)
     while pos < l:
         c = getutf8char(s, pos)
         pos += len(c)
         # unescape U+DCxx characters
-        if "\xed\xb0\x80" <= c <= "\xed\xb3\xbf":
+        if b"\xed\xb0\x80" <= c <= b"\xed\xb3\xbf":
             c = pycompat.bytechr(ord(c.decode("utf-8", _utf8strict)) & 0xFF)
         r += c
     return r
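
A hedged aside on the range tested in both loops above: b'\xed\xb0\x80' through b'\xed\xb3\xbf' is precisely the raw UTF-8 encoding of the lone surrogates U+DC00 through U+DCFF that UTF-8b uses to smuggle undecodable bytes (Python 3 sketch).

    lo = u'\udc00'.encode('utf-8', 'surrogatepass')
    hi = u'\udcff'.encode('utf-8', 'surrogatepass')
    assert (lo, hi) == (b'\xed\xb0\x80', b'\xed\xb3\xbf')
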
--- a/mercurial/error.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/error.py	Sun Oct 06 09:48:39 2019 -0400
@@ -73,7 +73,7 @@
             from .node import short
 
             name = short(name)
-        RevlogError.__init__(self, '%s@%s: %s' % (index, name, message))
+        RevlogError.__init__(self, b'%s@%s: %s' % (index, name, message))
 
     def __bytes__(self):
         return RevlogError.__bytes__(self)
@@ -150,7 +150,7 @@
     def __init__(self):
         from .i18n import _
 
-        Abort.__init__(self, _('response expected'))
+        Abort.__init__(self, _(b'response expected'))
 
 
 class OutOfBandError(Hint, Exception):
@@ -175,7 +175,7 @@
     def __init__(self, function, symbols):
         from .i18n import _
 
-        ParseError.__init__(self, _("unknown identifier: %s") % function)
+        ParseError.__init__(self, _(b"unknown identifier: %s") % function)
         self.function = function
         self.symbols = symbols
 
@@ -214,13 +214,13 @@
         from .i18n import _
 
         self.recordtypes = sorted(recordtypes)
-        s = ' '.join(self.recordtypes)
+        s = b' '.join(self.recordtypes)
         Abort.__init__(
             self,
-            _('unsupported merge state records: %s') % s,
+            _(b'unsupported merge state records: %s') % s,
             hint=_(
-                'see https://mercurial-scm.org/wiki/MergeStateRecords for '
-                'more information'
+                b'see https://mercurial-scm.org/wiki/MergeStateRecords for '
+                b'more information'
             ),
         )
 
@@ -244,7 +244,7 @@
 
 class LockHeld(LockError):
     def __init__(self, errno, filename, desc, locker):
-        LockError.__init__(self, errno, 'Lock held', filename, desc)
+        LockError.__init__(self, errno, b'Lock held', filename, desc)
         self.locker = locker
 
 
@@ -322,7 +322,7 @@
         self.params = params
         self.values = values
         if self.parttype is None:
-            msg = 'Stream Parameter'
+            msg = b'Stream Parameter'
         else:
             msg = parttype
         entries = self.params
@@ -334,9 +334,9 @@
                 if val is None:
                     entries.append(val)
                 else:
-                    entries.append("%s=%r" % (par, pycompat.maybebytestr(val)))
+                    entries.append(b"%s=%r" % (par, pycompat.maybebytestr(val)))
         if entries:
-            msg = '%s - %s' % (msg, ', '.join(entries))
+            msg = b'%s - %s' % (msg, b', '.join(entries))
         ValueError.__init__(self, msg)
 
 
@@ -360,7 +360,7 @@
         self.ret = ret
         # no i18n; this is expected to be processed into a better message
         Abort.__init__(
-            self, 'failed to update value for "%s/%s"' % (namespace, key)
+            self, b'failed to update value for "%s/%s"' % (namespace, key)
         )
 
 
@@ -373,7 +373,7 @@
     def __init__(self, filename, node, tombstone):
         from .node import short
 
-        StorageError.__init__(self, '%s:%s' % (filename, short(node)))
+        StorageError.__init__(self, b'%s:%s' % (filename, short(node)))
         self.tombstone = tombstone
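
The byteified messages throughout this file lean on bytes %-formatting (PEP 461, Python 3.5+); a standalone sketch with made-up values:

    msg = b'%s@%s: %s' % (b'00changelog.i', b'deadbeef', b'unknown node')
    assert msg == b'00changelog.i@deadbeef: unknown node'
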
 
 
--- a/mercurial/exchange.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/exchange.py	Sun Oct 06 09:48:39 2019 -0400
@@ -44,49 +44,49 @@
 urlerr = util.urlerr
 urlreq = util.urlreq
 
-_NARROWACL_SECTION = 'narrowacl'
+_NARROWACL_SECTION = b'narrowacl'
 
 # Maps bundle version human names to changegroup versions.
 _bundlespeccgversions = {
-    'v1': '01',
-    'v2': '02',
-    'packed1': 's1',
-    'bundle2': '02',  # legacy
+    b'v1': b'01',
+    b'v2': b'02',
+    b'packed1': b's1',
+    b'bundle2': b'02',  # legacy
 }
 
 # Maps bundle version with content opts to choose which part to bundle
 _bundlespeccontentopts = {
-    'v1': {
-        'changegroup': True,
-        'cg.version': '01',
-        'obsolescence': False,
-        'phases': False,
-        'tagsfnodescache': False,
-        'revbranchcache': False,
+    b'v1': {
+        b'changegroup': True,
+        b'cg.version': b'01',
+        b'obsolescence': False,
+        b'phases': False,
+        b'tagsfnodescache': False,
+        b'revbranchcache': False,
     },
-    'v2': {
-        'changegroup': True,
-        'cg.version': '02',
-        'obsolescence': False,
-        'phases': False,
-        'tagsfnodescache': True,
-        'revbranchcache': True,
+    b'v2': {
+        b'changegroup': True,
+        b'cg.version': b'02',
+        b'obsolescence': False,
+        b'phases': False,
+        b'tagsfnodescache': True,
+        b'revbranchcache': True,
     },
-    'packed1': {'cg.version': 's1'},
+    b'packed1': {b'cg.version': b's1'},
 }
-_bundlespeccontentopts['bundle2'] = _bundlespeccontentopts['v2']
+_bundlespeccontentopts[b'bundle2'] = _bundlespeccontentopts[b'v2']
 
 _bundlespecvariants = {
-    "streamv2": {
-        "changegroup": False,
-        "streamv2": True,
-        "tagsfnodescache": False,
-        "revbranchcache": False,
+    b"streamv2": {
+        b"changegroup": False,
+        b"streamv2": True,
+        b"tagsfnodescache": False,
+        b"revbranchcache": False,
     }
 }
 
 # Compression engines allowed in version 1. THIS SHOULD NEVER CHANGE.
-_bundlespecv1compengines = {'gzip', 'bzip2', 'none'}
+_bundlespecv1compengines = {b'gzip', b'bzip2', b'none'}
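
Illustrative anatomy of a full bundle specification as the tables above consume it (the spec value is hypothetical): the compression engine and the version are split on the first '-'.

    compression, version = b'gzip-v2'.split(b'-', 1)
    assert compression in {b'gzip', b'bzip2', b'none'}  # v1-legal engines
    assert version == b'v2'
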
 
 
 @attr.s
@@ -134,51 +134,51 @@
     """
 
     def parseparams(s):
-        if ';' not in s:
+        if b';' not in s:
             return s, {}
 
         params = {}
-        version, paramstr = s.split(';', 1)
-
-        for p in paramstr.split(';'):
-            if '=' not in p:
+        version, paramstr = s.split(b';', 1)
+
+        for p in paramstr.split(b';'):
+            if b'=' not in p:
                 raise error.InvalidBundleSpecification(
                     _(
-                        'invalid bundle specification: '
-                        'missing "=" in parameter: %s'
+                        b'invalid bundle specification: '
+                        b'missing "=" in parameter: %s'
                     )
                     % p
                 )
 
-            key, value = p.split('=', 1)
+            key, value = p.split(b'=', 1)
             key = urlreq.unquote(key)
             value = urlreq.unquote(value)
             params[key] = value
 
         return version, params
 
-    if strict and '-' not in spec:
+    if strict and b'-' not in spec:
         raise error.InvalidBundleSpecification(
             _(
-                'invalid bundle specification; '
-                'must be prefixed with compression: %s'
+                b'invalid bundle specification; '
+                b'must be prefixed with compression: %s'
             )
             % spec
         )
 
-    if '-' in spec:
-        compression, version = spec.split('-', 1)
+    if b'-' in spec:
+        compression, version = spec.split(b'-', 1)
 
         if compression not in util.compengines.supportedbundlenames:
             raise error.UnsupportedBundleSpecification(
-                _('%s compression is not supported') % compression
+                _(b'%s compression is not supported') % compression
             )
 
         version, params = parseparams(version)
 
         if version not in _bundlespeccgversions:
             raise error.UnsupportedBundleSpecification(
-                _('%s is not a recognized bundle version') % version
+                _(b'%s is not a recognized bundle version') % version
             )
     else:
         # Value could be just the compression or just the version, in which
@@ -189,49 +189,49 @@
 
         if spec in util.compengines.supportedbundlenames:
             compression = spec
-            version = 'v1'
+            version = b'v1'
             # Generaldelta repos require v2.
-            if 'generaldelta' in repo.requirements:
-                version = 'v2'
+            if b'generaldelta' in repo.requirements:
+                version = b'v2'
             # Modern compression engines require v2.
             if compression not in _bundlespecv1compengines:
-                version = 'v2'
+                version = b'v2'
         elif spec in _bundlespeccgversions:
-            if spec == 'packed1':
-                compression = 'none'
+            if spec == b'packed1':
+                compression = b'none'
             else:
-                compression = 'bzip2'
+                compression = b'bzip2'
             version = spec
         else:
             raise error.UnsupportedBundleSpecification(
-                _('%s is not a recognized bundle specification') % spec
+                _(b'%s is not a recognized bundle specification') % spec
             )
 
     # Bundle version 1 only supports a known set of compression engines.
-    if version == 'v1' and compression not in _bundlespecv1compengines:
+    if version == b'v1' and compression not in _bundlespecv1compengines:
         raise error.UnsupportedBundleSpecification(
-            _('compression engine %s is not supported on v1 bundles')
+            _(b'compression engine %s is not supported on v1 bundles')
             % compression
         )
 
     # The specification for packed1 can optionally declare the data formats
     # required to apply it. If we see this metadata, compare against what the
     # repo supports and error if the bundle isn't compatible.
-    if version == 'packed1' and 'requirements' in params:
-        requirements = set(params['requirements'].split(','))
+    if version == b'packed1' and b'requirements' in params:
+        requirements = set(params[b'requirements'].split(b','))
         missingreqs = requirements - repo.supportedformats
         if missingreqs:
             raise error.UnsupportedBundleSpecification(
-                _('missing support for repository features: %s')
-                % ', '.join(sorted(missingreqs))
+                _(b'missing support for repository features: %s')
+                % b', '.join(sorted(missingreqs))
             )
 
     # Compute contentopts based on the version
     contentopts = _bundlespeccontentopts.get(version, {}).copy()
 
     # Process the variants
-    if "stream" in params and params["stream"] == "v2":
-        variant = _bundlespecvariants["streamv2"]
+    if b"stream" in params and params[b"stream"] == b"v2":
+        variant = _bundlespecvariants[b"streamv2"]
         contentopts.update(variant)
 
     engine = util.compengines.forbundlename(compression)
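
A standalone sketch of the parameter syntax parseparams() handles above, using a hypothetical spec string:

    spec = b'none-v2;stream=v2'
    version, paramstr = spec.split(b';', 1)
    params = dict(p.split(b'=', 1) for p in paramstr.split(b';'))
    assert version == b'none-v2' and params == {b'stream': b'v2'}
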
@@ -248,28 +248,30 @@
 
     alg = None
     if not fname:
-        fname = "stream"
-        if not header.startswith('HG') and header.startswith('\0'):
+        fname = b"stream"
+        if not header.startswith(b'HG') and header.startswith(b'\0'):
             fh = changegroup.headerlessfixup(fh, header)
-            header = "HG10"
-            alg = 'UN'
+            header = b"HG10"
+            alg = b'UN'
     elif vfs:
         fname = vfs.join(fname)
 
     magic, version = header[0:2], header[2:4]
 
-    if magic != 'HG':
-        raise error.Abort(_('%s: not a Mercurial bundle') % fname)
-    if version == '10':
+    if magic != b'HG':
+        raise error.Abort(_(b'%s: not a Mercurial bundle') % fname)
+    if version == b'10':
         if alg is None:
             alg = changegroup.readexactly(fh, 2)
         return changegroup.cg1unpacker(fh, alg)
-    elif version.startswith('2'):
+    elif version.startswith(b'2'):
         return bundle2.getunbundler(ui, fh, magicstring=magic + version)
-    elif version == 'S1':
+    elif version == b'S1':
         return streamclone.streamcloneapplier(fh)
     else:
-        raise error.Abort(_('%s: unknown bundle version %s') % (fname, version))
+        raise error.Abort(
+            _(b'%s: unknown bundle version %s') % (fname, version)
+        )
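
Header anatomy sketch for the dispatch above (the example header is made up): the first two bytes are the magic and the next two the version.

    header = b'HG20'
    magic, version = header[0:2], header[2:4]
    assert magic == b'HG' and version.startswith(b'2')  # a bundle2 stream
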
 
 
 def getbundlespec(ui, fh):
@@ -288,54 +290,56 @@
     b = readbundle(ui, fh, None)
     if isinstance(b, changegroup.cg1unpacker):
         alg = b._type
-        if alg == '_truncatedBZ':
-            alg = 'BZ'
+        if alg == b'_truncatedBZ':
+            alg = b'BZ'
         comp = speccompression(alg)
         if not comp:
-            raise error.Abort(_('unknown compression algorithm: %s') % alg)
-        return '%s-v1' % comp
+            raise error.Abort(_(b'unknown compression algorithm: %s') % alg)
+        return b'%s-v1' % comp
     elif isinstance(b, bundle2.unbundle20):
-        if 'Compression' in b.params:
-            comp = speccompression(b.params['Compression'])
+        if b'Compression' in b.params:
+            comp = speccompression(b.params[b'Compression'])
             if not comp:
-                raise error.Abort(_('unknown compression algorithm: %s') % comp)
+                raise error.Abort(
+                    _(b'unknown compression algorithm: %s') % comp
+                )
         else:
-            comp = 'none'
+            comp = b'none'
 
         version = None
         for part in b.iterparts():
-            if part.type == 'changegroup':
-                version = part.params['version']
-                if version in ('01', '02'):
-                    version = 'v2'
+            if part.type == b'changegroup':
+                version = part.params[b'version']
+                if version in (b'01', b'02'):
+                    version = b'v2'
                 else:
                     raise error.Abort(
                         _(
-                            'changegroup version %s does not have '
-                            'a known bundlespec'
+                            b'changegroup version %s does not have '
+                            b'a known bundlespec'
                         )
                         % version,
-                        hint=_('try upgrading your Mercurial ' 'client'),
+                        hint=_(b'try upgrading your Mercurial ' b'client'),
                     )
-            elif part.type == 'stream2' and version is None:
+            elif part.type == b'stream2' and version is None:
                 # A stream2 part requires to be part of a v2 bundle
-                requirements = urlreq.unquote(part.params['requirements'])
+                requirements = urlreq.unquote(part.params[b'requirements'])
                 splitted = requirements.split()
                 params = bundle2._formatrequirementsparams(splitted)
-                return 'none-v2;stream=v2;%s' % params
+                return b'none-v2;stream=v2;%s' % params
 
         if not version:
             raise error.Abort(
-                _('could not identify changegroup version in ' 'bundle')
+                _(b'could not identify changegroup version in ' b'bundle')
             )
 
-        return '%s-%s' % (comp, version)
+        return b'%s-%s' % (comp, version)
     elif isinstance(b, streamclone.streamcloneapplier):
         requirements = streamclone.readbundle1header(fh)[2]
         formatted = bundle2._formatrequirementsparams(requirements)
-        return 'none-packed1;%s' % formatted
+        return b'none-packed1;%s' % formatted
     else:
-        raise error.Abort(_('unknown bundle type: %s') % b)
+        raise error.Abort(_(b'unknown bundle type: %s') % b)
 
 
 def _computeoutgoing(repo, heads, common):
@@ -361,30 +365,33 @@
 def _checkpublish(pushop):
     repo = pushop.repo
     ui = repo.ui
-    behavior = ui.config('experimental', 'auto-publish')
-    if pushop.publish or behavior not in ('warn', 'confirm', 'abort'):
+    behavior = ui.config(b'experimental', b'auto-publish')
+    if pushop.publish or behavior not in (b'warn', b'confirm', b'abort'):
         return
-    remotephases = listkeys(pushop.remote, 'phases')
-    if not remotephases.get('publishing', False):
+    remotephases = listkeys(pushop.remote, b'phases')
+    if not remotephases.get(b'publishing', False):
         return
 
     if pushop.revs is None:
-        published = repo.filtered('served').revs('not public()')
+        published = repo.filtered(b'served').revs(b'not public()')
     else:
-        published = repo.revs('::%ln - public()', pushop.revs)
+        published = repo.revs(b'::%ln - public()', pushop.revs)
     if published:
-        if behavior == 'warn':
-            ui.warn(_('%i changesets about to be published\n') % len(published))
-        elif behavior == 'confirm':
+        if behavior == b'warn':
+            ui.warn(
+                _(b'%i changesets about to be published\n') % len(published)
+            )
+        elif behavior == b'confirm':
             if ui.promptchoice(
-                _('push and publish %i changesets (yn)?' '$$ &Yes $$ &No')
+                _(b'push and publish %i changesets (yn)?' b'$$ &Yes $$ &No')
                 % len(published)
             ):
-                raise error.Abort(_('user quit'))
-        elif behavior == 'abort':
-            msg = _('push would publish %i changesets') % len(published)
+                raise error.Abort(_(b'user quit'))
+        elif behavior == b'abort':
+            msg = _(b'push would publish %i changesets') % len(published)
             hint = _(
-                "use --publish or adjust 'experimental.auto-publish'" " config"
+                b"use --publish or adjust 'experimental.auto-publish'"
+                b" config"
             )
             raise error.Abort(msg, hint=hint)
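
A hedged sketch (assuming an open repo object and a list of push head nodes) of the selection used above: the changesets a push would publish are the non-public ancestors of the pushed heads.

    def wouldpublish(repo, heads):
        # b'::%ln - public()': ancestors of heads that are not yet public
        return len(repo.revs(b'::%ln - public()', heads))
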
 
@@ -400,9 +407,9 @@
     # should be used.
     #
     # developer config: devel.legacy.exchange
-    exchange = ui.configlist('devel', 'legacy.exchange')
-    forcebundle1 = 'bundle2' not in exchange and 'bundle1' in exchange
-    return forcebundle1 or not op.remote.capable('bundle2')
+    exchange = ui.configlist(b'devel', b'legacy.exchange')
+    forcebundle1 = b'bundle2' not in exchange and b'bundle1' in exchange
+    return forcebundle1 or not op.remote.capable(b'bundle2')
 
 
 class pushoperation(object):
@@ -521,7 +528,7 @@
         # and
         # * commonheads parents on missing
         revset = unfi.set(
-            '%ln and parents(roots(%ln))',
+            b'%ln and parents(roots(%ln))',
             self.outgoing.commonheads,
             self.outgoing.missing,
         )
@@ -539,17 +546,17 @@
 
 # mapping of message used when pushing bookmark
 bookmsgmap = {
-    'update': (
-        _("updating bookmark %s\n"),
-        _('updating bookmark %s failed!\n'),
+    b'update': (
+        _(b"updating bookmark %s\n"),
+        _(b'updating bookmark %s failed!\n'),
     ),
-    'export': (
-        _("exporting bookmark %s\n"),
-        _('exporting bookmark %s failed!\n'),
+    b'export': (
+        _(b"exporting bookmark %s\n"),
+        _(b'exporting bookmark %s failed!\n'),
     ),
-    'delete': (
-        _("deleting remote bookmark %s\n"),
-        _('deleting remote bookmark %s failed!\n'),
+    b'delete': (
+        _(b"deleting remote bookmark %s\n"),
+        _(b'deleting remote bookmark %s failed!\n'),
     ),
 }
 
@@ -590,20 +597,20 @@
         )
         if missing:
             msg = _(
-                "required features are not"
-                " supported in the destination:"
-                " %s"
-            ) % (', '.join(sorted(missing)))
+                b"required features are not"
+                b" supported in the destination:"
+                b" %s"
+            ) % (b', '.join(sorted(missing)))
             raise error.Abort(msg)
 
     if not pushop.remote.canpush():
-        raise error.Abort(_("destination does not support push"))
-
-    if not pushop.remote.capable('unbundle'):
+        raise error.Abort(_(b"destination does not support push"))
+
+    if not pushop.remote.capable(b'unbundle'):
         raise error.Abort(
             _(
-                'cannot push: destination does not support the '
-                'unbundle wire protocol command'
+                b'cannot push: destination does not support the '
+                b'unbundle wire protocol command'
             )
         )
 
@@ -612,7 +619,7 @@
     try:
         # bundle2 push may receive a reply bundle touching bookmarks
         # requiring the wlock. Take it now to ensure proper ordering.
-        maypushback = pushop.ui.configbool('experimental', 'bundle2.pushback')
+        maypushback = pushop.ui.configbool(b'experimental', b'bundle2.pushback')
         if (
             (not _forcebundle1(pushop))
             and maypushback
@@ -621,13 +628,13 @@
             wlock = pushop.repo.wlock()
         lock = pushop.repo.lock()
         pushop.trmanager = transactionmanager(
-            pushop.repo, 'push-response', pushop.remote.url()
+            pushop.repo, b'push-response', pushop.remote.url()
         )
     except error.LockUnavailable as err:
         # source repo cannot be locked.
         # We do not abort the push, but just disable the local phase
         # synchronisation.
-        msg = 'cannot lock source repository: %s\n' % stringutil.forcebytestr(
+        msg = b'cannot lock source repository: %s\n' % stringutil.forcebytestr(
             err
         )
         pushop.ui.debug(msg)
@@ -645,7 +652,7 @@
                 _pushobsolete(pushop)
                 _pushbookmark(pushop)
 
-    if repo.ui.configbool('experimental', 'remotenames'):
+    if repo.ui.configbool(b'experimental', b'remotenames'):
         logexchange.pullremotenames(repo, remote)
 
     return pushop
@@ -686,7 +693,7 @@
         step(pushop)
 
 
-@pushdiscovery('changeset')
+@pushdiscovery(b'changeset')
 def _pushdiscoverychangeset(pushop):
     """discover the changeset that need to be pushed"""
     fci = discovery.findcommonincoming
@@ -713,20 +720,20 @@
     pushop.incoming = inc
 
 
-@pushdiscovery('phase')
+@pushdiscovery(b'phase')
 def _pushdiscoveryphase(pushop):
     """discover the phase that needs to be pushed
 
     (computed for both success and failure case for changesets push)"""
     outgoing = pushop.outgoing
     unfi = pushop.repo.unfiltered()
-    remotephases = listkeys(pushop.remote, 'phases')
+    remotephases = listkeys(pushop.remote, b'phases')
 
     if (
-        pushop.ui.configbool('ui', '_usedassubrepo')
+        pushop.ui.configbool(b'ui', b'_usedassubrepo')
         and remotephases  # server supports phases
         and not pushop.outgoing.missing  # no changesets to be pushed
-        and remotephases.get('publishing', False)
+        and remotephases.get(b'publishing', False)
     ):
         # When:
         # - this is a subrepo push
@@ -746,10 +753,10 @@
     )
     droots = pushop.remotephases.draftroots
 
-    extracond = ''
+    extracond = b''
     if not pushop.remotephases.publishing:
-        extracond = ' and public()'
-    revset = 'heads((%%ln::%%ln) %s)' % extracond
+        extracond = b' and public()'
+    revset = b'heads((%%ln::%%ln) %s)' % extracond
     # Get the list of all revs draft on remote but public here.
     # XXX Beware that revset break if droots is not strictly
     # XXX root we may want to ensure it is but it is costly
@@ -757,7 +764,7 @@
     if not pushop.remotephases.publishing and pushop.publish:
         future = list(
             unfi.set(
-                '%ln and (not public() or %ln::)', pushop.futureheads, droots
+                b'%ln and (not public() or %ln::)', pushop.futureheads, droots
             )
         )
     elif not outgoing.missing:
@@ -768,7 +775,7 @@
         # should not be necessary for publishing server, but because of an
         # issue fixed in xxxxx we have to do it anyway.
         fdroots = list(
-            unfi.set('roots(%ln  + %ln::)', outgoing.missing, droots)
+            unfi.set(b'roots(%ln  + %ln::)', outgoing.missing, droots)
         )
         fdroots = [f.node() for f in fdroots]
         future = list(unfi.set(revset, fdroots, pushop.futureheads))
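
A note on the doubled percent signs in revset above: bytes %-formatting consumes one level of escaping, leaving %ln placeholders for the revset engine to fill later.

    revset = b'heads((%%ln::%%ln) %s)' % b' and public()'
    assert revset == b'heads((%ln::%ln)  and public())'
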
@@ -776,7 +783,7 @@
     pushop.fallbackoutdatedphases = fallback
 
 
-@pushdiscovery('obsmarker')
+@pushdiscovery(b'obsmarker')
 def _pushdiscoveryobsmarkers(pushop):
     if not obsolete.isenabled(pushop.repo, obsolete.exchangeopt):
         return
@@ -784,28 +791,28 @@
     if not pushop.repo.obsstore:
         return
 
-    if 'obsolete' not in listkeys(pushop.remote, 'namespaces'):
+    if b'obsolete' not in listkeys(pushop.remote, b'namespaces'):
         return
 
     repo = pushop.repo
     # very naive computation, that can be quite expensive on big repo.
     # However: evolution is currently slow on them anyway.
-    nodes = (c.node() for c in repo.set('::%ln', pushop.futureheads))
+    nodes = (c.node() for c in repo.set(b'::%ln', pushop.futureheads))
     pushop.outobsmarkers = pushop.repo.obsstore.relevantmarkers(nodes)
 
 
-@pushdiscovery('bookmarks')
+@pushdiscovery(b'bookmarks')
 def _pushdiscoverybookmarks(pushop):
     ui = pushop.ui
     repo = pushop.repo.unfiltered()
     remote = pushop.remote
-    ui.debug("checking for updated bookmarks\n")
+    ui.debug(b"checking for updated bookmarks\n")
     ancestors = ()
     if pushop.revs:
         revnums = pycompat.maplist(repo.changelog.rev, pushop.revs)
         ancestors = repo.changelog.ancestors(revnums, inclusive=True)
 
-    remotebookmark = bookmod.unhexlifybookmarks(listkeys(remote, 'bookmarks'))
+    remotebookmark = bookmod.unhexlifybookmarks(listkeys(remote, b'bookmarks'))
 
     explicit = {
         repo._bookmarks.expandname(bookmark) for bookmark in pushop.bookmarks
@@ -833,7 +840,7 @@
     for b, scid, dcid in addsrc:
         if b in explicit:
             explicit.remove(b)
-            pushop.outbookmarks.append((b, '', scid))
+            pushop.outbookmarks.append((b, b'', scid))
     # search for overwritten bookmark
     for b, scid, dcid in list(advdst) + list(diverge) + list(differ):
         if b in explicit:
@@ -844,7 +851,7 @@
         if b in explicit:
             explicit.remove(b)
             # treat as "deleted locally"
-            pushop.outbookmarks.append((b, dcid, ''))
+            pushop.outbookmarks.append((b, dcid, b''))
     # identical bookmarks shouldn't get reported
     for b, scid, dcid in same:
         if b in explicit:
@@ -855,8 +862,8 @@
         # we should probably list all of them
         pushop.ui.warn(
             _(
-                'bookmark %s does not exist on the local '
-                'or remote repository!\n'
+                b'bookmark %s does not exist on the local '
+                b'or remote repository!\n'
             )
             % explicit[0]
         )
@@ -878,13 +885,13 @@
         # then, save the iteration
         if unfi.obsstore:
             # these messages are defined here for 80-char-limit reasons
-            mso = _("push includes obsolete changeset: %s!")
-            mspd = _("push includes phase-divergent changeset: %s!")
-            mscd = _("push includes content-divergent changeset: %s!")
+            mso = _(b"push includes obsolete changeset: %s!")
+            mspd = _(b"push includes phase-divergent changeset: %s!")
+            mscd = _(b"push includes content-divergent changeset: %s!")
             mst = {
-                "orphan": _("push includes orphan changeset: %s!"),
-                "phase-divergent": mspd,
-                "content-divergent": mscd,
+                b"orphan": _(b"push includes orphan changeset: %s!"),
+                b"phase-divergent": mspd,
+                b"content-divergent": mscd,
             }
             # If we are to push if there is at least one
             # obsolete or unstable changeset in missing, at
@@ -942,10 +949,12 @@
     # * with 'force', we do not check for push races,
     # * if we don't push anything, there is nothing to check.
     if not pushop.force and pushop.outgoing.missingheads:
-        allowunrelated = 'related' in bundler.capabilities.get('checkheads', ())
+        allowunrelated = b'related' in bundler.capabilities.get(
+            b'checkheads', ()
+        )
         emptyremote = pushop.pushbranchmap is None
         if not allowunrelated or emptyremote:
-            bundler.newpart('check:heads', data=iter(pushop.remoteheads))
+            bundler.newpart(b'check:heads', data=iter(pushop.remoteheads))
         else:
             affected = set()
             for branch, heads in pushop.pushbranchmap.iteritems():
@@ -956,7 +965,7 @@
                     affected |= remote - set(newheads)
             if affected:
                 data = iter(sorted(affected))
-                bundler.newpart('check:updated-heads', data=data)
+                bundler.newpart(b'check:updated-heads', data=data)
 
 
 def _pushing(pushop):
@@ -969,29 +978,29 @@
     )
 
 
-@b2partsgenerator('check-bookmarks')
+@b2partsgenerator(b'check-bookmarks')
 def _pushb2checkbookmarks(pushop, bundler):
     """insert bookmark move checking"""
     if not _pushing(pushop) or pushop.force:
         return
     b2caps = bundle2.bundle2caps(pushop.remote)
-    hasbookmarkcheck = 'bookmarks' in b2caps
+    hasbookmarkcheck = b'bookmarks' in b2caps
     if not (pushop.outbookmarks and hasbookmarkcheck):
         return
     data = []
     for book, old, new in pushop.outbookmarks:
         data.append((book, old))
     checkdata = bookmod.binaryencode(data)
-    bundler.newpart('check:bookmarks', data=checkdata)
-
-
-@b2partsgenerator('check-phases')
+    bundler.newpart(b'check:bookmarks', data=checkdata)
+
+
+@b2partsgenerator(b'check-phases')
 def _pushb2checkphases(pushop, bundler):
     """insert phase move checking"""
     if not _pushing(pushop) or pushop.force:
         return
     b2caps = bundle2.bundle2caps(pushop.remote)
-    hasphaseheads = 'heads' in b2caps.get('phases', ())
+    hasphaseheads = b'heads' in b2caps.get(b'phases', ())
     if pushop.remotephases is not None and hasphaseheads:
         # check that the remote phase has not changed
         checks = [[] for p in phases.allphases]
@@ -1001,18 +1010,18 @@
             for nodes in checks:
                 nodes.sort()
             checkdata = phases.binaryencode(checks)
-            bundler.newpart('check:phases', data=checkdata)
-
-
-@b2partsgenerator('changeset')
+            bundler.newpart(b'check:phases', data=checkdata)
+
+
+@b2partsgenerator(b'changeset')
 def _pushb2ctx(pushop, bundler):
     """handle changegroup push through bundle2
 
     addchangegroup result is stored in the ``pushop.cgresult`` attribute.
     """
-    if 'changesets' in pushop.stepsdone:
+    if b'changesets' in pushop.stepsdone:
         return
-    pushop.stepsdone.add('changesets')
+    pushop.stepsdone.add(b'changesets')
     # Send known heads to the server for race detection.
     if not _pushcheckoutgoing(pushop):
         return
@@ -1021,8 +1030,8 @@
     _pushb2ctxcheckheads(pushop, bundler)
 
     b2caps = bundle2.bundle2caps(pushop.remote)
-    version = '01'
-    cgversions = b2caps.get('changegroup')
+    version = b'01'
+    cgversions = b2caps.get(b'changegroup')
     if cgversions:  # 3.1 and 3.2 ship with an empty value
         cgversions = [
             v
@@ -1030,37 +1039,37 @@
             if v in changegroup.supportedoutgoingversions(pushop.repo)
         ]
         if not cgversions:
-            raise error.Abort(_('no common changegroup version'))
+            raise error.Abort(_(b'no common changegroup version'))
         version = max(cgversions)
     cgstream = changegroup.makestream(
-        pushop.repo, pushop.outgoing, version, 'push'
+        pushop.repo, pushop.outgoing, version, b'push'
     )
-    cgpart = bundler.newpart('changegroup', data=cgstream)
+    cgpart = bundler.newpart(b'changegroup', data=cgstream)
     if cgversions:
-        cgpart.addparam('version', version)
-    if 'treemanifest' in pushop.repo.requirements:
-        cgpart.addparam('treemanifest', '1')
+        cgpart.addparam(b'version', version)
+    if b'treemanifest' in pushop.repo.requirements:
+        cgpart.addparam(b'treemanifest', b'1')
 
     def handlereply(op):
         """extract addchangegroup returns from server reply"""
         cgreplies = op.records.getreplies(cgpart.id)
-        assert len(cgreplies['changegroup']) == 1
-        pushop.cgresult = cgreplies['changegroup'][0]['return']
+        assert len(cgreplies[b'changegroup']) == 1
+        pushop.cgresult = cgreplies[b'changegroup'][0][b'return']
 
     return handlereply
 
 
-@b2partsgenerator('phase')
+@b2partsgenerator(b'phase')
 def _pushb2phases(pushop, bundler):
     """handle phase push through bundle2"""
-    if 'phases' in pushop.stepsdone:
+    if b'phases' in pushop.stepsdone:
         return
     b2caps = bundle2.bundle2caps(pushop.remote)
     ui = pushop.repo.ui
 
-    legacyphase = 'phases' in ui.configlist('devel', 'legacy.exchange')
-    haspushkey = 'pushkey' in b2caps
-    hasphaseheads = 'heads' in b2caps.get('phases', ())
+    legacyphase = b'phases' in ui.configlist(b'devel', b'legacy.exchange')
+    haspushkey = b'pushkey' in b2caps
+    hasphaseheads = b'heads' in b2caps.get(b'phases', ())
 
     if hasphaseheads and not legacyphase:
         return _pushb2phaseheads(pushop, bundler)
@@ -1070,100 +1079,100 @@
 
 def _pushb2phaseheads(pushop, bundler):
     """push phase information through a bundle2 - binary part"""
-    pushop.stepsdone.add('phases')
+    pushop.stepsdone.add(b'phases')
     if pushop.outdatedphases:
         updates = [[] for p in phases.allphases]
         updates[0].extend(h.node() for h in pushop.outdatedphases)
         phasedata = phases.binaryencode(updates)
-        bundler.newpart('phase-heads', data=phasedata)
+        bundler.newpart(b'phase-heads', data=phasedata)
 
 
 def _pushb2phasespushkey(pushop, bundler):
     """push phase information through a bundle2 - pushkey part"""
-    pushop.stepsdone.add('phases')
+    pushop.stepsdone.add(b'phases')
     part2node = []
 
     def handlefailure(pushop, exc):
         targetid = int(exc.partid)
         for partid, node in part2node:
             if partid == targetid:
-                raise error.Abort(_('updating %s to public failed') % node)
+                raise error.Abort(_(b'updating %s to public failed') % node)
 
     enc = pushkey.encode
     for newremotehead in pushop.outdatedphases:
-        part = bundler.newpart('pushkey')
-        part.addparam('namespace', enc('phases'))
-        part.addparam('key', enc(newremotehead.hex()))
-        part.addparam('old', enc('%d' % phases.draft))
-        part.addparam('new', enc('%d' % phases.public))
+        part = bundler.newpart(b'pushkey')
+        part.addparam(b'namespace', enc(b'phases'))
+        part.addparam(b'key', enc(newremotehead.hex()))
+        part.addparam(b'old', enc(b'%d' % phases.draft))
+        part.addparam(b'new', enc(b'%d' % phases.public))
         part2node.append((part.id, newremotehead))
         pushop.pkfailcb[part.id] = handlefailure
 
     def handlereply(op):
         for partid, node in part2node:
             partrep = op.records.getreplies(partid)
-            results = partrep['pushkey']
+            results = partrep[b'pushkey']
             assert len(results) <= 1
             msg = None
             if not results:
-                msg = _('server ignored update of %s to public!\n') % node
-            elif not int(results[0]['return']):
-                msg = _('updating %s to public failed!\n') % node
+                msg = _(b'server ignored update of %s to public!\n') % node
+            elif not int(results[0][b'return']):
+                msg = _(b'updating %s to public failed!\n') % node
             if msg is not None:
                 pushop.ui.warn(msg)
 
     return handlereply
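
Sketch of the wire encoding used by these pushkey parts: phases travel as stringified integers, so moving a head from draft (1) to public (0) sends old=b'1', new=b'0'.

    draft, public = 1, 0  # mirrors phases.draft, phases.public
    old, new = b'%d' % draft, b'%d' % public
    assert (old, new) == (b'1', b'0')
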
 
 
-@b2partsgenerator('obsmarkers')
+@b2partsgenerator(b'obsmarkers')
 def _pushb2obsmarkers(pushop, bundler):
-    if 'obsmarkers' in pushop.stepsdone:
+    if b'obsmarkers' in pushop.stepsdone:
         return
     remoteversions = bundle2.obsmarkersversion(bundler.capabilities)
     if obsolete.commonversion(remoteversions) is None:
         return
-    pushop.stepsdone.add('obsmarkers')
+    pushop.stepsdone.add(b'obsmarkers')
     if pushop.outobsmarkers:
         markers = sorted(pushop.outobsmarkers)
         bundle2.buildobsmarkerspart(bundler, markers)
 
 
-@b2partsgenerator('bookmarks')
+@b2partsgenerator(b'bookmarks')
 def _pushb2bookmarks(pushop, bundler):
     """handle bookmark push through bundle2"""
-    if 'bookmarks' in pushop.stepsdone:
+    if b'bookmarks' in pushop.stepsdone:
         return
     b2caps = bundle2.bundle2caps(pushop.remote)
 
-    legacy = pushop.repo.ui.configlist('devel', 'legacy.exchange')
-    legacybooks = 'bookmarks' in legacy
-
-    if not legacybooks and 'bookmarks' in b2caps:
+    legacy = pushop.repo.ui.configlist(b'devel', b'legacy.exchange')
+    legacybooks = b'bookmarks' in legacy
+
+    if not legacybooks and b'bookmarks' in b2caps:
         return _pushb2bookmarkspart(pushop, bundler)
-    elif 'pushkey' in b2caps:
+    elif b'pushkey' in b2caps:
         return _pushb2bookmarkspushkey(pushop, bundler)
 
 
 def _bmaction(old, new):
     """small utility for bookmark pushing"""
     if not old:
-        return 'export'
+        return b'export'
     elif not new:
-        return 'delete'
-    return 'update'
+        return b'delete'
+    return b'update'
 
 
 def _abortonsecretctx(pushop, node, b):
     """abort if a given bookmark points to a secret changeset"""
     if node and pushop.repo[node].phase() == phases.secret:
         raise error.Abort(
-            _('cannot push bookmark %s as it points to a secret' ' changeset')
+            _(b'cannot push bookmark %s as it points to a secret' b' changeset')
             % b
         )
 
 
 def _pushb2bookmarkspart(pushop, bundler):
-    pushop.stepsdone.add('bookmarks')
+    pushop.stepsdone.add(b'bookmarks')
     if not pushop.outbookmarks:
         return
 
@@ -1174,7 +1183,7 @@
         data.append((book, new))
         allactions.append((book, _bmaction(old, new)))
     checkdata = bookmod.binaryencode(data)
-    bundler.newpart('bookmarks', data=checkdata)
+    bundler.newpart(b'bookmarks', data=checkdata)
 
     def handlereply(op):
         ui = pushop.ui
@@ -1186,7 +1195,7 @@
 
 
 def _pushb2bookmarkspushkey(pushop, bundler):
-    pushop.stepsdone.add('bookmarks')
+    pushop.stepsdone.add(b'bookmarks')
     part2book = []
     enc = pushkey.encode
 
@@ -1200,16 +1209,16 @@
 
     for book, old, new in pushop.outbookmarks:
         _abortonsecretctx(pushop, new, book)
-        part = bundler.newpart('pushkey')
-        part.addparam('namespace', enc('bookmarks'))
-        part.addparam('key', enc(book))
-        part.addparam('old', enc(hex(old)))
-        part.addparam('new', enc(hex(new)))
-        action = 'update'
+        part = bundler.newpart(b'pushkey')
+        part.addparam(b'namespace', enc(b'bookmarks'))
+        part.addparam(b'key', enc(book))
+        part.addparam(b'old', enc(hex(old)))
+        part.addparam(b'new', enc(hex(new)))
+        action = b'update'
         if not old:
-            action = 'export'
+            action = b'export'
         elif not new:
-            action = 'delete'
+            action = b'delete'
         part2book.append((part.id, book, action))
         pushop.pkfailcb[part.id] = handlefailure
 
@@ -1217,12 +1226,12 @@
         ui = pushop.ui
         for partid, book, action in part2book:
             partrep = op.records.getreplies(partid)
-            results = partrep['pushkey']
+            results = partrep[b'pushkey']
             assert len(results) <= 1
             if not results:
-                pushop.ui.warn(_('server ignored bookmark %s update\n') % book)
+                pushop.ui.warn(_(b'server ignored bookmark %s update\n') % book)
             else:
-                ret = int(results[0]['return'])
+                ret = int(results[0][b'return'])
                 if ret:
                     ui.status(bookmsgmap[action][0] % book)
                 else:
@@ -1233,23 +1242,23 @@
     return handlereply
 
 
-@b2partsgenerator('pushvars', idx=0)
+@b2partsgenerator(b'pushvars', idx=0)
 def _getbundlesendvars(pushop, bundler):
     '''send shellvars via bundle2'''
     pushvars = pushop.pushvars
     if pushvars:
         shellvars = {}
         for raw in pushvars:
-            if '=' not in raw:
+            if b'=' not in raw:
                 msg = (
-                    "unable to parse variable '%s', should follow "
-                    "'KEY=VALUE' or 'KEY=' format"
+                    b"unable to parse variable '%s', should follow "
+                    b"'KEY=VALUE' or 'KEY=' format"
                 )
                 raise error.Abort(msg % raw)
-            k, v = raw.split('=', 1)
+            k, v = raw.split(b'=', 1)
             shellvars[k] = v
 
-        part = bundler.newpart('pushvars')
+        part = bundler.newpart(b'pushvars')
 
         for key, value in shellvars.iteritems():
             part.addparam(key, value, mandatory=False)
@@ -1262,14 +1271,14 @@
     evolve in the future."""
     bundler = bundle2.bundle20(pushop.ui, bundle2.bundle2caps(pushop.remote))
     pushback = pushop.trmanager and pushop.ui.configbool(
-        'experimental', 'bundle2.pushback'
+        b'experimental', b'bundle2.pushback'
     )
 
     # create reply capability
     capsblob = bundle2.encodecaps(
-        bundle2.getrepocaps(pushop.repo, allowpushback=pushback, role='client')
+        bundle2.getrepocaps(pushop.repo, allowpushback=pushback, role=b'client')
     )
-    bundler.newpart('replycaps', data=capsblob)
+    bundler.newpart(b'replycaps', data=capsblob)
     replyhandlers = []
     for partgenname in b2partsgenorder:
         partgen = b2partsgenmapping[partgenname]
@@ -1284,27 +1293,27 @@
         try:
             with pushop.remote.commandexecutor() as e:
                 reply = e.callcommand(
-                    'unbundle',
+                    b'unbundle',
                     {
-                        'bundle': stream,
-                        'heads': ['force'],
-                        'url': pushop.remote.url(),
+                        b'bundle': stream,
+                        b'heads': [b'force'],
+                        b'url': pushop.remote.url(),
                     },
                 ).result()
         except error.BundleValueError as exc:
-            raise error.Abort(_('missing support for %s') % exc)
+            raise error.Abort(_(b'missing support for %s') % exc)
         try:
             trgetter = None
             if pushback:
                 trgetter = pushop.trmanager.transaction
             op = bundle2.processbundle(pushop.repo, reply, trgetter)
         except error.BundleValueError as exc:
-            raise error.Abort(_('missing support for %s') % exc)
+            raise error.Abort(_(b'missing support for %s') % exc)
         except bundle2.AbortFromPart as exc:
-            pushop.ui.status(_('remote: %s\n') % exc)
+            pushop.ui.status(_(b'remote: %s\n') % exc)
             if exc.hint is not None:
-                pushop.ui.status(_('remote: %s\n') % ('(%s)' % exc.hint))
-            raise error.Abort(_('push failed on remote'))
+                pushop.ui.status(_(b'remote: %s\n') % (b'(%s)' % exc.hint))
+            raise error.Abort(_(b'push failed on remote'))
     except error.PushkeyFailed as exc:
         partid = int(exc.partid)
         if partid not in pushop.pkfailcb:
@@ -1316,14 +1325,14 @@
 
 def _pushchangeset(pushop):
     """Make the actual push of changeset bundle to remote repo"""
-    if 'changesets' in pushop.stepsdone:
+    if b'changesets' in pushop.stepsdone:
         return
-    pushop.stepsdone.add('changesets')
+    pushop.stepsdone.add(b'changesets')
     if not _pushcheckoutgoing(pushop):
         return
 
     # Should have verified this in push().
-    assert pushop.remote.capable('unbundle')
+    assert pushop.remote.capable(b'unbundle')
 
     pushop.repo.prepushoutgoinghooks(pushop)
     outgoing = pushop.outgoing
@@ -1338,14 +1347,14 @@
         cg = changegroup.makechangegroup(
             pushop.repo,
             outgoing,
-            '01',
-            'push',
+            b'01',
+            b'push',
             fastpath=True,
             bundlecaps=bundlecaps,
         )
     else:
         cg = changegroup.makechangegroup(
-            pushop.repo, outgoing, '01', 'push', bundlecaps=bundlecaps
+            pushop.repo, outgoing, b'01', b'push', bundlecaps=bundlecaps
         )
 
     # apply changegroup to remote
@@ -1354,7 +1363,7 @@
     # finds it has different heads (someone else won
     # commit/push race), server aborts.
     if pushop.force:
-        remoteheads = ['force']
+        remoteheads = [b'force']
     else:
         remoteheads = pushop.remoteheads
     # ssh: return remote's addchangegroup()
@@ -1366,12 +1375,12 @@
     """synchronise phase information locally and remotely"""
     cheads = pushop.commonheads
     # even when we don't push, exchanging phase data is useful
-    remotephases = listkeys(pushop.remote, 'phases')
+    remotephases = listkeys(pushop.remote, b'phases')
     if (
-        pushop.ui.configbool('ui', '_usedassubrepo')
+        pushop.ui.configbool(b'ui', b'_usedassubrepo')
         and remotephases  # server supports phases
         and pushop.cgresult is None  # nothing was pushed
-        and remotephases.get('publishing', False)
+        and remotephases.get(b'publishing', False)
     ):
         # When:
         # - this is a subrepo push
@@ -1382,7 +1391,7 @@
         # We drop the possible phase synchronisation done as a
         # courtesy to publish changesets that are possibly draft
         # locally on the remote.
-        remotephases = {'publishing': 'True'}
+        remotephases = {b'publishing': b'True'}
     if not remotephases:  # old server or public only reply from non-publishing
         _localphasemove(pushop, cheads)
         # don't push any phase data as there is nothing to push
@@ -1390,7 +1399,7 @@
         ana = phases.analyzeremotephases(pushop.repo, cheads, remotephases)
         pheads, droots = ana
         ### Apply remote phase on local
-        if remotephases.get('publishing', False):
+        if remotephases.get(b'publishing', False):
             _localphasemove(pushop, cheads)
         else:  # publish = False
             _localphasemove(pushop, pheads)
@@ -1398,14 +1407,14 @@
         ### Apply local phase on remote
 
         if pushop.cgresult:
-            if 'phases' in pushop.stepsdone:
+            if b'phases' in pushop.stepsdone:
                 # phases already pushed through bundle2
                 return
             outdated = pushop.outdatedphases
         else:
             outdated = pushop.fallbackoutdatedphases
 
-        pushop.stepsdone.add('phases')
+        pushop.stepsdone.add(b'phases')
 
         # filter heads already turned public by the push
         outdated = [c for c in outdated if c.node() not in pheads]
@@ -1413,18 +1422,18 @@
         for newremotehead in outdated:
             with pushop.remote.commandexecutor() as e:
                 r = e.callcommand(
-                    'pushkey',
+                    b'pushkey',
                     {
-                        'namespace': 'phases',
-                        'key': newremotehead.hex(),
-                        'old': '%d' % phases.draft,
-                        'new': '%d' % phases.public,
+                        b'namespace': b'phases',
+                        b'key': newremotehead.hex(),
+                        b'old': b'%d' % phases.draft,
+                        b'new': b'%d' % phases.public,
                     },
                 ).result()
 
             if not r:
                 pushop.ui.warn(
-                    _('updating %s to public failed!\n') % newremotehead
+                    _(b'updating %s to public failed!\n') % newremotehead
                 )
 
 
@@ -1443,8 +1452,8 @@
         if actualmoves:
             pushop.ui.status(
                 _(
-                    'cannot lock source repo, skipping '
-                    'local %s phase update\n'
+                    b'cannot lock source repo, skipping '
+                    b'local %s phase update\n'
                 )
                 % phasestr
             )
@@ -1452,47 +1461,47 @@
 
 def _pushobsolete(pushop):
     """utility function to push obsolete markers to a remote"""
-    if 'obsmarkers' in pushop.stepsdone:
+    if b'obsmarkers' in pushop.stepsdone:
         return
     repo = pushop.repo
     remote = pushop.remote
-    pushop.stepsdone.add('obsmarkers')
+    pushop.stepsdone.add(b'obsmarkers')
     if pushop.outobsmarkers:
-        pushop.ui.debug('try to push obsolete markers to remote\n')
+        pushop.ui.debug(b'try to push obsolete markers to remote\n')
         rslts = []
         remotedata = obsolete._pushkeyescape(sorted(pushop.outobsmarkers))
         for key in sorted(remotedata, reverse=True):
             # reverse sort to ensure we end with dump0
             data = remotedata[key]
-            rslts.append(remote.pushkey('obsolete', key, '', data))
+            rslts.append(remote.pushkey(b'obsolete', key, b'', data))
         if [r for r in rslts if not r]:
-            msg = _('failed to push some obsolete markers!\n')
+            msg = _(b'failed to push some obsolete markers!\n')
             repo.ui.warn(msg)
 
 
 def _pushbookmark(pushop):
     """Update bookmark position on remote"""
-    if pushop.cgresult == 0 or 'bookmarks' in pushop.stepsdone:
+    if pushop.cgresult == 0 or b'bookmarks' in pushop.stepsdone:
         return
-    pushop.stepsdone.add('bookmarks')
+    pushop.stepsdone.add(b'bookmarks')
     ui = pushop.ui
     remote = pushop.remote
 
     for b, old, new in pushop.outbookmarks:
-        action = 'update'
+        action = b'update'
         if not old:
-            action = 'export'
+            action = b'export'
         elif not new:
-            action = 'delete'
+            action = b'delete'
 
         with remote.commandexecutor() as e:
             r = e.callcommand(
-                'pushkey',
+                b'pushkey',
                 {
-                    'namespace': 'bookmarks',
-                    'key': b,
-                    'old': hex(old),
-                    'new': hex(new),
+                    b'namespace': b'bookmarks',
+                    b'key': b,
+                    b'old': hex(old),
+                    b'new': hex(new),
                 },
             ).result()
 
@@ -1610,10 +1619,10 @@
     def transaction(self):
         """Return an open transaction object, constructing if necessary"""
         if not self._tr:
-            trname = '%s\n%s' % (self.source, util.hidepassword(self.url))
+            trname = b'%s\n%s' % (self.source, util.hidepassword(self.url))
             self._tr = self.repo.transaction(trname)
-            self._tr.hookargs['source'] = self.source
-            self._tr.hookargs['url'] = self.url
+            self._tr.hookargs[b'source'] = self.source
+            self._tr.hookargs[b'url'] = self.url
         return self._tr
 
     def close(self):
@@ -1629,7 +1638,7 @@
 
 def listkeys(remote, namespace):
     with remote.commandexecutor() as e:
-        return e.callcommand('listkeys', {'namespace': namespace}).result()
+        return e.callcommand(b'listkeys', {b'namespace': namespace}).result()
 
 
 def _fullpullbundle2(repo, pullop):
@@ -1647,12 +1656,12 @@
 
     def headsofdiff(h1, h2):
         """Returns heads(h1 % h2)"""
-        res = unfi.set('heads(%ln %% %ln)', h1, h2)
+        res = unfi.set(b'heads(%ln %% %ln)', h1, h2)
         return set(ctx.node() for ctx in res)
 
     def headsofunion(h1, h2):
         """Returns heads((h1 + h2) - null)"""
-        res = unfi.set('heads((%ln + %ln - null))', h1, h2)
+        res = unfi.set(b'heads((%ln + %ln - null))', h1, h2)
         return set(ctx.node() for ctx in res)
 
     while True:
@@ -1744,19 +1753,19 @@
         missing = set(peerlocal.requirements) - pullop.repo.supported
         if missing:
             msg = _(
-                "required features are not"
-                " supported in the destination:"
-                " %s"
-            ) % (', '.join(sorted(missing)))
+                b"required features are not"
+                b" supported in the destination:"
+                b" %s"
+            ) % (b', '.join(sorted(missing)))
             raise error.Abort(msg)
 
-    pullop.trmanager = transactionmanager(repo, 'pull', remote.url())
+    pullop.trmanager = transactionmanager(repo, b'pull', remote.url())
     wlock = util.nullcontextmanager()
     if not bookmod.bookmarksinstore(repo):
         wlock = repo.wlock()
     with wlock, repo.lock(), pullop.trmanager:
         # Use the modern wire protocol, if available.
-        if remote.capable('command-changesetdata'):
+        if remote.capable(b'command-changesetdata'):
             exchangev2.pull(pullop)
         else:
             # This should ideally be in _pullbundle2(). However, it needs to run
@@ -1772,7 +1781,7 @@
             _pullobsolete(pullop)
 
     # storing remotenames
-    if repo.ui.configbool('experimental', 'remotenames'):
+    if repo.ui.configbool(b'experimental', b'remotenames'):
         logexchange.pullremotenames(repo, remote)
 
     return pullop
@@ -1813,7 +1822,7 @@
         step(pullop)
 
 
-@pulldiscovery('b1:bookmarks')
+@pulldiscovery(b'b1:bookmarks')
 def _pullbookmarkbundle1(pullop):
     """fetch bookmark data in bundle1 case
 
@@ -1821,15 +1830,15 @@
     discovery to reduce the chance and impact of race conditions."""
     if pullop.remotebookmarks is not None:
         return
-    if pullop.canusebundle2 and 'listkeys' in pullop.remotebundle2caps:
+    if pullop.canusebundle2 and b'listkeys' in pullop.remotebundle2caps:
         # all known bundle2 servers now support listkeys, but let's be nice
         # to new implementations.
         return
-    books = listkeys(pullop.remote, 'bookmarks')
+    books = listkeys(pullop.remote, b'bookmarks')
     pullop.remotebookmarks = bookmod.unhexlifybookmarks(books)
 
 
-@pulldiscovery('changegroup')
+@pulldiscovery(b'changegroup')
 def _pulldiscoverychangegroup(pullop):
     """discovery phase for the pull
 
@@ -1866,7 +1875,7 @@
     """pull data using bundle2
 
     For now, the only supported data is the changegroup.
-    kwargs = {'bundlecaps': caps20to10(pullop.repo, role='client')}
+    kwargs = {b'bundlecaps': caps20to10(pullop.repo, role=b'client')}
 
     # make ui easier to access
     ui = pullop.repo.ui
@@ -1876,60 +1885,60 @@
     streaming = streamclone.canperformstreamclone(pullop, bundle2=True)[0]
 
     # declare pull perimeters
-    kwargs['common'] = pullop.common
-    kwargs['heads'] = pullop.heads or pullop.rheads
+    kwargs[b'common'] = pullop.common
+    kwargs[b'heads'] = pullop.heads or pullop.rheads
 
     # check whether the server supports narrow; if so, add includepats and excludepats
     servernarrow = pullop.remote.capable(wireprototypes.NARROWCAP)
     if servernarrow and pullop.includepats:
-        kwargs['includepats'] = pullop.includepats
+        kwargs[b'includepats'] = pullop.includepats
     if servernarrow and pullop.excludepats:
-        kwargs['excludepats'] = pullop.excludepats
+        kwargs[b'excludepats'] = pullop.excludepats
 
     if streaming:
-        kwargs['cg'] = False
-        kwargs['stream'] = True
-        pullop.stepsdone.add('changegroup')
-        pullop.stepsdone.add('phases')
+        kwargs[b'cg'] = False
+        kwargs[b'stream'] = True
+        pullop.stepsdone.add(b'changegroup')
+        pullop.stepsdone.add(b'phases')
 
     else:
         # pulling changegroup
-        pullop.stepsdone.add('changegroup')
-
-        kwargs['cg'] = pullop.fetch
-
-        legacyphase = 'phases' in ui.configlist('devel', 'legacy.exchange')
-        hasbinaryphase = 'heads' in pullop.remotebundle2caps.get('phases', ())
+        pullop.stepsdone.add(b'changegroup')
+
+        kwargs[b'cg'] = pullop.fetch
+
+        legacyphase = b'phases' in ui.configlist(b'devel', b'legacy.exchange')
+        hasbinaryphase = b'heads' in pullop.remotebundle2caps.get(b'phases', ())
         if not legacyphase and hasbinaryphase:
-            kwargs['phases'] = True
-            pullop.stepsdone.add('phases')
-
-        if 'listkeys' in pullop.remotebundle2caps:
-            if 'phases' not in pullop.stepsdone:
-                kwargs['listkeys'] = ['phases']
+            kwargs[b'phases'] = True
+            pullop.stepsdone.add(b'phases')
+
+        if b'listkeys' in pullop.remotebundle2caps:
+            if b'phases' not in pullop.stepsdone:
+                kwargs[b'listkeys'] = [b'phases']
 
     bookmarksrequested = False
-    legacybookmark = 'bookmarks' in ui.configlist('devel', 'legacy.exchange')
-    hasbinarybook = 'bookmarks' in pullop.remotebundle2caps
+    legacybookmark = b'bookmarks' in ui.configlist(b'devel', b'legacy.exchange')
+    hasbinarybook = b'bookmarks' in pullop.remotebundle2caps
 
     if pullop.remotebookmarks is not None:
-        pullop.stepsdone.add('request-bookmarks')
+        pullop.stepsdone.add(b'request-bookmarks')
 
     if (
-        'request-bookmarks' not in pullop.stepsdone
+        b'request-bookmarks' not in pullop.stepsdone
         and pullop.remotebookmarks is None
         and not legacybookmark
         and hasbinarybook
     ):
-        kwargs['bookmarks'] = True
+        kwargs[b'bookmarks'] = True
         bookmarksrequested = True
 
-    if 'listkeys' in pullop.remotebundle2caps:
-        if 'request-bookmarks' not in pullop.stepsdone:
+    if b'listkeys' in pullop.remotebundle2caps:
+        if b'request-bookmarks' not in pullop.stepsdone:
             # make sure to always include bookmark data when migrating
             # `hg incoming --bundle` to using this function.
-            pullop.stepsdone.add('request-bookmarks')
-            kwargs.setdefault('listkeys', []).append('bookmarks')
+            pullop.stepsdone.add(b'request-bookmarks')
+            kwargs.setdefault(b'listkeys', []).append(b'bookmarks')
 
     # If this is a full pull / clone and the server supports the clone bundles
     # feature, tell the server whether we attempted a clone bundle. The
@@ -1937,61 +1946,61 @@
     # will enable the server to treat clients that support clone bundles
     # differently from those that don't.
     if (
-        pullop.remote.capable('clonebundles')
+        pullop.remote.capable(b'clonebundles')
         and pullop.heads is None
         and list(pullop.common) == [nullid]
     ):
-        kwargs['cbattempted'] = pullop.clonebundleattempted
+        kwargs[b'cbattempted'] = pullop.clonebundleattempted
 
     if streaming:
-        pullop.repo.ui.status(_('streaming all changes\n'))
+        pullop.repo.ui.status(_(b'streaming all changes\n'))
     elif not pullop.fetch:
-        pullop.repo.ui.status(_("no changes found\n"))
+        pullop.repo.ui.status(_(b"no changes found\n"))
         pullop.cgresult = 0
     else:
         if pullop.heads is None and list(pullop.common) == [nullid]:
-            pullop.repo.ui.status(_("requesting all changes\n"))
+            pullop.repo.ui.status(_(b"requesting all changes\n"))
     if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
         remoteversions = bundle2.obsmarkersversion(pullop.remotebundle2caps)
         if obsolete.commonversion(remoteversions) is not None:
-            kwargs['obsmarkers'] = True
-            pullop.stepsdone.add('obsmarkers')
+            kwargs[b'obsmarkers'] = True
+            pullop.stepsdone.add(b'obsmarkers')
     _pullbundle2extraprepare(pullop, kwargs)
 
     with pullop.remote.commandexecutor() as e:
         args = dict(kwargs)
-        args['source'] = 'pull'
-        bundle = e.callcommand('getbundle', args).result()
+        args[b'source'] = b'pull'
+        bundle = e.callcommand(b'getbundle', args).result()
 
         try:
             op = bundle2.bundleoperation(
-                pullop.repo, pullop.gettransaction, source='pull'
+                pullop.repo, pullop.gettransaction, source=b'pull'
             )
-            op.modes['bookmarks'] = 'records'
+            op.modes[b'bookmarks'] = b'records'
             bundle2.processbundle(pullop.repo, bundle, op=op)
         except bundle2.AbortFromPart as exc:
-            pullop.repo.ui.status(_('remote: abort: %s\n') % exc)
-            raise error.Abort(_('pull failed on remote'), hint=exc.hint)
+            pullop.repo.ui.status(_(b'remote: abort: %s\n') % exc)
+            raise error.Abort(_(b'pull failed on remote'), hint=exc.hint)
         except error.BundleValueError as exc:
-            raise error.Abort(_('missing support for %s') % exc)
+            raise error.Abort(_(b'missing support for %s') % exc)
 
     if pullop.fetch:
         pullop.cgresult = bundle2.combinechangegroupresults(op)
 
     # processing phases change
-    for namespace, value in op.records['listkeys']:
-        if namespace == 'phases':
+    for namespace, value in op.records[b'listkeys']:
+        if namespace == b'phases':
             _pullapplyphases(pullop, value)
 
     # processing bookmark update
     if bookmarksrequested:
         books = {}
-        for record in op.records['bookmarks']:
-            books[record['bookmark']] = record["node"]
+        for record in op.records[b'bookmarks']:
+            books[record[b'bookmark']] = record[b"node"]
         pullop.remotebookmarks = books
     else:
-        for namespace, value in op.records['listkeys']:
-            if namespace == 'bookmarks':
+        for namespace, value in op.records[b'listkeys']:
+            if namespace == b'bookmarks':
                 pullop.remotebookmarks = bookmod.unhexlifybookmarks(value)
 
     # bookmark data were either already there or pulled in the bundle
@@ -2008,70 +2017,70 @@
     # We delay opening the transaction as late as possible so we
     # don't open a transaction for nothing and don't break a future
     # useful rollback call
-    if 'changegroup' in pullop.stepsdone:
+    if b'changegroup' in pullop.stepsdone:
         return
-    pullop.stepsdone.add('changegroup')
+    pullop.stepsdone.add(b'changegroup')
     if not pullop.fetch:
-        pullop.repo.ui.status(_("no changes found\n"))
+        pullop.repo.ui.status(_(b"no changes found\n"))
         pullop.cgresult = 0
         return
     tr = pullop.gettransaction()
     if pullop.heads is None and list(pullop.common) == [nullid]:
-        pullop.repo.ui.status(_("requesting all changes\n"))
-    elif pullop.heads is None and pullop.remote.capable('changegroupsubset'):
+        pullop.repo.ui.status(_(b"requesting all changes\n"))
+    elif pullop.heads is None and pullop.remote.capable(b'changegroupsubset'):
         # issue1320, avoid a race if remote changed after discovery
         pullop.heads = pullop.rheads
 
-    if pullop.remote.capable('getbundle'):
+    if pullop.remote.capable(b'getbundle'):
         # TODO: get bundlecaps from remote
         cg = pullop.remote.getbundle(
-            'pull', common=pullop.common, heads=pullop.heads or pullop.rheads
+            b'pull', common=pullop.common, heads=pullop.heads or pullop.rheads
         )
     elif pullop.heads is None:
         with pullop.remote.commandexecutor() as e:
             cg = e.callcommand(
-                'changegroup', {'nodes': pullop.fetch, 'source': 'pull',}
+                b'changegroup', {b'nodes': pullop.fetch, b'source': b'pull',}
             ).result()
 
-    elif not pullop.remote.capable('changegroupsubset'):
+    elif not pullop.remote.capable(b'changegroupsubset'):
         raise error.Abort(
             _(
-                "partial pull cannot be done because "
-                "other repository doesn't support "
-                "changegroupsubset."
+                b"partial pull cannot be done because "
+                b"other repository doesn't support "
+                b"changegroupsubset."
             )
         )
     else:
         with pullop.remote.commandexecutor() as e:
             cg = e.callcommand(
-                'changegroupsubset',
+                b'changegroupsubset',
                 {
-                    'bases': pullop.fetch,
-                    'heads': pullop.heads,
-                    'source': 'pull',
+                    b'bases': pullop.fetch,
+                    b'heads': pullop.heads,
+                    b'source': b'pull',
                 },
             ).result()
 
     bundleop = bundle2.applybundle(
-        pullop.repo, cg, tr, 'pull', pullop.remote.url()
+        pullop.repo, cg, tr, b'pull', pullop.remote.url()
     )
     pullop.cgresult = bundle2.combinechangegroupresults(bundleop)
 
 
 def _pullphase(pullop):
     # Get remote phases data from remote
-    if 'phases' in pullop.stepsdone:
+    if b'phases' in pullop.stepsdone:
         return
-    remotephases = listkeys(pullop.remote, 'phases')
+    remotephases = listkeys(pullop.remote, b'phases')
     _pullapplyphases(pullop, remotephases)
 
 
 def _pullapplyphases(pullop, remotephases):
     """apply phase movement from observed remote state"""
-    if 'phases' in pullop.stepsdone:
+    if b'phases' in pullop.stepsdone:
         return
-    pullop.stepsdone.add('phases')
-    publishing = bool(remotephases.get('publishing', False))
+    pullop.stepsdone.add(b'phases')
+    publishing = bool(remotephases.get(b'publishing', False))
     if remotephases and not publishing:
         # remote is new and non-publishing
         pheads, _dr = phases.analyzeremotephases(
@@ -2104,9 +2113,9 @@
 
 def _pullbookmarks(pullop):
     """process the remote bookmark information to update the local one"""
-    if 'bookmarks' in pullop.stepsdone:
+    if b'bookmarks' in pullop.stepsdone:
         return
-    pullop.stepsdone.add('bookmarks')
+    pullop.stepsdone.add(b'bookmarks')
     repo = pullop.repo
     remotebookmarks = pullop.remotebookmarks
     bookmod.updatefromremote(
@@ -2127,18 +2136,18 @@
     a new transaction has been created (when applicable).
 
     Exists mostly to allow overriding for experimentation purposes"""
-    if 'obsmarkers' in pullop.stepsdone:
+    if b'obsmarkers' in pullop.stepsdone:
         return
-    pullop.stepsdone.add('obsmarkers')
+    pullop.stepsdone.add(b'obsmarkers')
     tr = None
     if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
-        pullop.repo.ui.debug('fetching remote obsolete markers\n')
-        remoteobs = listkeys(pullop.remote, 'obsolete')
-        if 'dump0' in remoteobs:
+        pullop.repo.ui.debug(b'fetching remote obsolete markers\n')
+        remoteobs = listkeys(pullop.remote, b'obsolete')
+        if b'dump0' in remoteobs:
             tr = pullop.gettransaction()
             markers = []
             for key in sorted(remoteobs, reverse=True):
-                if key.startswith('dump'):
+                if key.startswith(b'dump'):
                     data = util.b85decode(remoteobs[key])
                     version, newmarks = obsolete._readmarkers(data)
                     markers += newmarks
@@ -2156,29 +2165,29 @@
     """
     ui = repo.ui
     # TODO this assumes existence of HTTP and is a layering violation.
-    username = ui.shortuser(ui.environ.get('REMOTE_USER') or ui.username())
+    username = ui.shortuser(ui.environ.get(b'REMOTE_USER') or ui.username())
     user_includes = ui.configlist(
         _NARROWACL_SECTION,
-        username + '.includes',
-        ui.configlist(_NARROWACL_SECTION, 'default.includes'),
+        username + b'.includes',
+        ui.configlist(_NARROWACL_SECTION, b'default.includes'),
     )
     user_excludes = ui.configlist(
         _NARROWACL_SECTION,
-        username + '.excludes',
-        ui.configlist(_NARROWACL_SECTION, 'default.excludes'),
+        username + b'.excludes',
+        ui.configlist(_NARROWACL_SECTION, b'default.excludes'),
     )
     if not user_includes:
         raise error.Abort(
-            _("{} configuration for user {} is empty").format(
-                _NARROWACL_SECTION, username
-            )
+            _(b"%s configuration for user %s is empty")
+            % (_NARROWACL_SECTION, username)
         )
 
     user_includes = [
-        'path:.' if p == '*' else 'path:' + p for p in user_includes
+        b'path:.' if p == b'*' else b'path:' + p for p in user_includes
     ]
     user_excludes = [
-        'path:.' if p == '*' else 'path:' + p for p in user_excludes
+        b'path:.' if p == b'*' else b'path:' + p for p in user_excludes
     ]
 
     req_includes = set(kwargs.get(r'includepats', []))
@@ -2190,7 +2199,7 @@
 
     if invalid_includes:
         raise error.Abort(
-            _("The following includes are not accessible for {}: {}").format(
-                username, invalid_includes
-            )
+            _(b"The following includes are not accessible for %s: %s")
+            % (username, invalid_includes)
         )
@@ -2265,7 +2274,7 @@
         r1, r2, r3 = sorted(ellipsisroots[head])
         for nr1, nr2 in ((r2, r3), (r1, r3), (r1, r2)):
             mid = repo.revs(
-                'sort(merge() & %d::%d & %d::%d, -rev)', nr1, head, nr2, head
+                b'sort(merge() & %d::%d & %d::%d, -rev)', nr1, head, nr2, head
             )
             for j in mid:
                 if j == nr2:
@@ -2273,7 +2282,10 @@
                 if j not in ellipsisroots or len(ellipsisroots[j]) < 2:
                     return j, (nr1, nr2)
         raise error.Abort(
-            _('Failed to split up ellipsis node! head: %d, ' 'roots: %d %d %d')
+            _(
+                b'Failed to split up ellipsis node! head: %d, '
+                b'roots: %d %d %d'
+            )
             % (head, r1, r2, r3)
         )
 
@@ -2338,9 +2350,9 @@
 
 def caps20to10(repo, role):
     """return a set with appropriate options to use bundle20 during getbundle"""
-    caps = {'HG20'}
+    caps = {b'HG20'}
     capsblob = bundle2.encodecaps(bundle2.getrepocaps(repo, role=role))
-    caps.add('bundle2=' + urlreq.quote(capsblob))
+    caps.add(b'bundle2=' + urlreq.quote(capsblob))
     return caps
 
 
@@ -2377,7 +2389,7 @@
 
 def bundle2requested(bundlecaps):
     if bundlecaps is not None:
-        return any(cap.startswith('HG2') for cap in bundlecaps)
+        return any(cap.startswith(b'HG2') for cap in bundlecaps)
     return False
 
 
@@ -2397,34 +2409,36 @@
     usebundle2 = bundle2requested(bundlecaps)
     # bundle10 case
     if not usebundle2:
-        if bundlecaps and not kwargs.get('cg', True):
-            raise ValueError(_('request for bundle10 must include changegroup'))
+        if bundlecaps and not kwargs.get(b'cg', True):
+            raise ValueError(
+                _(b'request for bundle10 must include changegroup')
+            )
 
         if kwargs:
             raise ValueError(
-                _('unsupported getbundle arguments: %s')
-                % ', '.join(sorted(kwargs.keys()))
+                _(b'unsupported getbundle arguments: %s')
+                % b', '.join(sorted(kwargs.keys()))
             )
         outgoing = _computeoutgoing(repo, heads, common)
-        info['bundleversion'] = 1
+        info[b'bundleversion'] = 1
         return (
             info,
             changegroup.makestream(
-                repo, outgoing, '01', source, bundlecaps=bundlecaps
+                repo, outgoing, b'01', source, bundlecaps=bundlecaps
             ),
         )
 
     # bundle20 case
-    info['bundleversion'] = 2
+    info[b'bundleversion'] = 2
     b2caps = {}
     for bcaps in bundlecaps:
-        if bcaps.startswith('bundle2='):
-            blob = urlreq.unquote(bcaps[len('bundle2=') :])
+        if bcaps.startswith(b'bundle2='):
+            blob = urlreq.unquote(bcaps[len(b'bundle2=') :])
             b2caps.update(bundle2.decodecaps(blob))
     bundler = bundle2.bundle20(repo.ui, b2caps)
 
-    kwargs['heads'] = heads
-    kwargs['common'] = common
+    kwargs[b'heads'] = heads
+    kwargs[b'common'] = common
 
     for name in getbundle2partsorder:
         func = getbundle2partsmapping[name]
@@ -2437,17 +2451,17 @@
             **pycompat.strkwargs(kwargs)
         )
 
-    info['prefercompressed'] = bundler.prefercompressed
+    info[b'prefercompressed'] = bundler.prefercompressed
 
     return info, bundler.getchunks()
 
 
-@getbundle2partsgenerator('stream2')
+@getbundle2partsgenerator(b'stream2')
 def _getbundlestream2(bundler, repo, *args, **kwargs):
     return bundle2.addpartbundlestream2(bundler, repo, **kwargs)
 
 
-@getbundle2partsgenerator('changegroup')
+@getbundle2partsgenerator(b'changegroup')
 def _getbundlechangegrouppart(
     bundler,
     repo,
@@ -2462,8 +2476,8 @@
     if not kwargs.get(r'cg', True):
         return
 
-    version = '01'
-    cgversions = b2caps.get('changegroup')
+    version = b'01'
+    cgversions = b2caps.get(b'changegroup')
     if cgversions:  # 3.1 and 3.2 ship with an empty value
         cgversions = [
             v
@@ -2471,7 +2485,7 @@
             if v in changegroup.supportedoutgoingversions(repo)
         ]
         if not cgversions:
-            raise error.Abort(_('no common changegroup version'))
+            raise error.Abort(_(b'no common changegroup version'))
         version = max(cgversions)
 
     outgoing = _computeoutgoing(repo, heads, common)
@@ -2489,14 +2503,14 @@
         repo, outgoing, version, source, bundlecaps=bundlecaps, matcher=matcher
     )
 
-    part = bundler.newpart('changegroup', data=cgstream)
+    part = bundler.newpart(b'changegroup', data=cgstream)
     if cgversions:
-        part.addparam('version', version)
-
-    part.addparam('nbchanges', '%d' % len(outgoing.missing), mandatory=False)
-
-    if 'treemanifest' in repo.requirements:
-        part.addparam('treemanifest', '1')
+        part.addparam(b'version', version)
+
+    part.addparam(b'nbchanges', b'%d' % len(outgoing.missing), mandatory=False)
+
+    if b'treemanifest' in repo.requirements:
+        part.addparam(b'treemanifest', b'1')
 
     if (
         kwargs.get(r'narrow', False)
@@ -2504,42 +2518,42 @@
         and (include or exclude)
     ):
         # this is mandatory because otherwise ACL clients won't work
-        narrowspecpart = bundler.newpart('Narrow:responsespec')
-        narrowspecpart.data = '%s\0%s' % (
-            '\n'.join(include),
-            '\n'.join(exclude),
+        narrowspecpart = bundler.newpart(b'Narrow:responsespec')
+        narrowspecpart.data = b'%s\0%s' % (
+            b'\n'.join(include),
+            b'\n'.join(exclude),
         )
 
 
-@getbundle2partsgenerator('bookmarks')
+@getbundle2partsgenerator(b'bookmarks')
 def _getbundlebookmarkpart(
     bundler, repo, source, bundlecaps=None, b2caps=None, **kwargs
 ):
     """add a bookmark part to the requested bundle"""
     if not kwargs.get(r'bookmarks', False):
         return
-    if 'bookmarks' not in b2caps:
-        raise error.Abort(_('no common bookmarks exchange method'))
+    if b'bookmarks' not in b2caps:
+        raise error.Abort(_(b'no common bookmarks exchange method'))
     books = bookmod.listbinbookmarks(repo)
     data = bookmod.binaryencode(books)
     if data:
-        bundler.newpart('bookmarks', data=data)
-
-
-@getbundle2partsgenerator('listkeys')
+        bundler.newpart(b'bookmarks', data=data)
+
+
+@getbundle2partsgenerator(b'listkeys')
 def _getbundlelistkeysparts(
     bundler, repo, source, bundlecaps=None, b2caps=None, **kwargs
 ):
     """add parts containing listkeys namespaces to the requested bundle"""
     listkeys = kwargs.get(r'listkeys', ())
     for namespace in listkeys:
-        part = bundler.newpart('listkeys')
-        part.addparam('namespace', namespace)
+        part = bundler.newpart(b'listkeys')
+        part.addparam(b'namespace', namespace)
         keys = repo.listkeys(namespace).items()
         part.data = pushkey.encodekeys(keys)
 
 
-@getbundle2partsgenerator('obsmarkers')
+@getbundle2partsgenerator(b'obsmarkers')
 def _getbundleobsmarkerpart(
     bundler, repo, source, bundlecaps=None, b2caps=None, heads=None, **kwargs
 ):
@@ -2547,20 +2561,20 @@
     if kwargs.get(r'obsmarkers', False):
         if heads is None:
             heads = repo.heads()
-        subset = [c.node() for c in repo.set('::%ln', heads)]
+        subset = [c.node() for c in repo.set(b'::%ln', heads)]
         markers = repo.obsstore.relevantmarkers(subset)
         markers = sorted(markers)
         bundle2.buildobsmarkerspart(bundler, markers)
 
 
-@getbundle2partsgenerator('phases')
+@getbundle2partsgenerator(b'phases')
 def _getbundlephasespart(
     bundler, repo, source, bundlecaps=None, b2caps=None, heads=None, **kwargs
 ):
     """add phase heads part to the requested bundle"""
     if kwargs.get(r'phases', False):
-        if not 'heads' in b2caps.get('phases'):
-            raise error.Abort(_('no common phases exchange method'))
+        if not b'heads' in b2caps.get(b'phases'):
+            raise error.Abort(_(b'no common phases exchange method'))
         if heads is None:
             heads = repo.heads()
 
@@ -2587,7 +2601,7 @@
             if draftheads:
                 publicheads = headsbyphase.get(phases.public, set())
 
-                revset = 'heads(only(%ln, %ln) and public())'
+                revset = b'heads(only(%ln, %ln) and public())'
                 extraheads = repo.revs(revset, draftheads, publicheads)
                 for r in extraheads:
                     headsbyphase[phases.public].add(node(r))
@@ -2599,10 +2613,10 @@
 
         # generate the actual part
         phasedata = phases.binaryencode(phasemapping)
-        bundler.newpart('phase-heads', data=phasedata)
-
-
-@getbundle2partsgenerator('hgtagsfnodes')
+        bundler.newpart(b'phase-heads', data=phasedata)
+
+
+@getbundle2partsgenerator(b'hgtagsfnodes')
 def _getbundletagsfnodes(
     bundler,
     repo,
@@ -2623,14 +2637,14 @@
     # Don't send unless:
     # - changesets are being exchanged,
     # - the client supports it.
-    if not (kwargs.get(r'cg', True) and 'hgtagsfnodes' in b2caps):
+    if not (kwargs.get(r'cg', True) and b'hgtagsfnodes' in b2caps):
         return
 
     outgoing = _computeoutgoing(repo, heads, common)
     bundle2.addparttagsfnodescache(repo, bundler, outgoing)
 
 
-@getbundle2partsgenerator('cache:rev-branch-cache')
+@getbundle2partsgenerator(b'cache:rev-branch-cache')
 def _getbundlerevbranchcache(
     bundler,
     repo,
@@ -2657,7 +2671,7 @@
     # - narrow bundle isn't in play (not currently compatible).
     if (
         not kwargs.get(r'cg', True)
-        or 'rev-branch-cache' not in b2caps
+        or b'rev-branch-cache' not in b2caps
         or kwargs.get(r'narrow', False)
         or repo.ui.has_section(_NARROWACL_SECTION)
     ):
@@ -2673,16 +2687,16 @@
     Used by peer for unbundling.
     """
     heads = repo.heads()
-    heads_hash = hashlib.sha1(''.join(sorted(heads))).digest()
+    heads_hash = hashlib.sha1(b''.join(sorted(heads))).digest()
     if not (
-        their_heads == ['force']
+        their_heads == [b'force']
         or their_heads == heads
-        or their_heads == ['hashed', heads_hash]
+        or their_heads == [b'hashed', heads_hash]
     ):
         # someone else committed/pushed/unbundled while we
         # were transferring data
         raise error.PushRaced(
-            'repository changed while %s - ' 'please try again' % context
+            b'repository changed while %s - ' b'please try again' % context
         )
 
 
@@ -2700,17 +2714,19 @@
     lockandtr = [None, None, None]
     recordout = None
     # quick fix for output mismatch with bundle2 in 3.4
-    captureoutput = repo.ui.configbool('experimental', 'bundle2-output-capture')
-    if url.startswith('remote:http:') or url.startswith('remote:https:'):
+    captureoutput = repo.ui.configbool(
+        b'experimental', b'bundle2-output-capture'
+    )
+    if url.startswith(b'remote:http:') or url.startswith(b'remote:https:'):
         captureoutput = True
     try:
         # note: outside bundle1, 'heads' is expected to be empty and this
         # 'check_heads' call will be a no-op
-        check_heads(repo, heads, 'uploading changes')
+        check_heads(repo, heads, b'uploading changes')
         # push can proceed
         if not isinstance(cg, bundle2.unbundle20):
             # legacy case: bundle1 (changegroup 01)
-            txnname = "\n".join([source, util.hidepassword(url)])
+            txnname = b"\n".join([source, util.hidepassword(url)])
             with repo.lock(), repo.transaction(txnname) as tr:
                 op = bundle2.applybundle(repo, cg, tr, source, url)
                 r = bundle2.combinechangegroupresults(op)
@@ -2724,21 +2740,23 @@
                             lockandtr[0] = repo.wlock()
                         lockandtr[1] = repo.lock()
                         lockandtr[2] = repo.transaction(source)
-                        lockandtr[2].hookargs['source'] = source
-                        lockandtr[2].hookargs['url'] = url
-                        lockandtr[2].hookargs['bundle2'] = '1'
+                        lockandtr[2].hookargs[b'source'] = source
+                        lockandtr[2].hookargs[b'url'] = url
+                        lockandtr[2].hookargs[b'bundle2'] = b'1'
                     return lockandtr[2]
 
                 # Do greedy locking by default until we're satisfied with lazy
                 # locking.
-                if not repo.ui.configbool('experimental', 'bundle2lazylocking'):
+                if not repo.ui.configbool(
+                    b'experimental', b'bundle2lazylocking'
+                ):
                     gettransaction()
 
                 op = bundle2.bundleoperation(
                     repo,
                     gettransaction,
                     captureoutput=captureoutput,
-                    source='push',
+                    source=b'push',
                 )
                 try:
                     op = bundle2.processbundle(repo, cg, op=op)
@@ -2748,7 +2766,7 @@
                         repo.ui.pushbuffer(error=True, subproc=True)
 
                         def recordout(output):
-                            r.newpart('output', data=output, mandatory=False)
+                            r.newpart(b'output', data=output, mandatory=False)
 
                 if lockandtr[2] is not None:
                     lockandtr[2].close()
@@ -2759,7 +2777,7 @@
 
                     def recordout(output):
                         part = bundle2.bundlepart(
-                            'output', data=output, mandatory=False
+                            b'output', data=output, mandatory=False
                         )
                         parts.append(part)
 
@@ -2777,7 +2795,7 @@
     repo = pullop.repo
     remote = pullop.remote
 
-    if not repo.ui.configbool('ui', 'clonebundles'):
+    if not repo.ui.configbool(b'ui', b'clonebundles'):
         return
 
     # Only run if local repo is empty.
@@ -2787,11 +2805,11 @@
     if pullop.heads:
         return
 
-    if not remote.capable('clonebundles'):
+    if not remote.capable(b'clonebundles'):
         return
 
     with remote.commandexecutor() as e:
-        res = e.callcommand('clonebundles', {}).result()
+        res = e.callcommand(b'clonebundles', {}).result()
 
     # If we call the wire protocol command, that's good enough to record the
     # attempt.
@@ -2801,8 +2819,8 @@
     if not entries:
         repo.ui.note(
             _(
-                'no clone bundles available on remote; '
-                'falling back to regular clone\n'
+                b'no clone bundles available on remote; '
+                b'falling back to regular clone\n'
             )
         )
         return
@@ -2819,36 +2837,36 @@
         # clone!
         repo.ui.warn(
             _(
-                'no compatible clone bundles available on server; '
-                'falling back to regular clone\n'
+                b'no compatible clone bundles available on server; '
+                b'falling back to regular clone\n'
             )
         )
         repo.ui.warn(
-            _('(you may want to report this to the server ' 'operator)\n')
+            _(b'(you may want to report this to the server ' b'operator)\n')
         )
         return
 
     entries = sortclonebundleentries(repo.ui, entries)
 
-    url = entries[0]['URL']
-    repo.ui.status(_('applying clone bundle from %s\n') % url)
+    url = entries[0][b'URL']
+    repo.ui.status(_(b'applying clone bundle from %s\n') % url)
     if trypullbundlefromurl(repo.ui, repo, url):
-        repo.ui.status(_('finished applying clone bundle\n'))
+        repo.ui.status(_(b'finished applying clone bundle\n'))
     # Bundle failed.
     #
     # We abort by default to avoid the thundering herd of
     # clients flooding a server that was expecting expensive
     # clone load to be offloaded.
-    elif repo.ui.configbool('ui', 'clonebundlefallback'):
-        repo.ui.warn(_('falling back to normal clone\n'))
+    elif repo.ui.configbool(b'ui', b'clonebundlefallback'):
+        repo.ui.warn(_(b'falling back to normal clone\n'))
     else:
         raise error.Abort(
-            _('error applying bundle'),
+            _(b'error applying bundle'),
             hint=_(
-                'if this error persists, consider contacting '
-                'the server operator or disable clone '
-                'bundles via '
-                '"--config ui.clonebundles=false"'
+                b'if this error persists, consider contacting '
+                b'the server operator or disable clone '
+                b'bundles via '
+                b'"--config ui.clonebundles=false"'
             ),
         )
 
@@ -2864,9 +2882,9 @@
         fields = line.split()
         if not fields:
             continue
-        attrs = {'URL': fields[0]}
+        attrs = {b'URL': fields[0]}
         for rawattr in fields[1:]:
-            key, value = rawattr.split('=', 1)
+            key, value = rawattr.split(b'=', 1)
             key = urlreq.unquote(key)
             value = urlreq.unquote(value)
             attrs[key] = value
@@ -2874,11 +2892,11 @@
             # Parse BUNDLESPEC into components. This makes client-side
             # preferences easier to specify since you can prefer a single
             # component of the BUNDLESPEC.
-            if key == 'BUNDLESPEC':
+            if key == b'BUNDLESPEC':
                 try:
                     bundlespec = parsebundlespec(repo, value)
-                    attrs['COMPRESSION'] = bundlespec.compression
-                    attrs['VERSION'] = bundlespec.version
+                    attrs[b'COMPRESSION'] = bundlespec.compression
+                    attrs[b'VERSION'] = bundlespec.version
                 except error.InvalidBundleSpecification:
                     pass
                 except error.UnsupportedBundleSpecification:
@@ -2891,14 +2909,14 @@
 
 def isstreamclonespec(bundlespec):
     # Stream clone v1
-    if bundlespec.wirecompression == 'UN' and bundlespec.wireversion == 's1':
+    if bundlespec.wirecompression == b'UN' and bundlespec.wireversion == b's1':
         return True
 
     # Stream clone v2
     if (
-        bundlespec.wirecompression == 'UN'
-        and bundlespec.wireversion == '02'
-        and bundlespec.contentopts.get('streamv2')
+        bundlespec.wirecompression == b'UN'
+        and bundlespec.wireversion == b'02'
+        and bundlespec.contentopts.get(b'streamv2')
     ):
         return True
 
@@ -2917,7 +2935,7 @@
     """
     newentries = []
     for entry in entries:
-        spec = entry.get('BUNDLESPEC')
+        spec = entry.get(b'BUNDLESPEC')
         if spec:
             try:
                 bundlespec = parsebundlespec(repo, spec, strict=True)
@@ -2926,32 +2944,32 @@
                 # entries.
                 if streamclonerequested and not isstreamclonespec(bundlespec):
                     repo.ui.debug(
-                        'filtering %s because not a stream clone\n'
-                        % entry['URL']
+                        b'filtering %s because not a stream clone\n'
+                        % entry[b'URL']
                     )
                     continue
 
             except error.InvalidBundleSpecification as e:
-                repo.ui.debug(stringutil.forcebytestr(e) + '\n')
+                repo.ui.debug(stringutil.forcebytestr(e) + b'\n')
                 continue
             except error.UnsupportedBundleSpecification as e:
                 repo.ui.debug(
-                    'filtering %s because unsupported bundle '
-                    'spec: %s\n' % (entry['URL'], stringutil.forcebytestr(e))
+                    b'filtering %s because unsupported bundle '
+                    b'spec: %s\n' % (entry[b'URL'], stringutil.forcebytestr(e))
                 )
                 continue
         # If we don't have a spec and requested a stream clone, we don't know
         # what the entry is, so don't attempt to apply it.
         elif streamclonerequested:
             repo.ui.debug(
-                'filtering %s because cannot determine if a stream '
-                'clone bundle\n' % entry['URL']
+                b'filtering %s because cannot determine if a stream '
+                b'clone bundle\n' % entry[b'URL']
             )
             continue
 
-        if 'REQUIRESNI' in entry and not sslutil.hassni:
+        if b'REQUIRESNI' in entry and not sslutil.hassni:
             repo.ui.debug(
-                'filtering %s because SNI not supported\n' % entry['URL']
+                b'filtering %s because SNI not supported\n' % entry[b'URL']
             )
             continue
 
@@ -3026,11 +3044,11 @@
 
 
 def sortclonebundleentries(ui, entries):
-    prefers = ui.configlist('ui', 'clonebundleprefers')
+    prefers = ui.configlist(b'ui', b'clonebundleprefers')
     if not prefers:
         return list(entries)
 
-    prefers = [p.split('=', 1) for p in prefers]
+    prefers = [p.split(b'=', 1) for p in prefers]
 
     items = sorted(clonebundleentry(v, prefers) for v in entries)
     return [i.value for i in items]
@@ -3038,24 +3056,24 @@
 
 def trypullbundlefromurl(ui, repo, url):
     """Attempt to apply a bundle from a URL."""
-    with repo.lock(), repo.transaction('bundleurl') as tr:
+    with repo.lock(), repo.transaction(b'bundleurl') as tr:
         try:
             fh = urlmod.open(ui, url)
-            cg = readbundle(ui, fh, 'stream')
+            cg = readbundle(ui, fh, b'stream')
 
             if isinstance(cg, streamclone.streamcloneapplier):
                 cg.apply(repo)
             else:
-                bundle2.applybundle(repo, cg, tr, 'clonebundles', url)
+                bundle2.applybundle(repo, cg, tr, b'clonebundles', url)
             return True
         except urlerr.httperror as e:
             ui.warn(
-                _('HTTP error fetching bundle: %s\n')
+                _(b'HTTP error fetching bundle: %s\n')
                 % stringutil.forcebytestr(e)
             )
         except urlerr.urlerror as e:
             ui.warn(
-                _('error fetching bundle: %s\n')
+                _(b'error fetching bundle: %s\n')
                 % stringutil.forcebytestr(e.reason)
             )
 
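The hunks above are mechanical, but the distinction they encode is load-bearing
on Python 3: bytes and str never compare equal, so a dict populated with b''
keys will not answer native-string lookups, and %-formatting requires the
template and its operands to agree on string type. A minimal sketch of the
failure modes the byteification avoids (illustrative plain Python, not
Mercurial code):

    # bytes and str keys coexist without colliding on Python 3, so a
    # native-string lookup silently misses a byteified key.
    kwargs = {b'heads': [b'force'], b'common': []}
    assert 'heads' not in kwargs           # str lookup misses the bytes key
    assert kwargs[b'heads'] == [b'force']  # only the bytes key matches

    # %-formatting of bytes templates works (PEP 461), but mixing in a
    # str operand raises TypeError instead of coercing as Python 2 did.
    assert b'%d' % 7 == b'7'
    try:
        b'old: %s' % 'draft'
    except TypeError:
        pass  # str is not bytes-like; Python 3 refuses to guess an encoding
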
--- a/mercurial/exchangev2.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/exchangev2.py	Sun Oct 06 09:48:39 2019 -0400
@@ -39,7 +39,7 @@
     # incremental pull. This is somewhat hacky and is not nearly robust enough
     # for long-term usage.
     if usingrawchangelogandmanifest:
-        with repo.transaction('clone'):
+        with repo.transaction(b'clone'):
             _fetchrawstorefiles(repo, remote)
             repo.invalidate(clearfilecache=True)
 
@@ -51,7 +51,7 @@
         repo.root,
         # Empty maps to nevermatcher. So always
         # set includes if missing.
-        pullop.includepats or {'path:.'},
+        pullop.includepats or {b'path:.'},
         pullop.excludepats,
     )
 
@@ -78,32 +78,32 @@
 
     # Ensure all new changesets are draft by default. If the repo is
     # publishing, the phase will be adjusted by the loop below.
-    if csetres['added']:
-        phases.registernew(repo, tr, phases.draft, csetres['added'])
+    if csetres[b'added']:
+        phases.registernew(repo, tr, phases.draft, csetres[b'added'])
 
     # And adjust the phase of all changesets accordingly.
     for phase in phases.phasenames:
-        if phase == b'secret' or not csetres['nodesbyphase'][phase]:
+        if phase == b'secret' or not csetres[b'nodesbyphase'][phase]:
             continue
 
         phases.advanceboundary(
             repo,
             tr,
             phases.phasenames.index(phase),
-            csetres['nodesbyphase'][phase],
+            csetres[b'nodesbyphase'][phase],
         )
 
     # Write bookmark updates.
     bookmarks.updatefromremote(
         repo.ui,
         repo,
-        csetres['bookmarks'],
+        csetres[b'bookmarks'],
         remote.url(),
         pullop.gettransaction,
         explicit=pullop.explicitbookmarks,
     )
 
-    manres = _fetchmanifests(repo, tr, remote, csetres['manifestnodes'])
+    manres = _fetchmanifests(repo, tr, remote, csetres[b'manifestnodes'])
 
     # We don't properly support shallow changesets and manifests yet,
     # so we apply depth limiting locally.
@@ -142,9 +142,9 @@
             manifestlinkrevs[mnode] = rev
 
     else:
-        csetsforfiles = [n for n in csetres['added'] if csetrelevantfilter(n)]
-        mnodesforfiles = manres['added']
-        manifestlinkrevs = manres['linkrevs']
+        csetsforfiles = [n for n in csetres[b'added'] if csetrelevantfilter(n)]
+        mnodesforfiles = manres[b'added']
+        manifestlinkrevs = manres[b'linkrevs']
 
     # Find all file nodes referenced by added manifests and fetch those
     # revisions.
@@ -197,7 +197,7 @@
         overall = next(objs)
 
         progress = repo.ui.makeprogress(
-            _('clone'), total=overall[b'totalsize'], unit=_('bytes')
+            _(b'clone'), total=overall[b'totalsize'], unit=_(b'bytes')
         )
         with progress:
             progress.update(0)
@@ -330,7 +330,7 @@
 
 
 def _processchangesetdata(repo, tr, objs):
-    repo.hook('prechangegroup', throw=True, **pycompat.strkwargs(tr.hookargs))
+    repo.hook(b'prechangegroup', throw=True, **pycompat.strkwargs(tr.hookargs))
 
     urepo = repo.unfiltered()
     cl = urepo.changelog
@@ -342,13 +342,13 @@
     meta = next(objs)
 
     progress = repo.ui.makeprogress(
-        _('changesets'), unit=_('chunks'), total=meta.get(b'totalitems')
+        _(b'changesets'), unit=_(b'chunks'), total=meta.get(b'totalitems')
     )
 
     manifestnodes = {}
 
     def linkrev(node):
-        repo.ui.debug('add changeset %s\n' % short(node))
+        repo.ui.debug(b'add changeset %s\n' % short(node))
         # Linkrev for changelog is always self.
         return len(cl)
 
@@ -413,10 +413,10 @@
     progress.complete()
 
     return {
-        'added': added,
-        'nodesbyphase': nodesbyphase,
-        'bookmarks': remotebookmarks,
-        'manifestnodes': manifestnodes,
+        b'added': added,
+        b'nodesbyphase': nodesbyphase,
+        b'bookmarks': remotebookmarks,
+        b'manifestnodes': manifestnodes,
     }
 
 
@@ -483,7 +483,7 @@
             progress.increment()
 
     progress = repo.ui.makeprogress(
-        _('manifests'), unit=_('chunks'), total=len(fetchnodes)
+        _(b'manifests'), unit=_(b'chunks'), total=len(fetchnodes)
     )
 
     commandmeta = remote.apidescriptor[b'commands'][b'manifestdata']
@@ -530,8 +530,8 @@
     progress.complete()
 
     return {
-        'added': added,
-        'linkrevs': linkrevs,
+        b'added': added,
+        b'linkrevs': linkrevs,
     }
 
 
@@ -545,7 +545,7 @@
     fnodes = collections.defaultdict(dict)
 
     progress = repo.ui.makeprogress(
-        _('scanning manifests'), total=len(manifestnodes)
+        _(b'scanning manifests'), total=len(manifestnodes)
     )
 
     with progress:
@@ -605,8 +605,8 @@
             progress.increment()
 
     progress = repo.ui.makeprogress(
-        _('files'),
-        unit=_('chunks'),
+        _(b'files'),
+        unit=_(b'chunks'),
         total=sum(len(v) for v in fnodes.itervalues()),
     )
 
@@ -704,8 +704,8 @@
             remaining -= 1
 
     progress = repo.ui.makeprogress(
-        _('files'),
-        unit=_('chunks'),
+        _(b'files'),
+        unit=_(b'chunks'),
         total=sum(len(v) for v in fnodes.itervalues()),
     )
 
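mercurial/extensions.py below receives the same treatment, down to the builtin
extension set and the loadable table names. A rewrite of this scale is most
reliably done at the token level rather than with regexes, since the tokenizer
sees string literals exactly as the compiler does and is not fooled by quotes
inside comments or nested strings. A deliberately tiny, hypothetical sketch of
that approach (the real contrib/byteify-strings.py is token-based as well, but
it preserves layout and knows to skip docstrings, r''-prefixed opt-outs, and
kwargs keys):

    import io
    import tokenize

    def byteify(source):
        # Toy version: prefix every bare string literal with b''. It keeps
        # no layout guarantees (untokenize re-spaces the output) and applies
        # none of the real tool's exceptions.
        result = []
        for tok in tokenize.generate_tokens(io.StringIO(source).readline):
            if tok.type == tokenize.STRING and tok.string[0] in ('"', "'"):
                result.append((tokenize.STRING, 'b' + tok.string))
            else:
                result.append((tok.type, tok.string))
        return tokenize.untokenize(result)

    print(byteify("part.addparam('namespace', 'phases')"))
    # -> part .addparam (b'namespace' ,b'phases' )   (respacing approximate)
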
--- a/mercurial/extensions.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/extensions.py	Sun Oct 06 09:48:39 2019 -0400
@@ -34,15 +34,15 @@
 _aftercallbacks = {}
 _order = []
 _builtin = {
-    'hbisect',
-    'bookmarks',
-    'color',
-    'parentrevspec',
-    'progress',
-    'interhg',
-    'inotify',
-    'hgcia',
-    'shelve',
+    b'hbisect',
+    b'bookmarks',
+    b'color',
+    b'parentrevspec',
+    b'progress',
+    b'interhg',
+    b'inotify',
+    b'hgcia',
+    b'shelve',
 }
 
 
@@ -50,9 +50,9 @@
     if ui:
 
         def enabled(name):
-            for format in ['%s', 'hgext.%s']:
-                conf = ui.config('extensions', format % name)
-                if conf is not None and not conf.startswith('!'):
+            for format in [b'%s', b'hgext.%s']:
+                conf = ui.config(b'extensions', format % name)
+                if conf is not None and not conf.startswith(b'!'):
                     return True
 
     else:
@@ -70,7 +70,7 @@
         mod = _extensions[name]
     except KeyError:
         for k, v in _extensions.iteritems():
-            if k.endswith('.' + name) or k.endswith('/' + name):
+            if k.endswith(b'.' + name) or k.endswith(b'/' + name):
                 mod = v
                 break
     if not mod:
@@ -79,7 +79,7 @@
 
 
 def loadpath(path, module_name):
-    module_name = module_name.replace('.', '_')
+    module_name = module_name.replace(b'.', b'_')
     path = util.normpath(util.expandpath(path))
     module_name = pycompat.fsdecode(module_name)
     path = pycompat.fsdecode(path)
@@ -100,7 +100,7 @@
 def _importh(name):
     """import and return the <name> module"""
     mod = __import__(pycompat.sysstr(name))
-    components = name.split('.')
+    components = name.split(b'.')
     for comp in components[1:]:
         mod = getattr(mod, comp)
     return mod
@@ -111,18 +111,18 @@
         # the module will be loaded in sys.modules
         # choose a unique name so that it doesn't
         # conflict with other modules
-        mod = loadpath(path, 'hgext.%s' % name)
+        mod = loadpath(path, b'hgext.%s' % name)
     else:
         try:
-            mod = _importh("hgext.%s" % name)
+            mod = _importh(b"hgext.%s" % name)
         except ImportError as err:
             if reportfunc:
-                reportfunc(err, "hgext.%s" % name, "hgext3rd.%s" % name)
+                reportfunc(err, b"hgext.%s" % name, b"hgext3rd.%s" % name)
             try:
-                mod = _importh("hgext3rd.%s" % name)
+                mod = _importh(b"hgext3rd.%s" % name)
             except ImportError as err:
                 if reportfunc:
-                    reportfunc(err, "hgext3rd.%s" % name, name)
+                    reportfunc(err, b"hgext3rd.%s" % name, name)
                 mod = _importh(name)
     return mod
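
The same three-step import fallback, sketched with importlib; the real code routes byte-string names through _importh() and pycompat.sysstr(), while plain str names are used here:

    import importlib

    def importext(name):
        for prefix in ('hgext.%s', 'hgext3rd.%s'):
            try:
                return importlib.import_module(prefix % name)
            except ImportError:
                continue
        # last resort: the bare name; let ImportError propagate this time
        return importlib.import_module(name)
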
 
@@ -137,7 +137,7 @@
         stringutil.forcebytestr(err),
         next,
     )
-    if ui.debugflag and ui.configbool('devel', 'debug.extensions'):
+    if ui.debugflag and ui.configbool(b'devel', b'debug.extensions'):
         ui.traceback()
 
 
@@ -152,12 +152,12 @@
     elif isinstance(xs, type(u'')):
         raise error.ProgrammingError(
             b"unicode %r found in %s" % (xs, name),
-            hint="use b'' to make it byte string",
+            hint=b"use b'' to make it byte string",
         )
 
 
 # attributes set by registrar.command
-_cmdfuncattrs = ('norepo', 'optionalrepo', 'inferrepo')
+_cmdfuncattrs = (b'norepo', b'optionalrepo', b'inferrepo')
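
A sketch of the unicode check above in isolation. The real _rejectunicode also recurses into tuples, lists, sets and dict items (lines elided by this hunk), and raises error.ProgrammingError rather than the TypeError used here:

    def rejectunicode(name, xs):
        if isinstance(xs, (list, set, tuple)):
            for x in xs:
                rejectunicode(name, x)
        elif isinstance(xs, dict):
            for k, v in xs.items():
                rejectunicode(name, k)
                rejectunicode(name, v)
        elif isinstance(xs, str):  # unicode on Python 3
            raise TypeError("unicode %r found in %s; use b'' literals" % (xs, name))

    rejectunicode(b'cmdtable', {b'cmd': ([], b'help text')})  # passes
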
 
 
 def _validatecmdtable(ui, cmdtable):
@@ -168,22 +168,22 @@
         if not missing:
             continue
         raise error.ProgrammingError(
-            'missing attributes: %s' % ', '.join(missing),
-            hint="use @command decorator to register '%s'" % c,
+            b'missing attributes: %s' % b', '.join(missing),
+            hint=b"use @command decorator to register '%s'" % c,
         )
 
 
 def _validatetables(ui, mod):
     """Sanity check for loadable tables provided by extension module"""
-    for t in ['cmdtable', 'colortable', 'configtable']:
+    for t in [b'cmdtable', b'colortable', b'configtable']:
         _rejectunicode(t, getattr(mod, t, {}))
     for t in [
-        'filesetpredicate',
-        'internalmerge',
-        'revsetpredicate',
-        'templatefilter',
-        'templatefunc',
-        'templatekeyword',
+        b'filesetpredicate',
+        b'internalmerge',
+        b'revsetpredicate',
+        b'templatefilter',
+        b'templatefunc',
+        b'templatekeyword',
     ]:
         o = getattr(mod, t, None)
         if o:
@@ -192,7 +192,7 @@
 
 
 def load(ui, name, path, loadingtime=None):
-    if name.startswith('hgext.') or name.startswith('hgext/'):
+    if name.startswith(b'hgext.') or name.startswith(b'hgext/'):
         shortname = name[6:]
     else:
         shortname = name
@@ -202,7 +202,7 @@
         return _extensions[shortname]
     ui.log(b'extension', b'  - loading extension: %s\n', shortname)
     _extensions[shortname] = None
-    with util.timedcm('load extension %s', shortname) as stats:
+    with util.timedcm(b'load extension %s', shortname) as stats:
         mod = _importext(name, path, bind(_reportimporterror, ui))
     ui.log(b'extension', b'  > %s extension loaded in %s\n', shortname, stats)
     if loadingtime is not None:
@@ -215,8 +215,8 @@
     minver = getattr(mod, 'minimumhgversion', None)
     if minver and util.versiontuple(minver, 2) > util.versiontuple(n=2):
         msg = _(
-            '(third party extension %s requires version %s or newer '
-            'of Mercurial (current: %s); disabling)\n'
+            b'(third party extension %s requires version %s or newer '
+            b'of Mercurial (current: %s); disabling)\n'
         )
         ui.warn(msg % (shortname, minver, util.version()))
         return
@@ -228,7 +228,7 @@
     ui.log(
         b'extension', b'    - invoking registered callbacks: %s\n', shortname
     )
-    with util.timedcm('callbacks extension %s', shortname) as stats:
+    with util.timedcm(b'callbacks extension %s', shortname) as stats:
         for fn in _aftercallbacks.get(shortname, []):
             fn(loaded=True)
     ui.log(b'extension', b'    > callbacks completed in %s\n', stats)
@@ -243,7 +243,7 @@
         except Exception as inst:
             ui.traceback(force=True)
             msg = stringutil.forcebytestr(inst)
-            ui.warn(_("*** failed to set up extension %s: %s\n") % (name, msg))
+            ui.warn(_(b"*** failed to set up extension %s: %s\n") % (name, msg))
             return False
     return True
 
@@ -256,27 +256,27 @@
         except Exception as inst:
             ui.traceback(force=True)
             msg = stringutil.forcebytestr(inst)
-            ui.warn(_("*** failed to set up extension %s: %s\n") % (name, msg))
+            ui.warn(_(b"*** failed to set up extension %s: %s\n") % (name, msg))
             return False
     return True
 
 
 def loadall(ui, whitelist=None):
     loadingtime = collections.defaultdict(int)
-    result = ui.configitems("extensions")
+    result = ui.configitems(b"extensions")
     if whitelist is not None:
         result = [(k, v) for (k, v) in result if k in whitelist]
     newindex = len(_order)
     ui.log(
         b'extension',
         b'loading %sextensions\n',
-        'additional ' if newindex else '',
+        b'additional ' if newindex else b'',
     )
     ui.log(b'extension', b'- processing %d entries\n', len(result))
-    with util.timedcm('load all extensions') as stats:
+    with util.timedcm(b'load all extensions') as stats:
         for (name, path) in result:
             if path:
-                if path[0:1] == '!':
+                if path[0:1] == b'!':
                     if name not in _disabledextensions:
                         ui.log(
                             b'extension',
@@ -291,16 +291,16 @@
                 msg = stringutil.forcebytestr(inst)
                 if path:
                     ui.warn(
-                        _("*** failed to import extension %s from %s: %s\n")
+                        _(b"*** failed to import extension %s from %s: %s\n")
                         % (name, path, msg)
                     )
                 else:
                     ui.warn(
-                        _("*** failed to import extension %s: %s\n")
+                        _(b"*** failed to import extension %s: %s\n")
                         % (name, msg)
                     )
                 if isinstance(inst, error.Hint) and inst.hint:
-                    ui.warn(_("*** (%s)\n") % inst.hint)
+                    ui.warn(_(b"*** (%s)\n") % inst.hint)
                 ui.traceback()
 
     ui.log(
@@ -318,7 +318,7 @@
     #
     # This one is for the list of items that must be run before running any setup
     earlyextraloaders = [
-        ('configtable', configitems, 'loadconfigtable'),
+        (b'configtable', configitems, b'loadconfigtable'),
     ]
 
     ui.log(b'extension', b'- loading configtable attributes\n')
@@ -326,10 +326,10 @@
 
     broken = set()
     ui.log(b'extension', b'- executing uisetup hooks\n')
-    with util.timedcm('all uisetup') as alluisetupstats:
+    with util.timedcm(b'all uisetup') as alluisetupstats:
         for name in _order[newindex:]:
             ui.log(b'extension', b'  - running uisetup for %s\n', name)
-            with util.timedcm('uisetup %s', name) as stats:
+            with util.timedcm(b'uisetup %s', name) as stats:
                 if not _runuisetup(name, ui):
                     ui.log(
                         b'extension',
@@ -342,12 +342,12 @@
     ui.log(b'extension', b'> all uisetup took %s\n', alluisetupstats)
 
     ui.log(b'extension', b'- executing extsetup hooks\n')
-    with util.timedcm('all extsetup') as allextetupstats:
+    with util.timedcm(b'all extsetup') as allextetupstats:
         for name in _order[newindex:]:
             if name in broken:
                 continue
             ui.log(b'extension', b'  - running extsetup for %s\n', name)
-            with util.timedcm('extsetup %s', name) as stats:
+            with util.timedcm(b'extsetup %s', name) as stats:
                 if not _runextsetup(name, ui):
                     ui.log(
                         b'extension',
@@ -365,7 +365,7 @@
 
     # Call aftercallbacks that were never met.
     ui.log(b'extension', b'- executing remaining aftercallbacks\n')
-    with util.timedcm('aftercallbacks') as stats:
+    with util.timedcm(b'aftercallbacks') as stats:
         for shortname in _aftercallbacks:
             if shortname in _extensions:
                 continue
@@ -403,16 +403,16 @@
     #   which takes (ui, extensionname, extraobj) arguments
     ui.log(b'extension', b'- loading extension registration objects\n')
     extraloaders = [
-        ('cmdtable', commands, 'loadcmdtable'),
-        ('colortable', color, 'loadcolortable'),
-        ('filesetpredicate', fileset, 'loadpredicate'),
-        ('internalmerge', filemerge, 'loadinternalmerge'),
-        ('revsetpredicate', revset, 'loadpredicate'),
-        ('templatefilter', templatefilters, 'loadfilter'),
-        ('templatefunc', templatefuncs, 'loadfunction'),
-        ('templatekeyword', templatekw, 'loadkeyword'),
+        (b'cmdtable', commands, b'loadcmdtable'),
+        (b'colortable', color, b'loadcolortable'),
+        (b'filesetpredicate', fileset, b'loadpredicate'),
+        (b'internalmerge', filemerge, b'loadinternalmerge'),
+        (b'revsetpredicate', revset, b'loadpredicate'),
+        (b'templatefilter', templatefilters, b'loadfilter'),
+        (b'templatefunc', templatefuncs, b'loadfunction'),
+        (b'templatekeyword', templatekw, b'loadkeyword'),
     ]
-    with util.timedcm('load registration objects') as stats:
+    with util.timedcm(b'load registration objects') as stats:
         _loadextra(ui, newindex, extraloaders)
     ui.log(
         b'extension',
@@ -482,7 +482,7 @@
         except Exception as inst:
             ui.traceback(force=True)
             ui.warn(
-                _('*** failed to populate ui by extension %s: %s\n')
+                _(b'*** failed to populate ui by extension %s: %s\n')
                 % (name, stringutil.forcebytestr(inst))
             )
 
@@ -709,15 +709,15 @@
 
     exts = {}
     for e in files:
-        if e.endswith('.py'):
-            name = e.rsplit('.', 1)[0]
+        if e.endswith(b'.py'):
+            name = e.rsplit(b'.', 1)[0]
             path = os.path.join(extpath, e)
         else:
             name = e
-            path = os.path.join(extpath, e, '__init__.py')
+            path = os.path.join(extpath, e, b'__init__.py')
             if not os.path.exists(path):
                 continue
-        if name in exts or name in _order or name == '__init__':
+        if name in exts or name in _order or name == b'__init__':
             continue
         exts[name] = path
     for name, path in _disabledextensions.iteritems():
@@ -737,13 +737,13 @@
     result = []
 
     line = file.readline()
-    while line[:1] == '#' or not line.strip():
+    while line[:1] == b'#' or not line.strip():
         line = file.readline()
         if not line:
             break
 
     start = line[:3]
-    if start == '"""' or start == "'''":
+    if start == b'"""' or start == b"'''":
         line = line[3:]
         while line:
             if line.rstrip().endswith(start):
@@ -758,13 +758,13 @@
     else:
         return None
 
-    return ''.join(result)
+    return b''.join(result)
 
 
 def _disabledhelp(path):
     '''retrieve help synopsis of a disabled extension (without importing)'''
     try:
-        with open(path, 'rb') as src:
+        with open(path, b'rb') as src:
             doc = _moduledoc(src)
     except IOError:
         return
@@ -772,7 +772,7 @@
     if doc:  # extracting localized synopsis
         return gettext(doc)
     else:
-        return _('(no help text available)')
+        return _(b'(no help text available)')
 
 
 def disabled():
@@ -841,7 +841,7 @@
 
     This may raise IOError or SyntaxError.
     """
-    with open(path, 'rb') as src:
+    with open(path, b'rb') as src:
         root = ast.parse(src.read(), path)
     cmdtable = {}
     for node in _walkcommand(root):
@@ -906,9 +906,9 @@
     '''return a dict of {name: desc} of extensions'''
     exts = {}
     for ename, ext in extensions():
-        doc = gettext(ext.__doc__) or _('(no help text available)')
+        doc = gettext(ext.__doc__) or _(b'(no help text available)')
         if shortname:
-            ename = ename.split('.')[-1]
+            ename = ename.split(b'.')[-1]
         exts[ename] = doc.splitlines()[0].strip()
 
     return exts
@@ -921,17 +921,17 @@
 
 def moduleversion(module):
     '''return version information from given module as a string'''
-    if util.safehasattr(module, 'getversion') and callable(module.getversion):
+    if util.safehasattr(module, b'getversion') and callable(module.getversion):
         version = module.getversion()
-    elif util.safehasattr(module, '__version__'):
+    elif util.safehasattr(module, b'__version__'):
         version = module.__version__
     else:
-        version = ''
+        version = b''
     if isinstance(version, (list, tuple)):
-        version = '.'.join(pycompat.bytestr(o) for o in version)
+        version = b'.'.join(pycompat.bytestr(o) for o in version)
     return version
 
 
 def ismoduleinternal(module):
     exttestedwith = getattr(module, 'testedwith', None)
-    return exttestedwith == "ships-with-hg-core"
+    return exttestedwith == b"ships-with-hg-core"
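
One trap worth flagging in the hunk above: on Python 3, getattr() and hasattr() accept only str attribute names, so byte-string names such as b'getversion' must be converted before the attribute lookup. A conversion-aware helper, sketched as an assumption for illustration (not Mercurial's exact implementation) and assuming ASCII-only attribute names:

    _notset = object()

    def safehasattr(thing, attr):
        if isinstance(attr, bytes):
            attr = attr.decode('latin-1')  # assumption: ASCII attribute names
        return getattr(thing, attr, _notset) is not _notset

    class mod(object):
        __version__ = (5, 2)

    assert safehasattr(mod, b'__version__')
    assert not safehasattr(mod, b'getversion')
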
--- a/mercurial/exthelper.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/exthelper.py	Sun Oct 06 09:48:39 2019 -0400
@@ -273,9 +273,9 @@
         else:
             for opt in opts:
                 if not isinstance(opt, tuple):
-                    raise error.ProgrammingError('opts must be list of tuples')
+                    raise error.ProgrammingError(b'opts must be list of tuples')
                 if len(opt) not in (4, 5):
-                    msg = 'each opt tuple must contain 4 or 5 values'
+                    msg = b'each opt tuple must contain 4 or 5 values'
                     raise error.ProgrammingError(msg)
 
         def dec(wrapper):
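
For reference, a sketch of the opt tuples the check above validates: four positional values (short flag, long flag, default, help text), optionally followed by a fifth naming the value in help output:

    opts = [
        (b'n', b'dry-run', False, b'do not perform actions'),
        (b'r', b'rev', b'', b'revision to operate on', b'REV'),
    ]
    for opt in opts:
        assert isinstance(opt, tuple)
        assert len(opt) in (4, 5)
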
--- a/mercurial/fancyopts.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/fancyopts.py	Sun Oct 06 09:48:39 2019 -0400
@@ -19,12 +19,12 @@
 # Set of flags to not apply boolean negation logic on
 nevernegate = {
     # avoid --no-noninteractive
-    'noninteractive',
+    b'noninteractive',
     # These two flags are special because they cause hg to do one
     # thing and then exit, and so aren't suitable for use in things
     # like aliases anyway.
-    'help',
-    'version',
+    b'help',
+    b'version',
 }
 
 
@@ -77,18 +77,18 @@
     >>> opt(b'-:foo')
     ('', False, '', False)
     """
-    if arg.startswith('--'):
-        flag, eq, val = arg.partition('=')
+    if arg.startswith(b'--'):
+        flag, eq, val = arg.partition(b'=')
         if flag[2:] in namelist:
             return flag, bool(eq), val, False
-        if flag[2:] + '=' in namelist:
+        if flag[2:] + b'=' in namelist:
             return flag, bool(eq), val, True
-    elif arg.startswith('-') and arg != '-' and not arg.startswith('-:'):
+    elif arg.startswith(b'-') and arg != b'-' and not arg.startswith(b'-:'):
         flag, val = arg[:2], arg[2:]
         i = shortlist.find(flag[1:])
         if i >= 0:
-            return flag, bool(val), val, shortlist.startswith(':', i + 1)
-    return '', False, '', False
+            return flag, bool(val), val, shortlist.startswith(b':', i + 1)
+    return b'', False, b'', False
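
The parser above, rendered standalone and runnable: shortlist is the getopt-style short spec (b'R:' means -R takes a value) and namelist holds long names, a trailing b'=' marking the value-taking ones:

    def earlyoptarg(arg, shortlist, namelist):
        if arg.startswith(b'--'):
            flag, eq, val = arg.partition(b'=')
            if flag[2:] in namelist:
                return flag, bool(eq), val, False
            if flag[2:] + b'=' in namelist:
                return flag, bool(eq), val, True
        elif arg.startswith(b'-') and arg != b'-' and not arg.startswith(b'-:'):
            flag, val = arg[:2], arg[2:]
            i = shortlist.find(flag[1:])
            if i >= 0:
                return flag, bool(val), val, shortlist.startswith(b':', i + 1)
        return b'', False, b'', False

    assert earlyoptarg(b'--cwd=foo', b'R:', [b'cwd=']) == (b'--cwd', True, b'foo', True)
    assert earlyoptarg(b'-Rfoo', b'R:', [b'cwd=']) == (b'-R', True, b'foo', True)
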
 
 
 def earlygetopt(args, shortlist, namelist, gnu=False, keepsep=False):
@@ -178,7 +178,7 @@
     pos = 0
     while pos < len(args):
         arg = args[pos]
-        if arg == '--':
+        if arg == b'--':
             pos += not keepsep
             break
         flag, hasval, val, takeval = _earlyoptarg(arg, shortlist, namelist)
@@ -261,7 +261,7 @@
         try:
             return int(newparam)
         except ValueError:
-            abort(_('expected int'))
+            abort(_(b'expected int'))
 
 
 def _defaultopt(default):
@@ -310,7 +310,7 @@
     if optaliases is None:
         optaliases = {}
     namelist = []
-    shortlist = ''
+    shortlist = b''
     argmap = {}
     defmap = {}
     negations = {}
@@ -324,11 +324,11 @@
         # convert opts to getopt format
         onames = [name]
         onames.extend(optaliases.get(name, []))
-        name = name.replace('-', '_')
+        name = name.replace(b'-', b'_')
 
-        argmap['-' + short] = name
+        argmap[b'-' + short] = name
         for n in onames:
-            argmap['--' + n] = name
+            argmap[b'--' + n] = name
         defmap[name] = _defaultopt(default)
 
         # copy defaults to state
@@ -337,20 +337,20 @@
         # does it take a parameter?
         if not defmap[name]._isboolopt():
             if short:
-                short += ':'
-            onames = [n + '=' for n in onames]
+                short += b':'
+            onames = [n + b'=' for n in onames]
         elif name not in nevernegate:
             for n in onames:
-                if n.startswith('no-'):
+                if n.startswith(b'no-'):
                     insert = n[3:]
                 else:
-                    insert = 'no-' + n
+                    insert = b'no-' + n
                 # backout (as a practical example) has both --commit and
                 # --no-commit options, so we don't want to allow the
                 # negations of those flags.
                 if insert not in alllong:
-                    assert ('--' + n) not in negations
-                    negations['--' + insert] = '--' + n
+                    assert (b'--' + n) not in negations
+                    negations[b'--' + insert] = b'--' + n
                     namelist.append(insert)
         if short:
             shortlist += short
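
The negation wiring above, sketched in isolation: each negatable boolean long flag gains a --no- twin (or drops the prefix it already carries) unless the twin is itself a real option:

    def buildnegations(booleannames, alllong, nevernegate):
        negations = {}
        for n in booleannames:
            if n in nevernegate:
                continue
            insert = n[3:] if n.startswith(b'no-') else b'no-' + n
            if insert not in alllong:
                negations[b'--' + insert] = b'--' + n
        return negations

    negs = buildnegations(
        [b'commit', b'verbose'],
        alllong={b'commit', b'no-commit', b'verbose'},
        nevernegate={b'help', b'version'},
    )
    assert negs == {b'--no-verbose': b'--verbose'}
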
@@ -381,7 +381,7 @@
 
             def abort(s):
                 raise error.Abort(
-                    _('invalid value %r for option %s, %s')
+                    _(b'invalid value %r for option %s, %s')
                     % (pycompat.maybebytestr(val), opt, s)
                 )
 
--- a/mercurial/filelog.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/filelog.py	Sun Oct 06 09:48:39 2019 -0400
@@ -27,7 +27,7 @@
 class filelog(object):
     def __init__(self, opener, path):
         self._revlog = revlog.revlog(
-            opener, '/'.join(('data', path + '.i')), censorable=True
+            opener, b'/'.join((b'data', path + b'.i')), censorable=True
         )
         # Full name of the user visible file, relative to the repository root.
         # Used by LFS.
@@ -144,8 +144,8 @@
         if maybemissingparents:
             raise error.Abort(
                 _(
-                    'revlog storage does not support missing '
-                    'parents write mode'
+                    b'revlog storage does not support missing '
+                    b'parents write mode'
                 )
             )
 
@@ -169,7 +169,7 @@
         return storageutil.filtermetadata(self.revision(node))
 
     def add(self, text, meta, transaction, link, p1=None, p2=None):
-        if meta or text.startswith('\1\n'):
+        if meta or text.startswith(b'\1\n'):
             text = storageutil.packmeta(meta, text)
         return self.addrevision(text, transaction, link, p1, p2)
 
@@ -230,7 +230,7 @@
     # Used by repo upgrade.
     def clone(self, tr, destrevlog, **kwargs):
         if not isinstance(destrevlog, filelog):
-            raise error.ProgrammingError('expected filelog to clone()')
+            raise error.ProgrammingError(b'expected filelog to clone()')
 
         return self._revlog.clone(tr, destrevlog._revlog, **kwargs)
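
The store path built by the constructor above, by way of example (before any path encoding the store opener may apply): the tracked file foo/bar.txt is backed by the revlog index data/foo/bar.txt.i.

    path = b'foo/bar.txt'
    assert b'/'.join((b'data', path + b'.i')) == b'data/foo/bar.txt.i'
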
 
--- a/mercurial/filemerge.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/filemerge.py	Sun Oct 06 09:48:39 2019 -0400
@@ -42,15 +42,15 @@
 
 
 def _toolstr(ui, tool, part, *args):
-    return ui.config("merge-tools", tool + "." + part, *args)
+    return ui.config(b"merge-tools", tool + b"." + part, *args)
 
 
 def _toolbool(ui, tool, part, *args):
-    return ui.configbool("merge-tools", tool + "." + part, *args)
+    return ui.configbool(b"merge-tools", tool + b"." + part, *args)
 
 
 def _toollist(ui, tool, part):
-    return ui.configlist("merge-tools", tool + "." + part)
+    return ui.configlist(b"merge-tools", tool + b"." + part)
 
 
 internals = {}
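
A sketch of the key scheme these three helpers read (a flat dict stands in for ui.config/configbool/configlist): every per-tool setting lives in the merge-tools section under b'TOOL.PART'.

    config = {
        (b'merge-tools', b'kdiff3.priority'): b'1',
        (b'merge-tools', b'kdiff3.gui'): b'true',
    }

    def toolstr(config, tool, part, default=None):
        return config.get((b'merge-tools', tool + b'.' + part), default)

    assert toolstr(config, b'kdiff3', b'priority') == b'1'
    assert toolstr(config, b'kdiff3', b'regkey') is None
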
@@ -69,17 +69,17 @@
 # languages that may take more columns to still have a chance to fit in an
 # 80-column screen).
 _localchangedotherdeletedmsg = _(
-    "file '%(fd)s' was deleted in other%(o)s but was modified in local%(l)s.\n"
-    "You can use (c)hanged version, (d)elete, or leave (u)nresolved.\n"
-    "What do you want to do?"
-    "$$ &Changed $$ &Delete $$ &Unresolved"
+    b"file '%(fd)s' was deleted in other%(o)s but was modified in local%(l)s.\n"
+    b"You can use (c)hanged version, (d)elete, or leave (u)nresolved.\n"
+    b"What do you want to do?"
+    b"$$ &Changed $$ &Delete $$ &Unresolved"
 )
 
 _otherchangedlocaldeletedmsg = _(
-    "file '%(fd)s' was deleted in local%(l)s but was modified in other%(o)s.\n"
-    "You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.\n"
-    "What do you want to do?"
-    "$$ &Changed $$ &Deleted $$ &Unresolved"
+    b"file '%(fd)s' was deleted in local%(l)s but was modified in other%(o)s.\n"
+    b"You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.\n"
+    b"What do you want to do?"
+    b"$$ &Changed $$ &Deleted $$ &Unresolved"
 )
 
 
@@ -120,7 +120,7 @@
         )
 
     def flags(self):
-        return ''
+        return b''
 
     def changectx(self):
         return self._ctx
@@ -135,34 +135,34 @@
 def _findtool(ui, tool):
     if tool in internals:
         return tool
-    cmd = _toolstr(ui, tool, "executable", tool)
-    if cmd.startswith('python:'):
+    cmd = _toolstr(ui, tool, b"executable", tool)
+    if cmd.startswith(b'python:'):
         return cmd
     return findexternaltool(ui, tool)
 
 
 def _quotetoolpath(cmd):
-    if cmd.startswith('python:'):
+    if cmd.startswith(b'python:'):
         return cmd
     return procutil.shellquote(cmd)
 
 
 def findexternaltool(ui, tool):
-    for kn in ("regkey", "regkeyalt"):
+    for kn in (b"regkey", b"regkeyalt"):
         k = _toolstr(ui, tool, kn)
         if not k:
             continue
-        p = util.lookupreg(k, _toolstr(ui, tool, "regname"))
+        p = util.lookupreg(k, _toolstr(ui, tool, b"regname"))
         if p:
-            p = procutil.findexe(p + _toolstr(ui, tool, "regappend", ""))
+            p = procutil.findexe(p + _toolstr(ui, tool, b"regappend", b""))
             if p:
                 return p
-    exe = _toolstr(ui, tool, "executable", tool)
+    exe = _toolstr(ui, tool, b"executable", tool)
     return procutil.findexe(util.expandpath(exe))
 
 
 def _picktool(repo, ui, path, binary, symlink, changedelete):
-    strictcheck = ui.configbool('merge', 'strict-capability-check')
+    strictcheck = ui.configbool(b'merge', b'strict-capability-check')
 
     def hascapability(tool, capability, strict=False):
         if tool in internals:
@@ -175,33 +175,33 @@
     def check(tool, pat, symlink, binary, changedelete):
         tmsg = tool
         if pat:
-            tmsg = _("%s (for pattern %s)") % (tool, pat)
+            tmsg = _(b"%s (for pattern %s)") % (tool, pat)
         if not _findtool(ui, tool):
             if pat:  # explicitly requested tool deserves a warning
-                ui.warn(_("couldn't find merge tool %s\n") % tmsg)
+                ui.warn(_(b"couldn't find merge tool %s\n") % tmsg)
             else:  # configured but non-existing tools are more silent
-                ui.note(_("couldn't find merge tool %s\n") % tmsg)
-        elif symlink and not hascapability(tool, "symlink", strictcheck):
-            ui.warn(_("tool %s can't handle symlinks\n") % tmsg)
-        elif binary and not hascapability(tool, "binary", strictcheck):
-            ui.warn(_("tool %s can't handle binary\n") % tmsg)
+                ui.note(_(b"couldn't find merge tool %s\n") % tmsg)
+        elif symlink and not hascapability(tool, b"symlink", strictcheck):
+            ui.warn(_(b"tool %s can't handle symlinks\n") % tmsg)
+        elif binary and not hascapability(tool, b"binary", strictcheck):
+            ui.warn(_(b"tool %s can't handle binary\n") % tmsg)
         elif changedelete and not supportscd(tool):
             # the nomerge tools are the only tools that support change/delete
             # conflicts
             pass
-        elif not procutil.gui() and _toolbool(ui, tool, "gui"):
-            ui.warn(_("tool %s requires a GUI\n") % tmsg)
+        elif not procutil.gui() and _toolbool(ui, tool, b"gui"):
+            ui.warn(_(b"tool %s requires a GUI\n") % tmsg)
         else:
             return True
         return False
 
     # internal config: ui.forcemerge
     # forcemerge comes from command line arguments, highest priority
-    force = ui.config('ui', 'forcemerge')
+    force = ui.config(b'ui', b'forcemerge')
     if force:
         toolpath = _findtool(ui, force)
         if changedelete and not supportscd(toolpath):
-            return ":prompt", None
+            return b":prompt", None
         else:
             if toolpath:
                 return (force, _quotetoolpath(toolpath))
@@ -210,10 +210,10 @@
                 return (force, force)
 
     # HGMERGE takes next precedence
-    hgmerge = encoding.environ.get("HGMERGE")
+    hgmerge = encoding.environ.get(b"HGMERGE")
     if hgmerge:
         if changedelete and not supportscd(hgmerge):
-            return ":prompt", None
+            return b":prompt", None
         else:
             return (hgmerge, hgmerge)
 
@@ -222,16 +222,16 @@
     # whether binary capability should be checked strictly
     binarycap = binary and strictcheck
 
-    for pat, tool in ui.configitems("merge-patterns"):
-        mf = match.match(repo.root, '', [pat])
+    for pat, tool in ui.configitems(b"merge-patterns"):
+        mf = match.match(repo.root, b'', [pat])
         if mf(path) and check(tool, pat, symlink, binarycap, changedelete):
-            if binary and not hascapability(tool, "binary", strict=True):
+            if binary and not hascapability(tool, b"binary", strict=True):
                 ui.warn(
                     _(
-                        "warning: check merge-patterns configurations,"
-                        " if %r for binary file %r is unintentional\n"
-                        "(see 'hg help merge-tools'"
-                        " for binary files capability)\n"
+                        b"warning: check merge-patterns configurations,"
+                        b" if %r for binary file %r is unintentional\n"
+                        b"(see 'hg help merge-tools'"
+                        b" for binary files capability)\n"
                     )
                     % (pycompat.bytestr(tool), pycompat.bytestr(path))
                 )
@@ -241,17 +241,17 @@
     # then merge tools
     tools = {}
     disabled = set()
-    for k, v in ui.configitems("merge-tools"):
-        t = k.split('.')[0]
+    for k, v in ui.configitems(b"merge-tools"):
+        t = k.split(b'.')[0]
         if t not in tools:
-            tools[t] = int(_toolstr(ui, t, "priority"))
-        if _toolbool(ui, t, "disabled"):
+            tools[t] = int(_toolstr(ui, t, b"priority"))
+        if _toolbool(ui, t, b"disabled"):
             disabled.add(t)
     names = tools.keys()
     tools = sorted(
         [(-p, tool) for tool, p in tools.items() if tool not in disabled]
     )
-    uimerge = ui.config("ui", "merge")
+    uimerge = ui.config(b"ui", b"merge")
     if uimerge:
         # external tools defined in uimerge won't be able to handle
         # change/delete conflicts
@@ -259,7 +259,7 @@
             if uimerge not in names and not changedelete:
                 return (uimerge, uimerge)
             tools.insert(0, (None, uimerge))  # highest priority
-    tools.append((None, "hgmerge"))  # the old default, if found
+    tools.append((None, b"hgmerge"))  # the old default, if found
     for p, t in tools:
         if check(t, None, symlink, binary, changedelete):
             toolpath = _findtool(ui, t)
@@ -269,26 +269,26 @@
     if symlink or binary or changedelete:
         if not changedelete and len(tools):
             # any tool is rejected by capability for symlink or binary
-            ui.warn(_("no tool found to merge %s\n") % path)
-        return ":prompt", None
-    return ":merge", None
+            ui.warn(_(b"no tool found to merge %s\n") % path)
+        return b":prompt", None
+    return b":merge", None
 
 
 def _eoltype(data):
-    "Guess the EOL type of a file"
-    if '\0' in data:  # binary
+    b"Guess the EOL type of a file"
+    if b'\0' in data:  # binary
         return None
-    if '\r\n' in data:  # Windows
-        return '\r\n'
-    if '\r' in data:  # Old Mac
-        return '\r'
-    if '\n' in data:  # UNIX
-        return '\n'
+    if b'\r\n' in data:  # Windows
+        return b'\r\n'
+    if b'\r' in data:  # Old Mac
+        return b'\r'
+    if b'\n' in data:  # UNIX
+        return b'\n'
     return None  # unknown
 
 
 def _matcheol(file, back):
-    "Convert EOL markers in a file to match origfile"
+    b"Convert EOL markers in a file to match origfile"
     tostyle = _eoltype(back.data())  # No repo.wread filters?
     if tostyle:
         data = util.readfile(file)
@@ -299,7 +299,7 @@
                 util.writefile(file, newdata)
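
A runnable sketch of the EOL guess above. (In passing: byteifying a function's leading string literal, as happens to "Guess the EOL type of a file" here, turns the docstring into a bare bytes expression on Python 3, so __doc__ becomes None; the mechanical rewrite tolerates that wart.)

    def eoltype(data):
        if b'\0' in data:  # NUL byte: binary file, no EOL style
            return None
        for eol in (b'\r\n', b'\r', b'\n'):  # Windows, old Mac, UNIX
            if eol in data:
                return eol
        return None  # unknown

    assert eoltype(b'one\r\ntwo\r\n') == b'\r\n'
    assert eoltype(b'one\ntwo\n') == b'\n'
    assert eoltype(b'bin\0ary') is None
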
 
 
-@internaltool('prompt', nomerge)
+@internaltool(b'prompt', nomerge)
 def _iprompt(repo, mynode, orig, fcd, fco, fca, toolconf, labels=None):
     """Asks the user which of the local `p1()` or the other `p2()` version to
     keep as the merged version."""
@@ -311,53 +311,53 @@
     # conflicts.
     if fcd.changectx().isinmemory():
         raise error.InMemoryMergeConflictsError(
-            'in-memory merge does not ' 'support file conflicts'
+            b'in-memory merge does not ' b'support file conflicts'
         )
 
     prompts = partextras(labels)
-    prompts['fd'] = uipathfn(fd)
+    prompts[b'fd'] = uipathfn(fd)
     try:
         if fco.isabsent():
             index = ui.promptchoice(_localchangedotherdeletedmsg % prompts, 2)
-            choice = ['local', 'other', 'unresolved'][index]
+            choice = [b'local', b'other', b'unresolved'][index]
         elif fcd.isabsent():
             index = ui.promptchoice(_otherchangedlocaldeletedmsg % prompts, 2)
-            choice = ['other', 'local', 'unresolved'][index]
+            choice = [b'other', b'local', b'unresolved'][index]
         else:
             # IMPORTANT: keep the last line of this prompt ("What do you want to
             # do?") very short, see comment next to _localchangedotherdeletedmsg
             # at the top of the file for details.
             index = ui.promptchoice(
                 _(
-                    "file '%(fd)s' needs to be resolved.\n"
-                    "You can keep (l)ocal%(l)s, take (o)ther%(o)s, or leave "
-                    "(u)nresolved.\n"
-                    "What do you want to do?"
-                    "$$ &Local $$ &Other $$ &Unresolved"
+                    b"file '%(fd)s' needs to be resolved.\n"
+                    b"You can keep (l)ocal%(l)s, take (o)ther%(o)s, or leave "
+                    b"(u)nresolved.\n"
+                    b"What do you want to do?"
+                    b"$$ &Local $$ &Other $$ &Unresolved"
                 )
                 % prompts,
                 2,
             )
-            choice = ['local', 'other', 'unresolved'][index]
+            choice = [b'local', b'other', b'unresolved'][index]
 
-        if choice == 'other':
+        if choice == b'other':
             return _iother(repo, mynode, orig, fcd, fco, fca, toolconf, labels)
-        elif choice == 'local':
+        elif choice == b'local':
             return _ilocal(repo, mynode, orig, fcd, fco, fca, toolconf, labels)
-        elif choice == 'unresolved':
+        elif choice == b'unresolved':
             return _ifail(repo, mynode, orig, fcd, fco, fca, toolconf, labels)
     except error.ResponseExpected:
-        ui.write("\n")
+        ui.write(b"\n")
         return _ifail(repo, mynode, orig, fcd, fco, fca, toolconf, labels)
 
 
-@internaltool('local', nomerge)
+@internaltool(b'local', nomerge)
 def _ilocal(repo, mynode, orig, fcd, fco, fca, toolconf, labels=None):
     """Uses the local `p1()` version of files as the merged version."""
     return 0, fcd.isabsent()
 
 
-@internaltool('other', nomerge)
+@internaltool(b'other', nomerge)
 def _iother(repo, mynode, orig, fcd, fco, fca, toolconf, labels=None):
     """Uses the other `p2()` version of files as the merged version."""
     if fco.isabsent():
@@ -370,7 +370,7 @@
     return 0, deleted
 
 
-@internaltool('fail', nomerge)
+@internaltool(b'fail', nomerge)
 def _ifail(repo, mynode, orig, fcd, fco, fca, toolconf, labels=None):
     """
     Rather than attempting to merge files that were modified on both
@@ -401,29 +401,29 @@
 
     ui = repo.ui
 
-    validkeep = ['keep', 'keep-merge3']
+    validkeep = [b'keep', b'keep-merge3']
 
     # do we attempt to simplemerge first?
     try:
-        premerge = _toolbool(ui, tool, "premerge", not binary)
+        premerge = _toolbool(ui, tool, b"premerge", not binary)
     except error.ConfigError:
-        premerge = _toolstr(ui, tool, "premerge", "").lower()
+        premerge = _toolstr(ui, tool, b"premerge", b"").lower()
         if premerge not in validkeep:
-            _valid = ', '.join(["'" + v + "'" for v in validkeep])
+            _valid = b', '.join([b"'" + v + b"'" for v in validkeep])
             raise error.ConfigError(
-                _("%s.premerge not valid " "('%s' is neither boolean nor %s)")
+                _(b"%s.premerge not valid " b"('%s' is neither boolean nor %s)")
                 % (tool, premerge, _valid)
             )
 
     if premerge:
-        if premerge == 'keep-merge3':
+        if premerge == b'keep-merge3':
             if not labels:
                 labels = _defaultconflictlabels
             if len(labels) < 3:
-                labels.append('base')
+                labels.append(b'base')
         r = simplemerge.simplemerge(ui, fcd, fca, fco, quiet=True, label=labels)
         if not r:
-            ui.debug(" premerge successful\n")
+            ui.debug(b" premerge successful\n")
             return 0
         if premerge not in validkeep:
             # restore from backup and try again
@@ -436,15 +436,15 @@
     uipathfn = scmutil.getuipathfn(repo)
     if symlink:
         repo.ui.warn(
-            _('warning: internal %s cannot merge symlinks ' 'for %s\n')
+            _(b'warning: internal %s cannot merge symlinks ' b'for %s\n')
             % (tool, uipathfn(fcd.path()))
         )
         return False
     if fcd.isabsent() or fco.isabsent():
         repo.ui.warn(
             _(
-                'warning: internal %s cannot merge change/delete '
-                'conflict for %s\n'
+                b'warning: internal %s cannot merge change/delete '
+                b'conflict for %s\n'
             )
             % (tool, uipathfn(fcd.path()))
         )
@@ -465,11 +465,11 @@
 
 
 @internaltool(
-    'union',
+    b'union',
     fullmerge,
     _(
-        "warning: conflicts while merging %s! "
-        "(edit, then use 'hg resolve --mark')\n"
+        b"warning: conflicts while merging %s! "
+        b"(edit, then use 'hg resolve --mark')\n"
     ),
     precheck=_mergecheck,
 )
@@ -479,16 +479,16 @@
     files. It will use both left and right sides for conflict regions.
     No markers are inserted."""
     return _merge(
-        repo, mynode, orig, fcd, fco, fca, toolconf, files, labels, 'union'
+        repo, mynode, orig, fcd, fco, fca, toolconf, files, labels, b'union'
     )
 
 
 @internaltool(
-    'merge',
+    b'merge',
     fullmerge,
     _(
-        "warning: conflicts while merging %s! "
-        "(edit, then use 'hg resolve --mark')\n"
+        b"warning: conflicts while merging %s! "
+        b"(edit, then use 'hg resolve --mark')\n"
     ),
     precheck=_mergecheck,
 )
@@ -499,16 +499,16 @@
     the partially merged file. Markers will have two sections, one for each side
     of merge."""
     return _merge(
-        repo, mynode, orig, fcd, fco, fca, toolconf, files, labels, 'merge'
+        repo, mynode, orig, fcd, fco, fca, toolconf, files, labels, b'merge'
     )
 
 
 @internaltool(
-    'merge3',
+    b'merge3',
     fullmerge,
     _(
-        "warning: conflicts while merging %s! "
-        "(edit, then use 'hg resolve --mark')\n"
+        b"warning: conflicts while merging %s! "
+        b"(edit, then use 'hg resolve --mark')\n"
     ),
     precheck=_mergecheck,
 )
@@ -521,7 +521,7 @@
     if not labels:
         labels = _defaultconflictlabels
     if len(labels) < 3:
-        labels.append('base')
+        labels.append(b'base')
     return _imerge(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels)
 
 
@@ -547,31 +547,31 @@
     return True, r
 
 
-@internaltool('merge-local', mergeonly, precheck=_mergecheck)
+@internaltool(b'merge-local', mergeonly, precheck=_mergecheck)
 def _imergelocal(*args, **kwargs):
     """
     Like :merge, but resolve all conflicts non-interactively in favor
     of the local `p1()` changes."""
-    success, status = _imergeauto(localorother='local', *args, **kwargs)
+    success, status = _imergeauto(localorother=b'local', *args, **kwargs)
     return success, status, False
 
 
-@internaltool('merge-other', mergeonly, precheck=_mergecheck)
+@internaltool(b'merge-other', mergeonly, precheck=_mergecheck)
 def _imergeother(*args, **kwargs):
     """
     Like :merge, but resolve all conflicts non-interactively in favor
     of the other `p2()` changes."""
-    success, status = _imergeauto(localorother='other', *args, **kwargs)
+    success, status = _imergeauto(localorother=b'other', *args, **kwargs)
     return success, status, False
 
 
 @internaltool(
-    'tagmerge',
+    b'tagmerge',
     mergeonly,
     _(
-        "automatic tag merging of %s failed! "
-        "(use 'hg resolve --tool :merge' or another merge "
-        "tool of your choice)\n"
+        b"automatic tag merging of %s failed! "
+        b"(use 'hg resolve --tool :merge' or another merge "
+        b"tool of your choice)\n"
     ),
 )
 def _itagmerge(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels=None):
@@ -582,7 +582,7 @@
     return success, status, False
 
 
-@internaltool('dump', fullmerge, binary=True, symlink=True)
+@internaltool(b'dump', fullmerge, binary=True, symlink=True)
 def _idump(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels=None):
     """
     Creates three versions of the files to merge, containing the
@@ -602,16 +602,16 @@
 
     if isinstance(fcd, context.overlayworkingfilectx):
         raise error.InMemoryMergeConflictsError(
-            'in-memory merge does not ' 'support the :dump tool.'
+            b'in-memory merge does not ' b'support the :dump tool.'
         )
 
-    util.writefile(a + ".local", fcd.decodeddata())
-    repo.wwrite(fd + ".other", fco.data(), fco.flags())
-    repo.wwrite(fd + ".base", fca.data(), fca.flags())
+    util.writefile(a + b".local", fcd.decodeddata())
+    repo.wwrite(fd + b".other", fco.data(), fco.flags())
+    repo.wwrite(fd + b".base", fca.data(), fca.flags())
     return False, 1, False
 
 
-@internaltool('forcedump', mergeonly, binary=True, symlink=True)
+@internaltool(b'forcedump', mergeonly, binary=True, symlink=True)
 def _forcedump(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels=None):
     """
     Creates three versions of the files as same as :dump, but omits premerge.
@@ -631,50 +631,50 @@
     # directory and tell the user how to get it is my best idea, but it's
     # clunky.)
     raise error.InMemoryMergeConflictsError(
-        'in-memory merge does not support ' 'external merge tools'
+        b'in-memory merge does not support ' b'external merge tools'
     )
 
 
 def _describemerge(ui, repo, mynode, fcl, fcb, fco, env, toolpath, args):
-    tmpl = ui.config('ui', 'pre-merge-tool-output-template')
+    tmpl = ui.config(b'ui', b'pre-merge-tool-output-template')
     if not tmpl:
         return
 
     mappingdict = templateutil.mappingdict
     props = {
-        'ctx': fcl.changectx(),
-        'node': hex(mynode),
-        'path': fcl.path(),
-        'local': mappingdict(
+        b'ctx': fcl.changectx(),
+        b'node': hex(mynode),
+        b'path': fcl.path(),
+        b'local': mappingdict(
             {
-                'ctx': fcl.changectx(),
-                'fctx': fcl,
-                'node': hex(mynode),
-                'name': _('local'),
-                'islink': 'l' in fcl.flags(),
-                'label': env['HG_MY_LABEL'],
+                b'ctx': fcl.changectx(),
+                b'fctx': fcl,
+                b'node': hex(mynode),
+                b'name': _(b'local'),
+                b'islink': b'l' in fcl.flags(),
+                b'label': env[b'HG_MY_LABEL'],
             }
         ),
-        'base': mappingdict(
+        b'base': mappingdict(
             {
-                'ctx': fcb.changectx(),
-                'fctx': fcb,
-                'name': _('base'),
-                'islink': 'l' in fcb.flags(),
-                'label': env['HG_BASE_LABEL'],
+                b'ctx': fcb.changectx(),
+                b'fctx': fcb,
+                b'name': _(b'base'),
+                b'islink': b'l' in fcb.flags(),
+                b'label': env[b'HG_BASE_LABEL'],
             }
         ),
-        'other': mappingdict(
+        b'other': mappingdict(
             {
-                'ctx': fco.changectx(),
-                'fctx': fco,
-                'name': _('other'),
-                'islink': 'l' in fco.flags(),
-                'label': env['HG_OTHER_LABEL'],
+                b'ctx': fco.changectx(),
+                b'fctx': fco,
+                b'name': _(b'other'),
+                b'islink': b'l' in fco.flags(),
+                b'label': env[b'HG_OTHER_LABEL'],
             }
         ),
-        'toolpath': toolpath,
-        'toolargs': args,
+        b'toolpath': toolpath,
+        b'toolargs': args,
     }
 
     # TODO: make all of this something that can be specified on a per-tool basis
@@ -694,50 +694,50 @@
     uipathfn = scmutil.getuipathfn(repo)
     if fcd.isabsent() or fco.isabsent():
         repo.ui.warn(
-            _('warning: %s cannot merge change/delete conflict ' 'for %s\n')
+            _(b'warning: %s cannot merge change/delete conflict ' b'for %s\n')
             % (tool, uipathfn(fcd.path()))
         )
         return False, 1, None
     unused, unused, unused, back = files
     localpath = _workingpath(repo, fcd)
-    args = _toolstr(repo.ui, tool, "args")
+    args = _toolstr(repo.ui, tool, b"args")
 
     with _maketempfiles(
-        repo, fco, fca, repo.wvfs.join(back.path()), "$output" in args
+        repo, fco, fca, repo.wvfs.join(back.path()), b"$output" in args
     ) as temppaths:
         basepath, otherpath, localoutputpath = temppaths
-        outpath = ""
+        outpath = b""
         mylabel, otherlabel = labels[:2]
         if len(labels) >= 3:
             baselabel = labels[2]
         else:
-            baselabel = 'base'
+            baselabel = b'base'
         env = {
-            'HG_FILE': fcd.path(),
-            'HG_MY_NODE': short(mynode),
-            'HG_OTHER_NODE': short(fco.changectx().node()),
-            'HG_BASE_NODE': short(fca.changectx().node()),
-            'HG_MY_ISLINK': 'l' in fcd.flags(),
-            'HG_OTHER_ISLINK': 'l' in fco.flags(),
-            'HG_BASE_ISLINK': 'l' in fca.flags(),
-            'HG_MY_LABEL': mylabel,
-            'HG_OTHER_LABEL': otherlabel,
-            'HG_BASE_LABEL': baselabel,
+            b'HG_FILE': fcd.path(),
+            b'HG_MY_NODE': short(mynode),
+            b'HG_OTHER_NODE': short(fco.changectx().node()),
+            b'HG_BASE_NODE': short(fca.changectx().node()),
+            b'HG_MY_ISLINK': b'l' in fcd.flags(),
+            b'HG_OTHER_ISLINK': b'l' in fco.flags(),
+            b'HG_BASE_ISLINK': b'l' in fca.flags(),
+            b'HG_MY_LABEL': mylabel,
+            b'HG_OTHER_LABEL': otherlabel,
+            b'HG_BASE_LABEL': baselabel,
         }
         ui = repo.ui
 
-        if "$output" in args:
+        if b"$output" in args:
             # read input from backup, write to original
             outpath = localpath
             localpath = localoutputpath
         replace = {
-            'local': localpath,
-            'base': basepath,
-            'other': otherpath,
-            'output': outpath,
-            'labellocal': mylabel,
-            'labelother': otherlabel,
-            'labelbase': baselabel,
+            b'local': localpath,
+            b'base': basepath,
+            b'other': otherpath,
+            b'output': outpath,
+            b'labellocal': mylabel,
+            b'labelother': otherlabel,
+            b'labelbase': baselabel,
         }
         args = util.interpolate(
             br'\$',
@@ -745,47 +745,47 @@
             args,
             lambda s: procutil.shellquote(util.localpath(s)),
         )
-        if _toolbool(ui, tool, "gui"):
+        if _toolbool(ui, tool, b"gui"):
             repo.ui.status(
-                _('running merge tool %s for file %s\n')
+                _(b'running merge tool %s for file %s\n')
                 % (tool, uipathfn(fcd.path()))
             )
         if scriptfn is None:
-            cmd = toolpath + ' ' + args
-            repo.ui.debug('launching merge tool: %s\n' % cmd)
+            cmd = toolpath + b' ' + args
+            repo.ui.debug(b'launching merge tool: %s\n' % cmd)
             _describemerge(ui, repo, mynode, fcd, fca, fco, env, toolpath, args)
             r = ui.system(
-                cmd, cwd=repo.root, environ=env, blockedtag='mergetool'
+                cmd, cwd=repo.root, environ=env, blockedtag=b'mergetool'
             )
         else:
             repo.ui.debug(
-                'launching python merge script: %s:%s\n' % (toolpath, scriptfn)
+                b'launching python merge script: %s:%s\n' % (toolpath, scriptfn)
             )
             r = 0
             try:
                 # avoid cycle cmdutil->merge->filemerge->extensions->cmdutil
                 from . import extensions
 
-                mod = extensions.loadpath(toolpath, 'hgmerge.%s' % tool)
+                mod = extensions.loadpath(toolpath, b'hgmerge.%s' % tool)
             except Exception:
                 raise error.Abort(
-                    _("loading python merge script failed: %s") % toolpath
+                    _(b"loading python merge script failed: %s") % toolpath
                 )
             mergefn = getattr(mod, scriptfn, None)
             if mergefn is None:
                 raise error.Abort(
-                    _("%s does not have function: %s") % (toolpath, scriptfn)
+                    _(b"%s does not have function: %s") % (toolpath, scriptfn)
                 )
             argslist = procutil.shellsplit(args)
             # avoid cycle cmdutil->merge->filemerge->hook->extensions->cmdutil
             from . import hook
 
             ret, raised = hook.pythonhook(
-                ui, repo, "merge", toolpath, mergefn, {'args': argslist}, True
+                ui, repo, b"merge", toolpath, mergefn, {b'args': argslist}, True
             )
             if raised:
                 r = 1
-        repo.ui.debug('merge tool returned: %d\n' % r)
+        repo.ui.debug(b'merge tool returned: %d\n' % r)
         return True, r, False
 
 
@@ -798,11 +798,11 @@
     if ctx.node() is None:
         ctx = ctx.p1()
 
-    props = {'ctx': ctx}
+    props = {b'ctx': ctx}
     templateresult = template.renderdefault(props)
 
-    label = ('%s:' % label).ljust(pad + 1)
-    mark = '%s %s' % (label, templateresult)
+    label = (b'%s:' % label).ljust(pad + 1)
+    mark = b'%s %s' % (label, templateresult)
 
     if mark:
         mark = mark.splitlines()[0]  # split for safety
@@ -811,7 +811,7 @@
     return stringutil.ellipsis(mark, 80 - 8)
 
 
-_defaultconflictlabels = ['local', 'other']
+_defaultconflictlabels = [b'local', b'other']
 
 
 def _formatlabels(repo, fcd, fco, fca, labels, tool=None):
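
A sketch of the marker-label layout above: pad is the width of the longest label, and the truncation to 72 columns stands in for stringutil.ellipsis(mark, 80 - 8):

    def formatmark(label, rendered, pad):
        label = (b'%s:' % label).ljust(pad + 1)
        mark = b'%s %s' % (label, rendered)
        mark = mark.splitlines()[0]  # split for safety
        return mark[:72]

    assert formatmark(b'local', b'abc123 tip', pad=len(b'other')) == b'local: abc123 tip'
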
@@ -824,9 +824,9 @@
     ca = fca.changectx()
 
     ui = repo.ui
-    template = ui.config('ui', 'mergemarkertemplate')
+    template = ui.config(b'ui', b'mergemarkertemplate')
     if tool is not None:
-        template = _toolstr(ui, tool, 'mergemarkertemplate', template)
+        template = _toolstr(ui, tool, b'mergemarkertemplate', template)
     template = templater.unquotestring(template)
     tres = formatter.templateresources(ui, repo)
     tmpl = formatter.maketemplater(
@@ -851,13 +851,13 @@
     """
     if labels is None:
         return {
-            "l": "",
-            "o": "",
+            b"l": b"",
+            b"o": b"",
         }
 
     return {
-        "l": " [%s]" % labels[0],
-        "o": " [%s]" % labels[1],
+        b"l": b" [%s]" % labels[0],
+        b"o": b" [%s]" % labels[1],
     }
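
The decoration dict above in use, sketched: the %(l)s and %(o)s slots of the change/delete prompts expand to bracketed label names when labels exist, and to empty strings otherwise.

    def partextras(labels):
        if labels is None:
            return {b'l': b'', b'o': b''}
        return {b'l': b' [%s]' % labels[0], b'o': b' [%s]' % labels[1]}

    prompts = partextras([b'working copy', b'merge rev'])
    prompts[b'fd'] = b'path/to/file'
    assert (b"file '%(fd)s' was deleted in other%(o)s" % prompts
            == b"file 'path/to/file' was deleted in other [merge rev]")
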
 
 
@@ -919,20 +919,20 @@
     use them.
     """
     tmproot = None
-    tmprootprefix = repo.ui.config('experimental', 'mergetempdirprefix')
+    tmprootprefix = repo.ui.config(b'experimental', b'mergetempdirprefix')
     if tmprootprefix:
         tmproot = pycompat.mkdtemp(prefix=tmprootprefix)
 
     def maketempfrompath(prefix, path):
         fullbase, ext = os.path.splitext(path)
-        pre = "%s~%s" % (os.path.basename(fullbase), prefix)
+        pre = b"%s~%s" % (os.path.basename(fullbase), prefix)
         if tmproot:
             name = os.path.join(tmproot, pre)
             if ext:
                 name += ext
             f = open(name, r"wb")
         else:
-            fd, name = pycompat.mkstemp(prefix=pre + '.', suffix=ext)
+            fd, name = pycompat.mkstemp(prefix=pre + b'.', suffix=ext)
             f = os.fdopen(fd, r"wb")
         return f, name
 
@@ -943,16 +943,16 @@
         f.close()
         return name
 
-    b = tempfromcontext("base", fca)
-    c = tempfromcontext("other", fco)
+    b = tempfromcontext(b"base", fca)
+    c = tempfromcontext(b"other", fco)
     d = localpath
     if uselocalpath:
         # We start off with this being the backup filename, so remove the .orig
         # to make syntax-highlighting more likely.
-        if d.endswith('.orig'):
+        if d.endswith(b'.orig'):
             d, _ = os.path.splitext(d)
-        f, d = maketempfrompath("local", d)
-        with open(localpath, 'rb') as src:
+        f, d = maketempfrompath(b"local", d)
+        with open(localpath, b'rb') as src:
             f.write(src.read())
         f.close()
 
@@ -991,29 +991,29 @@
     uipathfn = scmutil.getuipathfn(repo)
     fduipath = uipathfn(fd)
     binary = fcd.isbinary() or fco.isbinary() or fca.isbinary()
-    symlink = 'l' in fcd.flags() + fco.flags()
+    symlink = b'l' in fcd.flags() + fco.flags()
     changedelete = fcd.isabsent() or fco.isabsent()
     tool, toolpath = _picktool(repo, ui, fd, binary, symlink, changedelete)
     scriptfn = None
-    if tool in internals and tool.startswith('internal:'):
+    if tool in internals and tool.startswith(b'internal:'):
         # normalize to new-style names (':merge' etc)
-        tool = tool[len('internal') :]
-    if toolpath and toolpath.startswith('python:'):
+        tool = tool[len(b'internal') :]
+    if toolpath and toolpath.startswith(b'python:'):
         invalidsyntax = False
-        if toolpath.count(':') >= 2:
-            script, scriptfn = toolpath[7:].rsplit(':', 1)
+        if toolpath.count(b':') >= 2:
+            script, scriptfn = toolpath[7:].rsplit(b':', 1)
             if not scriptfn:
                 invalidsyntax = True
             # missing :callable can lead to splitting on Windows drive letter
-            if '\\' in scriptfn or '/' in scriptfn:
+            if b'\\' in scriptfn or b'/' in scriptfn:
                 invalidsyntax = True
         else:
             invalidsyntax = True
         if invalidsyntax:
-            raise error.Abort(_("invalid 'python:' syntax: %s") % toolpath)
+            raise error.Abort(_(b"invalid 'python:' syntax: %s") % toolpath)
         toolpath = script
     ui.debug(
-        "picked tool '%s' for %s (binary %s symlink %s changedelete %s)\n"
+        b"picked tool '%s' for %s (binary %s symlink %s changedelete %s)\n"
         % (
             tool,
             fduipath,
@@ -1035,7 +1035,7 @@
         else:
             func = _xmerge
         mergetype = fullmerge
-        onfailure = _("merging %s failed!\n")
+        onfailure = _(b"merging %s failed!\n")
         precheck = None
         isexternal = True
 
@@ -1048,19 +1048,19 @@
     if premerge:
         if orig != fco.path():
             ui.status(
-                _("merging %s and %s to %s\n")
+                _(b"merging %s and %s to %s\n")
                 % (uipathfn(orig), uipathfn(fco.path()), fduipath)
             )
         else:
-            ui.status(_("merging %s\n") % fduipath)
+            ui.status(_(b"merging %s\n") % fduipath)
 
-    ui.debug("my %s other %s ancestor %s\n" % (fcd, fco, fca))
+    ui.debug(b"my %s other %s ancestor %s\n" % (fcd, fco, fca))
 
     if precheck and not precheck(repo, mynode, orig, fcd, fco, fca, toolconf):
         if onfailure:
             if wctx.isinmemory():
                 raise error.InMemoryMergeConflictsError(
-                    'in-memory merge does ' 'not support merge ' 'conflicts'
+                    b'in-memory merge does ' b'not support merge ' b'conflicts'
                 )
             ui.warn(onfailure % fduipath)
         return True, 1, False
@@ -1069,16 +1069,16 @@
     files = (None, None, None, back)
     r = 1
     try:
-        internalmarkerstyle = ui.config('ui', 'mergemarkers')
+        internalmarkerstyle = ui.config(b'ui', b'mergemarkers')
         if isexternal:
-            markerstyle = _toolstr(ui, tool, 'mergemarkers')
+            markerstyle = _toolstr(ui, tool, b'mergemarkers')
         else:
             markerstyle = internalmarkerstyle
 
         if not labels:
             labels = _defaultconflictlabels
         formattedlabels = labels
-        if markerstyle != 'basic':
+        if markerstyle != b'basic':
             formattedlabels = _formatlabels(
                 repo, fcd, fco, fca, labels, tool=tool
             )
@@ -1091,11 +1091,11 @@
             # in conflict markers if premerge is 'keep' or 'keep-merge3'.
             premergelabels = labels
             labeltool = None
-            if markerstyle != 'basic':
+            if markerstyle != b'basic':
                 # respect 'tool's mergemarkertemplate (which defaults to
                 # ui.mergemarkertemplate)
                 labeltool = tool
-            if internalmarkerstyle != 'basic' or markerstyle != 'basic':
+            if internalmarkerstyle != b'basic' or markerstyle != b'basic':
                 premergelabels = _formatlabels(
                     repo, fcd, fco, fca, premergelabels, tool=labeltool
                 )
@@ -1125,7 +1125,9 @@
             if onfailure:
                 if wctx.isinmemory():
                     raise error.InMemoryMergeConflictsError(
-                        'in-memory merge ' 'does not support ' 'merge conflicts'
+                        b'in-memory merge '
+                        b'does not support '
+                        b'merge conflicts'
                     )
                 ui.warn(onfailure % fduipath)
             _onfilemergefailure(ui)
@@ -1137,24 +1139,24 @@
 
 
 def _haltmerge():
-    msg = _('merge halted after failed merge (see hg resolve)')
+    msg = _(b'merge halted after failed merge (see hg resolve)')
     raise error.InterventionRequired(msg)
 
 
 def _onfilemergefailure(ui):
-    action = ui.config('merge', 'on-failure')
-    if action == 'prompt':
-        msg = _('continue merge operation (yn)?' '$$ &Yes $$ &No')
+    action = ui.config(b'merge', b'on-failure')
+    if action == b'prompt':
+        msg = _(b'continue merge operation (yn)?' b'$$ &Yes $$ &No')
         if ui.promptchoice(msg, 0) == 1:
             _haltmerge()
-    if action == 'halt':
+    if action == b'halt':
         _haltmerge()
     # default action is 'continue', in which case we neither prompt nor halt
 
 
 def hasconflictmarkers(data):
     return bool(
-        re.search("^(<<<<<<< .*|=======|>>>>>>> .*)$", data, re.MULTILINE)
+        re.search(b"^(<<<<<<< .*|=======|>>>>>>> .*)$", data, re.MULTILINE)
     )
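
The check above, runnable standalone; after byteification the pattern must itself be a bytes literal, since file data is bytes:

    import re

    def hasconflictmarkers(data):
        return bool(
            re.search(b'^(<<<<<<< .*|=======|>>>>>>> .*)$', data, re.MULTILINE)
        )

    assert hasconflictmarkers(b'<<<<<<< local\na\n=======\nb\n>>>>>>> other\n')
    assert not hasconflictmarkers(b'plain\ntext\n')
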
 
 
@@ -1164,17 +1166,17 @@
     unused, unused, unused, back = files
 
     if not r and (
-        _toolbool(ui, tool, "checkconflicts")
-        or 'conflicts' in _toollist(ui, tool, "check")
+        _toolbool(ui, tool, b"checkconflicts")
+        or b'conflicts' in _toollist(ui, tool, b"check")
     ):
         if hasconflictmarkers(fcd.data()):
             r = 1
 
     checked = False
-    if 'prompt' in _toollist(ui, tool, "check"):
+    if b'prompt' in _toollist(ui, tool, b"check"):
         checked = True
         if ui.promptchoice(
-            _("was merge of '%s' successful (yn)?" "$$ &Yes $$ &No")
+            _(b"was merge of '%s' successful (yn)?" b"$$ &Yes $$ &No")
             % uipathfn(fd),
             1,
         ):
@@ -1184,23 +1186,23 @@
         not r
         and not checked
         and (
-            _toolbool(ui, tool, "checkchanged")
-            or 'changed' in _toollist(ui, tool, "check")
+            _toolbool(ui, tool, b"checkchanged")
+            or b'changed' in _toollist(ui, tool, b"check")
         )
     ):
         if back is not None and not fcd.cmp(back):
             if ui.promptchoice(
                 _(
-                    " output file %s appears unchanged\n"
-                    "was merge successful (yn)?"
-                    "$$ &Yes $$ &No"
+                    b" output file %s appears unchanged\n"
+                    b"was merge successful (yn)?"
+                    b"$$ &Yes $$ &No"
                 )
                 % uipathfn(fd),
                 1,
             ):
                 r = 1
 
-    if back is not None and _toolbool(ui, tool, "fixeol"):
+    if back is not None and _toolbool(ui, tool, b"fixeol"):
         _matcheol(_workingpath(repo, fcd), back)
 
     return r
@@ -1226,27 +1228,29 @@
     """Load internal merge tool from specified registrarobj
     """
     for name, func in registrarobj._table.iteritems():
-        fullname = ':' + name
+        fullname = b':' + name
         internals[fullname] = func
-        internals['internal:' + name] = func
+        internals[b'internal:' + name] = func
         internalsdoc[fullname] = func
 
         capabilities = sorted([k for k, v in func.capabilities.items() if v])
         if capabilities:
-            capdesc = "    (actual capabilities: %s)" % ', '.join(capabilities)
-            func.__doc__ = func.__doc__ + pycompat.sysstr("\n\n%s" % capdesc)
+            capdesc = b"    (actual capabilities: %s)" % b', '.join(
+                capabilities
+            )
+            func.__doc__ = func.__doc__ + pycompat.sysstr(b"\n\n%s" % capdesc)
 
     # to put i18n comments into hg.pot for automatically generated texts
 
     # i18n: "binary" and "symlink" are keywords
     # i18n: this text is added automatically
-    _("    (actual capabilities: binary, symlink)")
+    _(b"    (actual capabilities: binary, symlink)")
     # i18n: "binary" is keyword
     # i18n: this text is added automatically
-    _("    (actual capabilities: binary)")
+    _(b"    (actual capabilities: binary)")
     # i18n: "symlink" is keyword
     # i18n: this text is added automatically
-    _("    (actual capabilities: symlink)")
+    _(b"    (actual capabilities: symlink)")
 
 
 # load built-in merge tools explicitly to setup internalsdoc
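
The loadinternalmerge hunk above mixes the two string types deliberately: capdesc is bytes, but __doc__ must remain a native str on Python 3, hence the pycompat.sysstr conversion. A standalone sketch of that boundary, where sysstr is a simplified stand-in for the real helper:

def sysstr(s):
    # simplified stand-in for mercurial.pycompat.sysstr: decode bytes to
    # the native str type, pass str through unchanged
    return s.decode('latin-1') if isinstance(s, bytes) else s

def mergetool(a, b):
    """Merge two values."""

capdesc = b"    (actual capabilities: %s)" % b', '.join([b'binary', b'symlink'])
mergetool.__doc__ += sysstr(b"\n\n%s" % capdesc)
assert mergetool.__doc__.endswith("(actual capabilities: binary, symlink)")
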
--- a/mercurial/fileset.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/fileset.py	Sun Oct 06 09:48:39 2019 -0400
@@ -39,12 +39,12 @@
 
 def getmatch(mctx, x):
     if not x:
-        raise error.ParseError(_("missing argument"))
+        raise error.ParseError(_(b"missing argument"))
     return methods[x[0]](mctx, *x[1:])
 
 
 def getmatchwithstatus(mctx, x, hint):
-    keys = set(getstring(hint, 'status hint must be a string').split())
+    keys = set(getstring(hint, b'status hint must be a string').split())
     return getmatch(mctx.withstatus(keys), x)
 
 
@@ -56,7 +56,7 @@
     return stringmatch(
         mctx,
         _getkindpat(
-            x, y, matchmod.allpatternkinds, _("pattern must be a string")
+            x, y, matchmod.allpatternkinds, _(b"pattern must be a string")
         ),
     )
 
@@ -64,7 +64,7 @@
 def patternsmatch(mctx, *xs):
     allkinds = matchmod.allpatternkinds
     patterns = [
-        getpattern(x, allkinds, _("pattern must be a string")) for x in xs
+        getpattern(x, allkinds, _(b"pattern must be a string")) for x in xs
     ]
     return mctx.matcher(patterns)
 
@@ -82,7 +82,7 @@
 
 def notmatch(mctx, x):
     m = getmatch(mctx, x)
-    return mctx.predicate(lambda f: not m(f), predrepr=('<not %r>', m))
+    return mctx.predicate(lambda f: not m(f), predrepr=(b'<not %r>', m))
 
 
 def minusmatch(mctx, x, y):
@@ -93,8 +93,8 @@
 
 def listmatch(mctx, *xs):
     raise error.ParseError(
-        _("can't use a list in this context"),
-        hint=_('see \'hg help "filesets.x or y"\''),
+        _(b"can't use a list in this context"),
+        hint=_(b'see \'hg help "filesets.x or y"\''),
     )
 
 
@@ -119,186 +119,186 @@
 predicate = registrar.filesetpredicate(symbols)
 
 
-@predicate('modified()', callstatus=True, weight=_WEIGHT_STATUS)
+@predicate(b'modified()', callstatus=True, weight=_WEIGHT_STATUS)
 def modified(mctx, x):
     """File that is modified according to :hg:`status`.
     """
     # i18n: "modified" is a keyword
-    getargs(x, 0, 0, _("modified takes no arguments"))
+    getargs(x, 0, 0, _(b"modified takes no arguments"))
     s = set(mctx.status().modified)
-    return mctx.predicate(s.__contains__, predrepr='modified')
+    return mctx.predicate(s.__contains__, predrepr=b'modified')
 
 
-@predicate('added()', callstatus=True, weight=_WEIGHT_STATUS)
+@predicate(b'added()', callstatus=True, weight=_WEIGHT_STATUS)
 def added(mctx, x):
     """File that is added according to :hg:`status`.
     """
     # i18n: "added" is a keyword
-    getargs(x, 0, 0, _("added takes no arguments"))
+    getargs(x, 0, 0, _(b"added takes no arguments"))
     s = set(mctx.status().added)
-    return mctx.predicate(s.__contains__, predrepr='added')
+    return mctx.predicate(s.__contains__, predrepr=b'added')
 
 
-@predicate('removed()', callstatus=True, weight=_WEIGHT_STATUS)
+@predicate(b'removed()', callstatus=True, weight=_WEIGHT_STATUS)
 def removed(mctx, x):
     """File that is removed according to :hg:`status`.
     """
     # i18n: "removed" is a keyword
-    getargs(x, 0, 0, _("removed takes no arguments"))
+    getargs(x, 0, 0, _(b"removed takes no arguments"))
     s = set(mctx.status().removed)
-    return mctx.predicate(s.__contains__, predrepr='removed')
+    return mctx.predicate(s.__contains__, predrepr=b'removed')
 
 
-@predicate('deleted()', callstatus=True, weight=_WEIGHT_STATUS)
+@predicate(b'deleted()', callstatus=True, weight=_WEIGHT_STATUS)
 def deleted(mctx, x):
     """Alias for ``missing()``.
     """
     # i18n: "deleted" is a keyword
-    getargs(x, 0, 0, _("deleted takes no arguments"))
+    getargs(x, 0, 0, _(b"deleted takes no arguments"))
     s = set(mctx.status().deleted)
-    return mctx.predicate(s.__contains__, predrepr='deleted')
+    return mctx.predicate(s.__contains__, predrepr=b'deleted')
 
 
-@predicate('missing()', callstatus=True, weight=_WEIGHT_STATUS)
+@predicate(b'missing()', callstatus=True, weight=_WEIGHT_STATUS)
 def missing(mctx, x):
     """File that is missing according to :hg:`status`.
     """
     # i18n: "missing" is a keyword
-    getargs(x, 0, 0, _("missing takes no arguments"))
+    getargs(x, 0, 0, _(b"missing takes no arguments"))
     s = set(mctx.status().deleted)
-    return mctx.predicate(s.__contains__, predrepr='deleted')
+    return mctx.predicate(s.__contains__, predrepr=b'deleted')
 
 
-@predicate('unknown()', callstatus=True, weight=_WEIGHT_STATUS_THOROUGH)
+@predicate(b'unknown()', callstatus=True, weight=_WEIGHT_STATUS_THOROUGH)
 def unknown(mctx, x):
     """File that is unknown according to :hg:`status`."""
     # i18n: "unknown" is a keyword
-    getargs(x, 0, 0, _("unknown takes no arguments"))
+    getargs(x, 0, 0, _(b"unknown takes no arguments"))
     s = set(mctx.status().unknown)
-    return mctx.predicate(s.__contains__, predrepr='unknown')
+    return mctx.predicate(s.__contains__, predrepr=b'unknown')
 
 
-@predicate('ignored()', callstatus=True, weight=_WEIGHT_STATUS_THOROUGH)
+@predicate(b'ignored()', callstatus=True, weight=_WEIGHT_STATUS_THOROUGH)
 def ignored(mctx, x):
     """File that is ignored according to :hg:`status`."""
     # i18n: "ignored" is a keyword
-    getargs(x, 0, 0, _("ignored takes no arguments"))
+    getargs(x, 0, 0, _(b"ignored takes no arguments"))
     s = set(mctx.status().ignored)
-    return mctx.predicate(s.__contains__, predrepr='ignored')
+    return mctx.predicate(s.__contains__, predrepr=b'ignored')
 
 
-@predicate('clean()', callstatus=True, weight=_WEIGHT_STATUS)
+@predicate(b'clean()', callstatus=True, weight=_WEIGHT_STATUS)
 def clean(mctx, x):
     """File that is clean according to :hg:`status`.
     """
     # i18n: "clean" is a keyword
-    getargs(x, 0, 0, _("clean takes no arguments"))
+    getargs(x, 0, 0, _(b"clean takes no arguments"))
     s = set(mctx.status().clean)
-    return mctx.predicate(s.__contains__, predrepr='clean')
+    return mctx.predicate(s.__contains__, predrepr=b'clean')
 
 
-@predicate('tracked()')
+@predicate(b'tracked()')
 def tracked(mctx, x):
     """File that is under Mercurial control."""
     # i18n: "tracked" is a keyword
-    getargs(x, 0, 0, _("tracked takes no arguments"))
-    return mctx.predicate(mctx.ctx.__contains__, predrepr='tracked')
+    getargs(x, 0, 0, _(b"tracked takes no arguments"))
+    return mctx.predicate(mctx.ctx.__contains__, predrepr=b'tracked')
 
 
-@predicate('binary()', weight=_WEIGHT_READ_CONTENTS)
+@predicate(b'binary()', weight=_WEIGHT_READ_CONTENTS)
 def binary(mctx, x):
     """File that appears to be binary (contains NUL bytes).
     """
     # i18n: "binary" is a keyword
-    getargs(x, 0, 0, _("binary takes no arguments"))
+    getargs(x, 0, 0, _(b"binary takes no arguments"))
     return mctx.fpredicate(
-        lambda fctx: fctx.isbinary(), predrepr='binary', cache=True
+        lambda fctx: fctx.isbinary(), predrepr=b'binary', cache=True
     )
 
 
-@predicate('exec()')
+@predicate(b'exec()')
 def exec_(mctx, x):
     """File that is marked as executable.
     """
     # i18n: "exec" is a keyword
-    getargs(x, 0, 0, _("exec takes no arguments"))
+    getargs(x, 0, 0, _(b"exec takes no arguments"))
     ctx = mctx.ctx
-    return mctx.predicate(lambda f: ctx.flags(f) == 'x', predrepr='exec')
+    return mctx.predicate(lambda f: ctx.flags(f) == b'x', predrepr=b'exec')
 
 
-@predicate('symlink()')
+@predicate(b'symlink()')
 def symlink(mctx, x):
     """File that is marked as a symlink.
     """
     # i18n: "symlink" is a keyword
-    getargs(x, 0, 0, _("symlink takes no arguments"))
+    getargs(x, 0, 0, _(b"symlink takes no arguments"))
     ctx = mctx.ctx
-    return mctx.predicate(lambda f: ctx.flags(f) == 'l', predrepr='symlink')
+    return mctx.predicate(lambda f: ctx.flags(f) == b'l', predrepr=b'symlink')
 
 
-@predicate('resolved()', weight=_WEIGHT_STATUS)
+@predicate(b'resolved()', weight=_WEIGHT_STATUS)
 def resolved(mctx, x):
     """File that is marked resolved according to :hg:`resolve -l`.
     """
     # i18n: "resolved" is a keyword
-    getargs(x, 0, 0, _("resolved takes no arguments"))
+    getargs(x, 0, 0, _(b"resolved takes no arguments"))
     if mctx.ctx.rev() is not None:
         return mctx.never()
     ms = merge.mergestate.read(mctx.ctx.repo())
     return mctx.predicate(
-        lambda f: f in ms and ms[f] == 'r', predrepr='resolved'
+        lambda f: f in ms and ms[f] == b'r', predrepr=b'resolved'
     )
 
 
-@predicate('unresolved()', weight=_WEIGHT_STATUS)
+@predicate(b'unresolved()', weight=_WEIGHT_STATUS)
 def unresolved(mctx, x):
     """File that is marked unresolved according to :hg:`resolve -l`.
     """
     # i18n: "unresolved" is a keyword
-    getargs(x, 0, 0, _("unresolved takes no arguments"))
+    getargs(x, 0, 0, _(b"unresolved takes no arguments"))
     if mctx.ctx.rev() is not None:
         return mctx.never()
     ms = merge.mergestate.read(mctx.ctx.repo())
     return mctx.predicate(
-        lambda f: f in ms and ms[f] == 'u', predrepr='unresolved'
+        lambda f: f in ms and ms[f] == b'u', predrepr=b'unresolved'
     )
 
 
-@predicate('hgignore()', weight=_WEIGHT_STATUS)
+@predicate(b'hgignore()', weight=_WEIGHT_STATUS)
 def hgignore(mctx, x):
     """File that matches the active .hgignore pattern.
     """
     # i18n: "hgignore" is a keyword
-    getargs(x, 0, 0, _("hgignore takes no arguments"))
+    getargs(x, 0, 0, _(b"hgignore takes no arguments"))
     return mctx.ctx.repo().dirstate._ignore
 
 
-@predicate('portable()', weight=_WEIGHT_CHECK_FILENAME)
+@predicate(b'portable()', weight=_WEIGHT_CHECK_FILENAME)
 def portable(mctx, x):
     """File that has a portable name. (This doesn't include filenames with case
     collisions.)
     """
     # i18n: "portable" is a keyword
-    getargs(x, 0, 0, _("portable takes no arguments"))
+    getargs(x, 0, 0, _(b"portable takes no arguments"))
     return mctx.predicate(
-        lambda f: util.checkwinfilename(f) is None, predrepr='portable'
+        lambda f: util.checkwinfilename(f) is None, predrepr=b'portable'
     )
 
 
-@predicate('grep(regex)', weight=_WEIGHT_READ_CONTENTS)
+@predicate(b'grep(regex)', weight=_WEIGHT_READ_CONTENTS)
 def grep(mctx, x):
     """File contains the given regular expression.
     """
     try:
         # i18n: "grep" is a keyword
-        r = re.compile(getstring(x, _("grep requires a pattern")))
+        r = re.compile(getstring(x, _(b"grep requires a pattern")))
     except re.error as e:
         raise error.ParseError(
-            _('invalid match pattern: %s') % stringutil.forcebytestr(e)
+            _(b'invalid match pattern: %s') % stringutil.forcebytestr(e)
         )
     return mctx.fpredicate(
         lambda fctx: r.search(fctx.data()),
-        predrepr=('grep(%r)', r.pattern),
+        predrepr=(b'grep(%r)', r.pattern),
         cache=True,
     )
 
@@ -311,33 +311,33 @@
                 # max(4k) = 5k - 1, max(4.5k) = 4.6k - 1
                 n = s[: -len(k)]
                 inc = 1.0
-                if "." in n:
-                    inc /= 10 ** len(n.split(".")[1])
+                if b"." in n:
+                    inc /= 10 ** len(n.split(b".")[1])
                 return int((float(n) + inc) * v) - 1
         # no extension, this is a precise value
         return int(s)
     except ValueError:
-        raise error.ParseError(_("couldn't parse size: %s") % s)
+        raise error.ParseError(_(b"couldn't parse size: %s") % s)
 
 
 def sizematcher(expr):
     """Return a function(size) -> bool from the ``size()`` expression"""
     expr = expr.strip()
-    if '-' in expr:  # do we have a range?
-        a, b = expr.split('-', 1)
+    if b'-' in expr:  # do we have a range?
+        a, b = expr.split(b'-', 1)
         a = util.sizetoint(a)
         b = util.sizetoint(b)
         return lambda x: x >= a and x <= b
-    elif expr.startswith("<="):
+    elif expr.startswith(b"<="):
         a = util.sizetoint(expr[2:])
         return lambda x: x <= a
-    elif expr.startswith("<"):
+    elif expr.startswith(b"<"):
         a = util.sizetoint(expr[1:])
         return lambda x: x < a
-    elif expr.startswith(">="):
+    elif expr.startswith(b">="):
         a = util.sizetoint(expr[2:])
         return lambda x: x >= a
-    elif expr.startswith(">"):
+    elif expr.startswith(b">"):
         a = util.sizetoint(expr[1:])
         return lambda x: x > a
     else:
@@ -346,7 +346,7 @@
         return lambda x: x >= a and x <= b
 
 
-@predicate('size(expression)', weight=_WEIGHT_STATUS)
+@predicate(b'size(expression)', weight=_WEIGHT_STATUS)
 def size(mctx, x):
     """File size matches the given expression. Examples:
 
@@ -356,14 +356,14 @@
     - size('4k - 1MB') - files from 4096 bytes to 1048576 bytes
     """
     # i18n: "size" is a keyword
-    expr = getstring(x, _("size requires an expression"))
+    expr = getstring(x, _(b"size requires an expression"))
     m = sizematcher(expr)
     return mctx.fpredicate(
-        lambda fctx: m(fctx.size()), predrepr=('size(%r)', expr), cache=True
+        lambda fctx: m(fctx.size()), predrepr=(b'size(%r)', expr), cache=True
     )
 
 
-@predicate('encoding(name)', weight=_WEIGHT_READ_CONTENTS)
+@predicate(b'encoding(name)', weight=_WEIGHT_READ_CONTENTS)
 def encoding(mctx, x):
     """File can be successfully decoded with the given character
     encoding. May not be useful for encodings other than ASCII and
@@ -371,7 +371,7 @@
     """
 
     # i18n: "encoding" is a keyword
-    enc = getstring(x, _("encoding requires an encoding name"))
+    enc = getstring(x, _(b"encoding requires an encoding name"))
 
     def encp(fctx):
         d = fctx.data()
@@ -379,14 +379,14 @@
             d.decode(pycompat.sysstr(enc))
             return True
         except LookupError:
-            raise error.Abort(_("unknown encoding '%s'") % enc)
+            raise error.Abort(_(b"unknown encoding '%s'") % enc)
         except UnicodeDecodeError:
             return False
 
-    return mctx.fpredicate(encp, predrepr=('encoding(%r)', enc), cache=True)
+    return mctx.fpredicate(encp, predrepr=(b'encoding(%r)', enc), cache=True)
 
 
-@predicate('eol(style)', weight=_WEIGHT_READ_CONTENTS)
+@predicate(b'eol(style)', weight=_WEIGHT_READ_CONTENTS)
 def eol(mctx, x):
     """File contains newlines of the given style (dos, unix, mac). Binary
     files are excluded, files with mixed line endings match multiple
@@ -394,46 +394,46 @@
     """
 
     # i18n: "eol" is a keyword
-    enc = getstring(x, _("eol requires a style name"))
+    enc = getstring(x, _(b"eol requires a style name"))
 
     def eolp(fctx):
         if fctx.isbinary():
             return False
         d = fctx.data()
-        if (enc == 'dos' or enc == 'win') and '\r\n' in d:
+        if (enc == b'dos' or enc == b'win') and b'\r\n' in d:
             return True
-        elif enc == 'unix' and re.search('(?<!\r)\n', d):
+        elif enc == b'unix' and re.search(b'(?<!\r)\n', d):
             return True
-        elif enc == 'mac' and re.search('\r(?!\n)', d):
+        elif enc == b'mac' and re.search(b'\r(?!\n)', d):
             return True
         return False
 
-    return mctx.fpredicate(eolp, predrepr=('eol(%r)', enc), cache=True)
+    return mctx.fpredicate(eolp, predrepr=(b'eol(%r)', enc), cache=True)
 
 
-@predicate('copied()')
+@predicate(b'copied()')
 def copied(mctx, x):
     """File that is recorded as being copied.
     """
     # i18n: "copied" is a keyword
-    getargs(x, 0, 0, _("copied takes no arguments"))
+    getargs(x, 0, 0, _(b"copied takes no arguments"))
 
     def copiedp(fctx):
         p = fctx.parents()
         return p and p[0].path() != fctx.path()
 
-    return mctx.fpredicate(copiedp, predrepr='copied', cache=True)
+    return mctx.fpredicate(copiedp, predrepr=b'copied', cache=True)
 
 
-@predicate('revs(revs, pattern)', weight=_WEIGHT_STATUS)
+@predicate(b'revs(revs, pattern)', weight=_WEIGHT_STATUS)
 def revs(mctx, x):
     """Evaluate set in the specified revisions. If the revset match multiple
     revs, this will return file matching pattern in any of the revision.
     """
     # i18n: "revs" is a keyword
-    r, x = getargs(x, 2, 2, _("revs takes two arguments"))
+    r, x = getargs(x, 2, 2, _(b"revs takes two arguments"))
     # i18n: "revs" is a keyword
-    revspec = getstring(r, _("first argument to revs must be a revision"))
+    revspec = getstring(r, _(b"first argument to revs must be a revision"))
     repo = mctx.ctx.repo()
     revs = scmutil.revrange(repo, [revspec])
 
@@ -449,7 +449,7 @@
     return matchmod.unionmatcher(matchers)
 
 
-@predicate('status(base, rev, pattern)', weight=_WEIGHT_STATUS)
+@predicate(b'status(base, rev, pattern)', weight=_WEIGHT_STATUS)
 def status(mctx, x):
     """Evaluate predicate using status change between ``base`` and
     ``rev``. Examples:
@@ -458,13 +458,13 @@
     """
     repo = mctx.ctx.repo()
     # i18n: "status" is a keyword
-    b, r, x = getargs(x, 3, 3, _("status takes three arguments"))
+    b, r, x = getargs(x, 3, 3, _(b"status takes three arguments"))
     # i18n: "status" is a keyword
-    baseerr = _("first argument to status must be a revision")
+    baseerr = _(b"first argument to status must be a revision")
     baserevspec = getstring(b, baseerr)
     if not baserevspec:
         raise error.ParseError(baseerr)
-    reverr = _("second argument to status must be a revision")
+    reverr = _(b"second argument to status must be a revision")
     revspec = getstring(r, reverr)
     if not revspec:
         raise error.ParseError(reverr)
@@ -473,12 +473,12 @@
     return getmatch(mc, x)
 
 
-@predicate('subrepo([pattern])')
+@predicate(b'subrepo([pattern])')
 def subrepo(mctx, x):
     """Subrepositories whose paths match the given pattern.
     """
     # i18n: "subrepo" is a keyword
-    getargs(x, 0, 1, _("subrepo takes at most one argument"))
+    getargs(x, 0, 1, _(b"subrepo takes at most one argument"))
     ctx = mctx.ctx
     sstate = ctx.substate
     if x:
@@ -486,7 +486,7 @@
             x,
             matchmod.allpatternkinds,
             # i18n: "subrepo" is a keyword
-            _("subrepo requires a pattern or no arguments"),
+            _(b"subrepo requires a pattern or no arguments"),
         )
         fast = not matchmod.patkind(pat)
         if fast:
@@ -495,26 +495,26 @@
                 return s == pat
 
         else:
-            m = matchmod.match(ctx.repo().root, '', [pat], ctx=ctx)
+            m = matchmod.match(ctx.repo().root, b'', [pat], ctx=ctx)
         return mctx.predicate(
-            lambda f: f in sstate and m(f), predrepr=('subrepo(%r)', pat)
+            lambda f: f in sstate and m(f), predrepr=(b'subrepo(%r)', pat)
         )
     else:
-        return mctx.predicate(sstate.__contains__, predrepr='subrepo')
+        return mctx.predicate(sstate.__contains__, predrepr=b'subrepo')
 
 
 methods = {
-    'withstatus': getmatchwithstatus,
-    'string': stringmatch,
-    'symbol': stringmatch,
-    'kindpat': kindpatmatch,
-    'patterns': patternsmatch,
-    'and': andmatch,
-    'or': ormatch,
-    'minus': minusmatch,
-    'list': listmatch,
-    'not': notmatch,
-    'func': func,
+    b'withstatus': getmatchwithstatus,
+    b'string': stringmatch,
+    b'symbol': stringmatch,
+    b'kindpat': kindpatmatch,
+    b'patterns': patternsmatch,
+    b'and': andmatch,
+    b'or': ormatch,
+    b'minus': minusmatch,
+    b'list': listmatch,
+    b'not': notmatch,
+    b'func': func,
 }
 
 
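The methods table above is the usual parse-tree dispatch: each node is a tuple whose first element, now a bytes tag, selects the handler. A toy standalone version of the same pattern (the handlers and the tree are invented):

def _string(ctx, s):
    return {s}

def _or(ctx, *xs):
    result = set()
    for x in xs:
        result |= evaluate(ctx, x)
    return result

handlers = {b'string': _string, b'or': _or}

def evaluate(ctx, tree):
    # tree[0] is the node tag, the remaining elements are operands
    return handlers[tree[0]](ctx, *tree[1:])

tree = (b'or', (b'string', b'a.txt'), (b'string', b'b.txt'))
assert evaluate(None, tree) == {b'a.txt', b'b.txt'}
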
@@ -550,9 +550,9 @@
         self._status = self._basectx.status(
             self.ctx,
             self._match,
-            listignored='ignored' in keys,
-            listclean='clean' in keys,
-            listunknown='unknown' in keys,
+            listignored=b'ignored' in keys,
+            listclean=b'clean' in keys,
+            listunknown=b'unknown' in keys,
         )
 
     def status(self):
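
Before the parser module: sizematcher() above is representative of the operator parsing that had to move to byte strings. A simplified standalone sketch, using plain int() in place of util.sizetoint and reducing the bare-value case to equality (the real code widens it by the value's precision):

def simplesizematcher(expr):
    expr = expr.strip()
    if b'-' in expr:  # inclusive range "a-b"
        a, b = (int(v) for v in expr.split(b'-', 1))
        return lambda x: a <= x <= b
    # check two-character operators before their one-character prefixes
    for prefix, cmp in (
        (b'<=', lambda x, bound: x <= bound),
        (b'<', lambda x, bound: x < bound),
        (b'>=', lambda x, bound: x >= bound),
        (b'>', lambda x, bound: x > bound),
    ):
        if expr.startswith(prefix):
            bound = int(expr[len(prefix):])
            return lambda x, bound=bound, cmp=cmp: cmp(x, bound)
    a = int(expr)  # bare value
    return lambda x: x == a

assert simplesizematcher(b' 10-20 ')(15)
assert simplesizematcher(b'<=4096')(4096)
assert not simplesizematcher(b'>100')(100)
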
--- a/mercurial/filesetlang.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/filesetlang.py	Sun Oct 06 09:48:39 2019 -0400
@@ -23,28 +23,28 @@
 
 elements = {
     # token-type: binding-strength, primary, prefix, infix, suffix
-    "(": (20, None, ("group", 1, ")"), ("func", 1, ")"), None),
-    ":": (15, None, None, ("kindpat", 15), None),
-    "-": (5, None, ("negate", 19), ("minus", 5), None),
-    "not": (10, None, ("not", 10), None, None),
-    "!": (10, None, ("not", 10), None, None),
-    "and": (5, None, None, ("and", 5), None),
-    "&": (5, None, None, ("and", 5), None),
-    "or": (4, None, None, ("or", 4), None),
-    "|": (4, None, None, ("or", 4), None),
-    "+": (4, None, None, ("or", 4), None),
-    ",": (2, None, None, ("list", 2), None),
-    ")": (0, None, None, None, None),
-    "symbol": (0, "symbol", None, None, None),
-    "string": (0, "string", None, None, None),
-    "end": (0, None, None, None, None),
+    b"(": (20, None, (b"group", 1, b")"), (b"func", 1, b")"), None),
+    b":": (15, None, None, (b"kindpat", 15), None),
+    b"-": (5, None, (b"negate", 19), (b"minus", 5), None),
+    b"not": (10, None, (b"not", 10), None, None),
+    b"!": (10, None, (b"not", 10), None, None),
+    b"and": (5, None, None, (b"and", 5), None),
+    b"&": (5, None, None, (b"and", 5), None),
+    b"or": (4, None, None, (b"or", 4), None),
+    b"|": (4, None, None, (b"or", 4), None),
+    b"+": (4, None, None, (b"or", 4), None),
+    b",": (2, None, None, (b"list", 2), None),
+    b")": (0, None, None, None, None),
+    b"symbol": (0, b"symbol", None, None, None),
+    b"string": (0, b"string", None, None, None),
+    b"end": (0, None, None, None, None),
 }
 
-keywords = {'and', 'or', 'not'}
+keywords = {b'and', b'or', b'not'}
 
 symbols = {}
 
-globchars = ".*{}[]?/\\_"
+globchars = b".*{}[]?/\\_"
 
 
 def tokenize(program):
@@ -54,12 +54,14 @@
         c = program[pos]
         if c.isspace():  # skip inter-token whitespace
             pass
-        elif c in "(),-:|&+!":  # handle simple operators
+        elif c in b"(),-:|&+!":  # handle simple operators
             yield (c, None, pos)
         elif (
-            c in '"\'' or c == 'r' and program[pos : pos + 2] in ("r'", 'r"')
+            c in b'"\''
+            or c == b'r'
+            and program[pos : pos + 2] in (b"r'", b'r"')
         ):  # handle quoted strings
-            if c == 'r':
+            if c == b'r':
                 pos += 1
                 c = program[pos]
                 decode = lambda x: x
@@ -69,15 +71,15 @@
             s = pos
             while pos < l:  # find closing quote
                 d = program[pos]
-                if d == '\\':  # skip over escaped characters
+                if d == b'\\':  # skip over escaped characters
                     pos += 2
                     continue
                 if d == c:
-                    yield ('string', decode(program[s:pos]), s)
+                    yield (b'string', decode(program[s:pos]), s)
                     break
                 pos += 1
             else:
-                raise error.ParseError(_("unterminated string"), s)
+                raise error.ParseError(_(b"unterminated string"), s)
         elif c.isalnum() or c in globchars or ord(c) > 127:
             # gather up a symbol/keyword
             s = pos
@@ -91,30 +93,30 @@
             if sym in keywords:  # operator keywords
                 yield (sym, None, s)
             else:
-                yield ('symbol', sym, s)
+                yield (b'symbol', sym, s)
             pos -= 1
         else:
-            raise error.ParseError(_("syntax error"), pos)
+            raise error.ParseError(_(b"syntax error"), pos)
         pos += 1
-    yield ('end', None, pos)
+    yield (b'end', None, pos)
 
 
 def parse(expr):
     p = parser.parser(elements)
     tree, pos = p.parse(tokenize(expr))
     if pos != len(expr):
-        raise error.ParseError(_("invalid token"), pos)
-    return parser.simplifyinfixops(tree, {'list', 'or'})
+        raise error.ParseError(_(b"invalid token"), pos)
+    return parser.simplifyinfixops(tree, {b'list', b'or'})
 
 
 def getsymbol(x):
-    if x and x[0] == 'symbol':
+    if x and x[0] == b'symbol':
         return x[1]
-    raise error.ParseError(_('not a symbol'))
+    raise error.ParseError(_(b'not a symbol'))
 
 
 def getstring(x, err):
-    if x and (x[0] == 'string' or x[0] == 'symbol'):
+    if x and (x[0] == b'string' or x[0] == b'symbol'):
         return x[1]
     raise error.ParseError(err)
 
@@ -123,12 +125,12 @@
     kind = getsymbol(x)
     pat = getstring(y, err)
     if kind not in allkinds:
-        raise error.ParseError(_("invalid pattern kind: %s") % kind)
-    return '%s:%s' % (kind, pat)
+        raise error.ParseError(_(b"invalid pattern kind: %s") % kind)
+    return b'%s:%s' % (kind, pat)
 
 
 def getpattern(x, allkinds, err):
-    if x and x[0] == 'kindpat':
+    if x and x[0] == b'kindpat':
         return getkindpat(x[1], x[2], allkinds, err)
     return getstring(x, err)
 
@@ -136,7 +138,7 @@
 def getlist(x):
     if not x:
         return []
-    if x[0] == 'list':
+    if x[0] == b'list':
         return list(x[1:])
     return [x]
 
@@ -153,33 +155,33 @@
         return x
 
     op = x[0]
-    if op in {'string', 'symbol'}:
+    if op in {b'string', b'symbol'}:
         return x
-    if op == 'kindpat':
+    if op == b'kindpat':
         getsymbol(x[1])  # kind must be a symbol
         t = _analyze(x[2])
         return (op, x[1], t)
-    if op == 'group':
+    if op == b'group':
         return _analyze(x[1])
-    if op == 'negate':
-        raise error.ParseError(_("can't use negate operator in this context"))
-    if op == 'not':
+    if op == b'negate':
+        raise error.ParseError(_(b"can't use negate operator in this context"))
+    if op == b'not':
         t = _analyze(x[1])
         return (op, t)
-    if op == 'and':
+    if op == b'and':
         ta = _analyze(x[1])
         tb = _analyze(x[2])
         return (op, ta, tb)
-    if op == 'minus':
-        return _analyze(('and', x[1], ('not', x[2])))
-    if op in {'list', 'or'}:
+    if op == b'minus':
+        return _analyze((b'and', x[1], (b'not', x[2])))
+    if op in {b'list', b'or'}:
         ts = tuple(_analyze(y) for y in x[1:])
         return (op,) + ts
-    if op == 'func':
+    if op == b'func':
         getsymbol(x[1])  # function name must be a symbol
         ta = _analyze(x[2])
         return (op, x[1], ta)
-    raise error.ProgrammingError('invalid operator %r' % op)
+    raise error.ProgrammingError(b'invalid operator %r' % op)
 
 
 def _insertstatushints(x):
@@ -195,35 +197,35 @@
         return (), x
 
     op = x[0]
-    if op in {'string', 'symbol', 'kindpat'}:
+    if op in {b'string', b'symbol', b'kindpat'}:
         return (), x
-    if op == 'not':
+    if op == b'not':
         h, t = _insertstatushints(x[1])
         return h, (op, t)
-    if op == 'and':
+    if op == b'and':
         ha, ta = _insertstatushints(x[1])
         hb, tb = _insertstatushints(x[2])
         hr = ha + hb
         if ha and hb:
-            return hr, ('withstatus', (op, ta, tb), ('string', ' '.join(hr)))
+            return hr, (b'withstatus', (op, ta, tb), (b'string', b' '.join(hr)))
         return hr, (op, ta, tb)
-    if op == 'or':
+    if op == b'or':
         hs, ts = zip(*(_insertstatushints(y) for y in x[1:]))
         hr = sum(hs, ())
         if sum(bool(h) for h in hs) > 1:
-            return hr, ('withstatus', (op,) + ts, ('string', ' '.join(hr)))
+            return hr, (b'withstatus', (op,) + ts, (b'string', b' '.join(hr)))
         return hr, (op,) + ts
-    if op == 'list':
+    if op == b'list':
         hs, ts = zip(*(_insertstatushints(y) for y in x[1:]))
         return sum(hs, ()), (op,) + ts
-    if op == 'func':
+    if op == b'func':
         f = getsymbol(x[1])
         # don't propagate 'ha' crossing a function boundary
         ha, ta = _insertstatushints(x[2])
         if getattr(symbols.get(f), '_callstatus', False):
-            return (f,), ('withstatus', (op, x[1], ta), ('string', f))
+            return (f,), (b'withstatus', (op, x[1], ta), (b'string', f))
         return (), (op, x[1], ta)
-    raise error.ProgrammingError('invalid operator %r' % op)
+    raise error.ProgrammingError(b'invalid operator %r' % op)
 
 
 def _mergestatushints(x, instatus):
@@ -235,29 +237,29 @@
         return x
 
     op = x[0]
-    if op == 'withstatus':
+    if op == b'withstatus':
         if instatus:
             # drop redundant hint node
             return _mergestatushints(x[1], instatus)
         t = _mergestatushints(x[1], instatus=True)
         return (op, t, x[2])
-    if op in {'string', 'symbol', 'kindpat'}:
+    if op in {b'string', b'symbol', b'kindpat'}:
         return x
-    if op == 'not':
+    if op == b'not':
         t = _mergestatushints(x[1], instatus)
         return (op, t)
-    if op == 'and':
+    if op == b'and':
         ta = _mergestatushints(x[1], instatus)
         tb = _mergestatushints(x[2], instatus)
         return (op, ta, tb)
-    if op in {'list', 'or'}:
+    if op in {b'list', b'or'}:
         ts = tuple(_mergestatushints(y, instatus) for y in x[1:])
         return (op,) + ts
-    if op == 'func':
+    if op == b'func':
         # don't propagate 'instatus' crossing a function boundary
         ta = _mergestatushints(x[2], instatus=False)
         return (op, x[1], ta)
-    raise error.ProgrammingError('invalid operator %r' % op)
+    raise error.ProgrammingError(b'invalid operator %r' % op)
 
 
 def analyze(x):
@@ -273,8 +275,8 @@
 
 
 def _optimizeandops(op, ta, tb):
-    if tb is not None and tb[0] == 'not':
-        return ('minus', ta, tb[1])
+    if tb is not None and tb[0] == b'not':
+        return (b'minus', ta, tb[1])
     return (op, ta, tb)
 
 
@@ -283,14 +285,14 @@
     ws, ts, ss = [], [], []
     for x in xs:
         w, t = _optimize(x)
-        if t is not None and t[0] in {'string', 'symbol', 'kindpat'}:
+        if t is not None and t[0] in {b'string', b'symbol', b'kindpat'}:
             ss.append(t)
             continue
         ws.append(w)
         ts.append(t)
     if ss:
         ws.append(WEIGHT_CHECK_FILENAME)
-        ts.append(('patterns',) + tuple(ss))
+        ts.append((b'patterns',) + tuple(ss))
     return ws, ts
 
 
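The hunk above preserves _optimizeunion()'s folding: bare string/symbol/kindpat operands of an 'or' are pulled into one b'patterns' node so a single matcher can serve them. A toy standalone version (the input tree is invented):

def foldpatterns(xs):
    plain, rest = [], []
    for t in xs:
        (plain if t[0] in {b'string', b'symbol', b'kindpat'} else rest).append(t)
    if plain:
        rest.append((b'patterns',) + tuple(plain))
    return rest

xs = [
    (b'symbol', b'*.py'),
    (b'func', (b'symbol', b'binary'), None),
    (b'string', b'README'),
]
assert foldpatterns(xs) == [
    (b'func', (b'symbol', b'binary'), None),
    (b'patterns', (b'symbol', b'*.py'), (b'string', b'README')),
]
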
@@ -299,25 +301,25 @@
         return 0, x
 
     op = x[0]
-    if op == 'withstatus':
+    if op == b'withstatus':
         w, t = _optimize(x[1])
         return w, (op, t, x[2])
-    if op in {'string', 'symbol'}:
+    if op in {b'string', b'symbol'}:
         return WEIGHT_CHECK_FILENAME, x
-    if op == 'kindpat':
+    if op == b'kindpat':
         w, t = _optimize(x[2])
         return w, (op, x[1], t)
-    if op == 'not':
+    if op == b'not':
         w, t = _optimize(x[1])
         return w, (op, t)
-    if op == 'and':
+    if op == b'and':
         wa, ta = _optimize(x[1])
         wb, tb = _optimize(x[2])
         if wa <= wb:
             return wa, _optimizeandops(op, ta, tb)
         else:
             return wb, _optimizeandops(op, tb, ta)
-    if op == 'or':
+    if op == b'or':
         ws, ts = _optimizeunion(x[1:])
         if len(ts) == 1:
             return ws[0], ts[0]  # 'or' operation is fully optimized out
@@ -325,15 +327,15 @@
             it[1] for it in sorted(enumerate(ts), key=lambda it: ws[it[0]])
         )
         return max(ws), (op,) + ts
-    if op == 'list':
+    if op == b'list':
         ws, ts = zip(*(_optimize(y) for y in x[1:]))
         return sum(ws), (op,) + ts
-    if op == 'func':
+    if op == b'func':
         f = getsymbol(x[1])
         w = getattr(symbols.get(f), '_weight', 1)
         wa, ta = _optimize(x[2])
         return w + wa, (op, x[1], ta)
-    raise error.ProgrammingError('invalid operator %r' % op)
+    raise error.ProgrammingError(b'invalid operator %r' % op)
 
 
 def optimize(x):
@@ -346,4 +348,4 @@
 
 
 def prettyformat(tree):
-    return parser.prettyformat(tree, ('string', 'symbol'))
+    return parser.prettyformat(tree, (b'string', b'symbol'))
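
The tokenizer in this file is why a change like this takes more than adding b prefixes: it indexes the program one character at a time, and on Python 3 indexing bytes yields ints rather than one-byte strings, so tests like c in b"(),-:|&+!" only keep working when the input is pre-wrapped (Mercurial's pycompat.bytestr exists for exactly this). A standalone illustration of the pitfall:

program = b"clean()"
# Python 3: indexing bytes yields an int, slicing yields bytes
assert program[0] == ord('c')
assert program[0:1] == b'c'
# one-byte slices behave like py2 str characters for membership tests
c = program[5:6]
assert c == b'(' and c in b"(),-:|&+!"
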
--- a/mercurial/formatter.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/formatter.py	Sun Oct 06 09:48:39 2019 -0400
@@ -193,15 +193,15 @@
             self._showitem()
         self._item = {}
 
-    def formatdate(self, date, fmt='%a %b %d %H:%M:%S %Y %1%2'):
+    def formatdate(self, date, fmt=b'%a %b %d %H:%M:%S %Y %1%2'):
         '''convert date tuple to appropriate format'''
         return self._converter.formatdate(date, fmt)
 
-    def formatdict(self, data, key='key', value='value', fmt=None, sep=' '):
+    def formatdict(self, data, key=b'key', value=b'value', fmt=None, sep=b' '):
         '''convert dict or key-value pairs to appropriate dict format'''
         return self._converter.formatdict(data, key, value, fmt, sep)
 
-    def formatlist(self, data, name, fmt=None, sep=' '):
+    def formatlist(self, data, name, fmt=None, sep=b' '):
         '''convert iterable to appropriate list format'''
         # name is a mandatory argument for now, but it could be optional if
         # we had a default template keyword, e.g. {item}
@@ -210,13 +210,13 @@
     def context(self, **ctxs):
         '''insert context objects to be used to render template keywords'''
         ctxs = pycompat.byteskwargs(ctxs)
-        assert all(k in {'repo', 'ctx', 'fctx'} for k in ctxs)
+        assert all(k in {b'repo', b'ctx', b'fctx'} for k in ctxs)
         if self._converter.storecontext:
             # populate missing resources in fctx -> ctx -> repo order
-            if 'fctx' in ctxs and 'ctx' not in ctxs:
-                ctxs['ctx'] = ctxs['fctx'].changectx()
-            if 'ctx' in ctxs and 'repo' not in ctxs:
-                ctxs['repo'] = ctxs['ctx'].repo()
+            if b'fctx' in ctxs and b'ctx' not in ctxs:
+                ctxs[b'ctx'] = ctxs[b'fctx'].changectx()
+            if b'ctx' in ctxs and b'repo' not in ctxs:
+                ctxs[b'repo'] = ctxs[b'ctx'].repo()
             self._item.update(ctxs)
 
     def datahint(self):
@@ -247,7 +247,7 @@
         '''check for plain formatter usage'''
         return False
 
-    def nested(self, field, tmpl=None, sep=''):
+    def nested(self, field, tmpl=None, sep=b''):
         '''sub formatter to store nested data in the specified field'''
         data = []
         self._item[field] = self._converter.wrapnested(data, tmpl, sep)
@@ -268,7 +268,9 @@
     '''build sub items and store them in the parent formatter'''
 
     def __init__(self, ui, converter, data):
-        baseformatter.__init__(self, ui, topic='', opts={}, converter=converter)
+        baseformatter.__init__(
+            self, ui, topic=b'', opts={}, converter=converter
+        )
         self._data = data
 
     def _showitem(self):
@@ -289,7 +291,7 @@
 
     @staticmethod
     def wrapnested(data, tmpl, sep):
-        raise error.ProgrammingError('plainformatter should never be nested')
+        raise error.ProgrammingError(b'plainformatter should never be nested')
 
     @staticmethod
     def formatdate(date, fmt):
@@ -301,7 +303,7 @@
         '''stringify key-value pairs separated by sep'''
         prefmt = pycompat.identity
         if fmt is None:
-            fmt = '%s=%s'
+            fmt = b'%s=%s'
             prefmt = pycompat.bytestr
         return sep.join(
             fmt % (prefmt(k), prefmt(v)) for k, v in _iteritems(data)
@@ -312,7 +314,7 @@
         '''stringify iterable separated by sep'''
         prefmt = pycompat.identity
         if fmt is None:
-            fmt = '%s'
+            fmt = b'%s'
             prefmt = pycompat.bytestr
         return sep.join(fmt % prefmt(e) for e in data)
 
@@ -351,7 +353,7 @@
     def isplain(self):
         return True
 
-    def nested(self, field, tmpl=None, sep=''):
+    def nested(self, field, tmpl=None, sep=b''):
         # nested data will be directly written to ui
         return self
 
@@ -363,16 +365,16 @@
     def __init__(self, ui, out, topic, opts):
         baseformatter.__init__(self, ui, topic, opts, _nullconverter)
         self._out = out
-        self._out.write("%s = [\n" % self._topic)
+        self._out.write(b"%s = [\n" % self._topic)
 
     def _showitem(self):
         self._out.write(
-            '    %s,\n' % stringutil.pprint(self._item, indent=4, level=1)
+            b'    %s,\n' % stringutil.pprint(self._item, indent=4, level=1)
         )
 
     def end(self):
         baseformatter.end(self)
-        self._out.write("]\n")
+        self._out.write(b"]\n")
 
 
 class pickleformatter(baseformatter):
@@ -409,29 +411,29 @@
     def __init__(self, ui, out, topic, opts):
         baseformatter.__init__(self, ui, topic, opts, _nullconverter)
         self._out = out
-        self._out.write("[")
+        self._out.write(b"[")
         self._first = True
 
     def _showitem(self):
         if self._first:
             self._first = False
         else:
-            self._out.write(",")
+            self._out.write(b",")
 
-        self._out.write("\n {\n")
+        self._out.write(b"\n {\n")
         first = True
         for k, v in sorted(self._item.items()):
             if first:
                 first = False
             else:
-                self._out.write(",\n")
+                self._out.write(b",\n")
             u = templatefilters.json(v, paranoid=False)
-            self._out.write('  "%s": %s' % (k, u))
-        self._out.write("\n }")
+            self._out.write(b'  "%s": %s' % (k, u))
+        self._out.write(b"\n }")
 
     def end(self):
         baseformatter.end(self)
-        self._out.write("\n]\n")
+        self._out.write(b"\n]\n")
 
 
 class _templateconverter(object):
@@ -476,7 +478,7 @@
     def __init__(self, ui, out, topic, opts):
         baseformatter.__init__(self, ui, topic, opts, _templateconverter)
         self._out = out
-        spec = lookuptemplate(ui, topic, opts.get('template', ''))
+        spec = lookuptemplate(ui, topic, opts.get(b'template', b''))
         self._tref = spec.ref
         self._t = loadtemplater(
             ui,
@@ -486,16 +488,16 @@
             cache=templatekw.defaulttempl,
         )
         self._parts = templatepartsmap(
-            spec, self._t, ['docheader', 'docfooter', 'separator']
+            spec, self._t, [b'docheader', b'docfooter', b'separator']
         )
         self._counter = itertools.count()
-        self._renderitem('docheader', {})
+        self._renderitem(b'docheader', {})
 
     def _showitem(self):
         item = self._item.copy()
-        item['index'] = index = next(self._counter)
+        item[b'index'] = index = next(self._counter)
         if index > 0:
-            self._renderitem('separator', {})
+            self._renderitem(b'separator', {})
         self._renderitem(self._tref, item)
 
     def _renderitem(self, part, item):
@@ -514,7 +516,7 @@
 
     def end(self):
         baseformatter.end(self)
-        self._renderitem('docfooter', {})
+        self._renderitem(b'docfooter', {})
 
 
 @attr.s(frozen=True)
@@ -544,36 +546,36 @@
     """
 
     # looks like a literal template?
-    if '{' in tmpl:
-        return templatespec('', tmpl, None)
+    if b'{' in tmpl:
+        return templatespec(b'', tmpl, None)
 
     # perhaps a stock style?
     if not os.path.split(tmpl)[0]:
         mapname = templater.templatepath(
-            'map-cmdline.' + tmpl
+            b'map-cmdline.' + tmpl
         ) or templater.templatepath(tmpl)
         if mapname and os.path.isfile(mapname):
             return templatespec(topic, None, mapname)
 
     # perhaps it's a reference to [templates]
-    if ui.config('templates', tmpl):
+    if ui.config(b'templates', tmpl):
         return templatespec(tmpl, None, None)
 
-    if tmpl == 'list':
-        ui.write(_("available styles: %s\n") % templater.stylelist())
-        raise error.Abort(_("specify a template"))
+    if tmpl == b'list':
+        ui.write(_(b"available styles: %s\n") % templater.stylelist())
+        raise error.Abort(_(b"specify a template"))
 
     # perhaps it's a path to a map or a template
-    if ('/' in tmpl or '\\' in tmpl) and os.path.isfile(tmpl):
+    if (b'/' in tmpl or b'\\' in tmpl) and os.path.isfile(tmpl):
         # is it a mapfile for a style?
-        if os.path.basename(tmpl).startswith("map-"):
+        if os.path.basename(tmpl).startswith(b"map-"):
             return templatespec(topic, None, os.path.realpath(tmpl))
-        with util.posixfile(tmpl, 'rb') as f:
+        with util.posixfile(tmpl, b'rb') as f:
             tmpl = f.read()
-        return templatespec('', tmpl, None)
+        return templatespec(b'', tmpl, None)
 
     # constant string?
-    return templatespec('', tmpl, None)
+    return templatespec(b'', tmpl, None)
 
 
 def templatepartsmap(spec, t, partnames):
@@ -583,7 +585,7 @@
         partsmap.update((p, p) for p in partnames if p in t)
     elif spec.ref:
         for part in partnames:
-            ref = '%s:%s' % (spec.ref, part)  # select config sub-section
+            ref = b'%s:%s' % (spec.ref, part)  # select config sub-section
             if ref in t:
                 partsmap[part] = ref
     return partsmap
@@ -605,15 +607,15 @@
 
 def maketemplater(ui, tmpl, defaults=None, resources=None, cache=None):
     """Create a templater from a string template 'tmpl'"""
-    aliases = ui.configitems('templatealias')
+    aliases = ui.configitems(b'templatealias')
     t = templater.templater(
         defaults=defaults, resources=resources, cache=cache, aliases=aliases
     )
     t.cache.update(
-        (k, templater.unquotestring(v)) for k, v in ui.configitems('templates')
+        (k, templater.unquotestring(v)) for k, v in ui.configitems(b'templates')
     )
     if tmpl:
-        t.cache[''] = tmpl
+        t.cache[b''] = tmpl
     return t
 
 
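A standalone sketch of the cache seeding in maketemplater() above: each [templates] config entry becomes a cache entry, and the literal template argument is stored under the empty bytes key. The config pairs below are invented, and strip() is a crude stand-in for templater.unquotestring:

configitems = [(b'summary', b'"{rev} {desc|firstline}"')]

cache = {}
# unquote each configured template and key it by name
cache.update((k, v.strip(b'"')) for k, v in configitems)
cache[b''] = b'{node|short}\n'  # the literal template, keyed by b''

assert cache[b'summary'] == b'{rev} {desc|firstline}'
assert cache[b''] == b'{node|short}\n'
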
@@ -627,9 +629,9 @@
 
     def __init__(self, ui, repo=None):
         self._resmap = {
-            'cache': {},  # for templatekw/funcs to store reusable data
-            'repo': repo,
-            'ui': ui,
+            b'cache': {},  # for templatekw/funcs to store reusable data
+            b'repo': repo,
+            b'ui': ui,
         }
 
     def availablekeys(self, mapping):
@@ -638,7 +640,7 @@
         }
 
     def knownkeys(self):
-        return {'cache', 'ctx', 'fctx', 'repo', 'revcache', 'ui'}
+        return {b'cache', b'ctx', b'fctx', b'repo', b'revcache', b'ui'}
 
     def lookup(self, mapping, key):
         if key not in self.knownkeys():
@@ -651,16 +653,16 @@
     def populatemap(self, context, origmapping, newmapping):
         mapping = {}
         if self._hasnodespec(newmapping):
-            mapping['revcache'] = {}  # per-ctx cache
+            mapping[b'revcache'] = {}  # per-ctx cache
         if self._hasnodespec(origmapping) and self._hasnodespec(newmapping):
-            orignode = templateutil.runsymbol(context, origmapping, 'node')
-            mapping['originalnode'] = orignode
+            orignode = templateutil.runsymbol(context, origmapping, b'node')
+            mapping[b'originalnode'] = orignode
         # put marker to override 'ctx'/'fctx' in mapping if any, and flag
         # its existence to be reported by availablekeys()
-        if 'ctx' not in newmapping and self._hasliteral(newmapping, 'node'):
-            mapping['ctx'] = _placeholder
-        if 'fctx' not in newmapping and self._hasliteral(newmapping, 'path'):
-            mapping['fctx'] = _placeholder
+        if b'ctx' not in newmapping and self._hasliteral(newmapping, b'node'):
+            mapping[b'ctx'] = _placeholder
+        if b'fctx' not in newmapping and self._hasliteral(newmapping, b'path'):
+            mapping[b'fctx'] = _placeholder
         return mapping
 
     def _getsome(self, mapping, key):
@@ -682,11 +684,11 @@
 
     def _hasnodespec(self, mapping):
         """Test if context revision is set or unset in the given mapping"""
-        return 'node' in mapping or 'ctx' in mapping
+        return b'node' in mapping or b'ctx' in mapping
 
     def _loadctx(self, mapping):
-        repo = self._getsome(mapping, 'repo')
-        node = self._getliteral(mapping, 'node')
+        repo = self._getsome(mapping, b'repo')
+        node = self._getliteral(mapping, b'node')
         if repo is None or node is None:
             return
         try:
@@ -695,8 +697,8 @@
             return None  # maybe hidden/non-existent node
 
     def _loadfctx(self, mapping):
-        ctx = self._getsome(mapping, 'ctx')
-        path = self._getliteral(mapping, 'path')
+        ctx = self._getsome(mapping, b'ctx')
+        path = self._getliteral(mapping, b'path')
         if ctx is None or path is None:
             return None
         try:
@@ -705,28 +707,28 @@
             return None  # maybe removed file?
 
     _loadermap = {
-        'ctx': _loadctx,
-        'fctx': _loadfctx,
+        b'ctx': _loadctx,
+        b'fctx': _loadfctx,
     }
 
 
 def formatter(ui, out, topic, opts):
-    template = opts.get("template", "")
-    if template == "cbor":
+    template = opts.get(b"template", b"")
+    if template == b"cbor":
         return cborformatter(ui, out, topic, opts)
-    elif template == "json":
+    elif template == b"json":
         return jsonformatter(ui, out, topic, opts)
-    elif template == "pickle":
+    elif template == b"pickle":
         return pickleformatter(ui, out, topic, opts)
-    elif template == "debug":
+    elif template == b"debug":
         return debugformatter(ui, out, topic, opts)
-    elif template != "":
+    elif template != b"":
         return templateformatter(ui, out, topic, opts)
     # developer config: ui.formatdebug
-    elif ui.configbool('ui', 'formatdebug'):
+    elif ui.configbool(b'ui', b'formatdebug'):
         return debugformatter(ui, out, topic, opts)
     # deprecated config: ui.formatjson
-    elif ui.configbool('ui', 'formatjson'):
+    elif ui.configbool(b'ui', b'formatjson'):
         return jsonformatter(ui, out, topic, opts)
     return plainformatter(ui, out, topic, opts)
 
@@ -737,7 +739,7 @@
 
     Must be invoked using the 'with' statement.
     """
-    with util.posixfile(filename, 'wb') as out:
+    with util.posixfile(filename, b'wb') as out:
         with formatter(ui, out, topic, opts) as fm:
             yield fm
 
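Closing out this file: formatter() above is a straight dispatch on the -T/--template value, all byte strings after this change. A standalone sketch of the selection order, with names standing in for the real classes (the real function also consults ui.formatdebug and ui.formatjson before falling back to plain):

def pickformatter(template):
    named = {
        b'cbor': 'cborformatter',
        b'json': 'jsonformatter',
        b'pickle': 'pickleformatter',
        b'debug': 'debugformatter',
    }
    if template in named:
        return named[template]
    if template != b'':
        return 'templateformatter'  # any other non-empty value is a template
    return 'plainformatter'

assert pickformatter(b'json') == 'jsonformatter'
assert pickformatter(b'{node}\n') == 'templateformatter'
assert pickformatter(b'') == 'plainformatter'
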
--- a/mercurial/graphmod.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/graphmod.py	Sun Oct 06 09:48:39 2019 -0400
@@ -27,15 +27,15 @@
     util,
 )
 
-CHANGESET = 'C'
-PARENT = 'P'
-GRANDPARENT = 'G'
-MISSINGPARENT = 'M'
+CHANGESET = b'C'
+PARENT = b'P'
+GRANDPARENT = b'G'
+MISSINGPARENT = b'M'
 # Style of line to draw. None signals a line that ends and is removed at this
 # point. A number prefix means only the last N characters of the current block
 # will use that style; the rest will use the PARENT style. Add a - sign
 # (making N negative) and all but the first N characters use that style.
-EDGES = {PARENT: '|', GRANDPARENT: ':', MISSINGPARENT: None}
+EDGES = {PARENT: b'|', GRANDPARENT: b':', MISSINGPARENT: None}
 
 
 def dagwalker(repo, revs):
@@ -118,13 +118,13 @@
     newcolor = 1
     config = {}
 
-    for key, val in repo.ui.configitems('graph'):
-        if '.' in key:
-            branch, setting = key.rsplit('.', 1)
+    for key, val in repo.ui.configitems(b'graph'):
+        if b'.' in key:
+            branch, setting = key.rsplit(b'.', 1)
             # Validation
-            if setting == "width" and val.isdigit():
+            if setting == b"width" and val.isdigit():
                 config.setdefault(branch, {})[setting] = int(val)
-            elif setting == "color" and val.isalnum():
+            elif setting == b"color" and val.isalnum():
                 config.setdefault(branch, {})[setting] = val
 
     if config:
@@ -168,8 +168,8 @@
                         ecol,
                         next.index(eid),
                         colors[eid],
-                        bconf.get('width', -1),
-                        bconf.get('color', ''),
+                        bconf.get(b'width', -1),
+                        bconf.get(b'color', b''),
                     )
                 )
             elif eid == cur:
@@ -180,8 +180,8 @@
                             ecol,
                             next.index(p),
                             color,
-                            bconf.get('width', -1),
-                            bconf.get('color', ''),
+                            bconf.get(b'width', -1),
+                            bconf.get(b'color', b''),
                         )
                     )
 
@@ -192,7 +192,7 @@
 
 def asciiedges(type, char, state, rev, parents):
     """adds edge info to changelog DAG walk suitable for ascii()"""
-    seen = state['seen']
+    seen = state[b'seen']
     if rev not in seen:
         seen.append(rev)
     nodeidx = seen.index(rev)
@@ -207,7 +207,7 @@
             knownparents.append(parent)
         else:
             newparents.append(parent)
-            state['edges'][parent] = state['styles'].get(ptype, '|')
+            state[b'edges'][parent] = state[b'styles'].get(ptype, b'|')
 
     ncols = len(seen)
     width = 1 + ncols * 2
@@ -226,7 +226,7 @@
         nmorecols = 1
         width += 2
         yield (type, char, width, (nodeidx, edges, ncols, nmorecols))
-        char = '\\'
+        char = b'\\'
         nodeidx += 1
         ncols += 1
         edges = []
@@ -240,7 +240,7 @@
     if nmorecols > 0:
         width += 2
     # remove current node from edge characters, no longer needed
-    state['edges'].pop(rev, None)
+    state[b'edges'].pop(rev, None)
     yield (type, char, width, (nodeidx, edges, ncols, nmorecols))
 
 
@@ -256,10 +256,10 @@
         if coldiff == -1:
             start = max(idx + 1, pidx)
             tail = echars[idx * 2 : (start - 1) * 2]
-            tail.extend(["/", " "] * (ncols - start))
+            tail.extend([b"/", b" "] * (ncols - start))
             return tail
         else:
-            return ["\\", " "] * (ncols - idx - 1)
+            return [b"\\", b" "] * (ncols - idx - 1)
     else:
         remainder = ncols - idx - 1
         return echars[-(remainder * 2) :] if remainder > 0 else []
@@ -268,20 +268,20 @@
 def _drawedges(echars, edges, nodeline, interline):
     for (start, end) in edges:
         if start == end + 1:
-            interline[2 * end + 1] = "/"
+            interline[2 * end + 1] = b"/"
         elif start == end - 1:
-            interline[2 * start + 1] = "\\"
+            interline[2 * start + 1] = b"\\"
         elif start == end:
             interline[2 * start] = echars[2 * start]
         else:
             if 2 * end >= len(nodeline):
                 continue
-            nodeline[2 * end] = "+"
+            nodeline[2 * end] = b"+"
             if start > end:
                 (start, end) = (end, start)
             for i in range(2 * start + 1, 2 * end):
-                if nodeline[i] != "+":
-                    nodeline[i] = "-"
+                if nodeline[i] != b"+":
+                    nodeline[i] = b"-"
 
 
 def _getpaddingline(echars, idx, ncols, edges):
@@ -297,7 +297,7 @@
         # | | |             | | |
         line.extend(echars[idx * 2 : (idx + 1) * 2])
     else:
-        line.extend([' ', ' '])
+        line.extend([b' ', b' '])
     # all edges to the right of the current node
     remainder = ncols - idx - 1
     if remainder > 0:
@@ -322,7 +322,7 @@
     while edgechars and edgechars[-1] is None:
         edgechars.pop()
     shift_size = max((edgechars.count(None) * 2) - 1, 0)
-    minlines = 3 if not state['graphshorten'] else 2
+    minlines = 3 if not state[b'graphshorten'] else 2
     while len(lines) < minlines + shift_size:
         lines.append(extra[:])
 
@@ -338,17 +338,17 @@
         targets = list(range(first_empty, first_empty + len(toshift) * 2, 2))
         positions = toshift[:]
         for line in lines[-shift_size:]:
-            line[first_empty:] = [' '] * (len(line) - first_empty)
+            line[first_empty:] = [b' '] * (len(line) - first_empty)
             for i in range(len(positions)):
                 pos = positions[i] - 1
                 positions[i] = max(pos, targets[i])
-                line[pos] = '/' if pos > targets[i] else extra[toshift[i]]
+                line[pos] = b'/' if pos > targets[i] else extra[toshift[i]]
 
-    map = {1: '|', 2: '~'} if not state['graphshorten'] else {1: '~'}
+    map = {1: b'|', 2: b'~'} if not state[b'graphshorten'] else {1: b'~'}
     for i, line in enumerate(lines):
         if None not in line:
             continue
-        line[:] = [c or map.get(i, ' ') for c in line]
+        line[:] = [c or map.get(i, b' ') for c in line]
 
     # remove edges that ended
     remove = [p for p, c in edgemap.items() if c is None]
@@ -360,12 +360,12 @@
 def asciistate():
     """returns the initial value for the "state" argument to ascii()"""
     return {
-        'seen': [],
-        'edges': {},
-        'lastcoldiff': 0,
-        'lastindex': 0,
-        'styles': EDGES.copy(),
-        'graphshorten': False,
+        b'seen': [],
+        b'edges': {},
+        b'lastcoldiff': 0,
+        b'lastindex': 0,
+        b'styles': EDGES.copy(),
+        b'graphshorten': False,
     }
 
 
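One practical consequence of byteifying the state dict above: every later lookup must use the same bytes keys, because b'seen' and 'seen' are distinct keys on Python 3 (on Python 2 they were the same, which is why this change has to land wholesale). A standalone illustration:

state = {b'seen': [], b'graphshorten': False}
assert b'seen' in state
assert 'seen' not in state  # a leftover str key would silently miss
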
@@ -383,7 +383,7 @@
     without needing to mimic all of the edge-fixup logic in ascii()
     """
     for (ln, logstr) in graph:
-        ui.write((ln + logstr).rstrip() + "\n")
+        ui.write((ln + logstr).rstrip() + b"\n")
 
 
 def ascii(ui, state, type, char, text, coldata):
@@ -409,11 +409,11 @@
     idx, edges, ncols, coldiff = coldata
     assert -2 < coldiff < 2
 
-    edgemap, seen = state['edges'], state['seen']
+    edgemap, seen = state[b'edges'], state[b'seen']
     # Be tolerant of history issues; make sure we have at least ncols + coldiff
     # elements to work with. See test-glog.t for broken history test cases.
-    echars = [c for p in seen for c in (edgemap.get(p, '|'), ' ')]
-    echars.extend(('|', ' ') * max(ncols + coldiff - len(seen), 0))
+    echars = [c for p in seen for c in (edgemap.get(p, b'|'), b' ')]
+    echars.extend((b'|', b' ') * max(ncols + coldiff - len(seen), 0))
 
     if coldiff == -1:
         # Transform
@@ -446,16 +446,16 @@
 
     # nodeline is the line containing the node character (typically o)
     nodeline = echars[: idx * 2]
-    nodeline.extend([char, " "])
+    nodeline.extend([char, b" "])
 
     nodeline.extend(
         _getnodelineedgestail(
             echars,
             idx,
-            state['lastindex'],
+            state[b'lastindex'],
             ncols,
             coldiff,
-            state['lastcoldiff'],
+            state[b'lastcoldiff'],
             fix_nodeline_tail,
         )
     )
@@ -464,16 +464,16 @@
     # edges between this entry and the next
     shift_interline = echars[: idx * 2]
     for i in pycompat.xrange(2 + coldiff):
-        shift_interline.append(' ')
+        shift_interline.append(b' ')
     count = ncols - idx - 1
     if coldiff == -1:
         for i in pycompat.xrange(count):
-            shift_interline.extend(['/', ' '])
+            shift_interline.extend([b'/', b' '])
     elif coldiff == 0:
         shift_interline.extend(echars[(idx + 1) * 2 : ncols * 2])
     else:
         for i in pycompat.xrange(count):
-            shift_interline.extend(['\\', ' '])
+            shift_interline.extend([b'\\', b' '])
 
     # draw edges from the current node to its parents
     _drawedges(echars, edges, nodeline, shift_interline)
@@ -485,7 +485,7 @@
 
     # If the 'graphshorten' config is set, only draw shift_interline
     # when there is any non-vertical flow in the graph.
-    if state['graphshorten']:
+    if state[b'graphshorten']:
         if any(c in br'\/' for c in shift_interline if c):
             lines.append(shift_interline)
     # Else, no 'graphshorten' config so draw shift_interline.
@@ -502,13 +502,15 @@
     _drawendinglines(lines, extra_interline, edgemap, seen, state)
 
     while len(text) < len(lines):
-        text.append("")
+        text.append(b"")
 
     # print lines
     indentation_level = max(ncols, ncols + coldiff)
-    lines = ["%-*s " % (2 * indentation_level, "".join(line)) for line in lines]
+    lines = [
+        b"%-*s " % (2 * indentation_level, b"".join(line)) for line in lines
+    ]
     outputgraph(ui, zip(lines, text))
 
     # ... and start over
-    state['lastcoldiff'] = coldiff
-    state['lastindex'] = idx
+    state[b'lastcoldiff'] = coldiff
+    state[b'lastindex'] = idx
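
The byteified padding in ascii() above relies on printf-style formatting
for bytes, which exists only on Python 3.5+ (PEP 461). A minimal sketch of
the behaviour the new code assumes, with made-up graph data:

    # Assumes Python 3.5+, where PEP 461 restored %-formatting for bytes,
    # including the %-*s width specifier used by ascii() above.
    indentation_level = 3
    line = [b'o', b' ', b'|', b' ', b'|', b' ']
    # Left-justify the joined edge characters to 2 * indentation_level
    # columns, then append one trailing space, exactly as ascii() does.
    padded = b"%-*s " % (2 * indentation_level, b"".join(line))
    assert padded == b"o | |  "
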
--- a/mercurial/hbisect.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/hbisect.py	Sun Oct 06 09:48:39 2019 -0400
@@ -34,32 +34,32 @@
     repo = repo.unfiltered()
     changelog = repo.changelog
     clparents = changelog.parentrevs
-    skip = {changelog.rev(n) for n in state['skip']}
+    skip = {changelog.rev(n) for n in state[b'skip']}
 
     def buildancestors(bad, good):
         badrev = min([changelog.rev(n) for n in bad])
         ancestors = collections.defaultdict(lambda: None)
-        for rev in repo.revs("descendants(%ln) - ancestors(%ln)", good, good):
+        for rev in repo.revs(b"descendants(%ln) - ancestors(%ln)", good, good):
             ancestors[rev] = []
         if ancestors[badrev] is None:
             return badrev, None
         return badrev, ancestors
 
     good = False
-    badrev, ancestors = buildancestors(state['bad'], state['good'])
+    badrev, ancestors = buildancestors(state[b'bad'], state[b'good'])
     if not ancestors:  # looking for bad to good transition?
         good = True
-        badrev, ancestors = buildancestors(state['good'], state['bad'])
+        badrev, ancestors = buildancestors(state[b'good'], state[b'bad'])
     bad = changelog.node(badrev)
     if not ancestors:  # now we're confused
         if (
-            len(state['bad']) == 1
-            and len(state['good']) == 1
-            and state['bad'] != state['good']
+            len(state[b'bad']) == 1
+            and len(state[b'good']) == 1
+            and state[b'bad'] != state[b'good']
         ):
-            raise error.Abort(_("starting revisions are not directly related"))
+            raise error.Abort(_(b"starting revisions are not directly related"))
         raise error.Abort(
-            _("inconsistent state, %d:%s is good and bad")
+            _(b"inconsistent state, %d:%s is good and bad")
             % (badrev, short(bad))
         )
 
@@ -133,9 +133,9 @@
     parents = repo[nodes[0]].parents()
     if len(parents) > 1:
         if good:
-            side = state['bad']
+            side = state[b'bad']
         else:
-            side = state['good']
+            side = state[b'good']
         num = len(set(i.node() for i in parents) & set(side))
         if num == 1:
             return parents[0].ancestor(parents[1])
@@ -143,41 +143,41 @@
 
 
 def load_state(repo):
-    state = {'current': [], 'good': [], 'bad': [], 'skip': []}
-    for l in repo.vfs.tryreadlines("bisect.state"):
+    state = {b'current': [], b'good': [], b'bad': [], b'skip': []}
+    for l in repo.vfs.tryreadlines(b"bisect.state"):
         kind, node = l[:-1].split()
         node = repo.unfiltered().lookup(node)
         if kind not in state:
-            raise error.Abort(_("unknown bisect kind %s") % kind)
+            raise error.Abort(_(b"unknown bisect kind %s") % kind)
         state[kind].append(node)
     return state
 
 
 def save_state(repo, state):
-    f = repo.vfs("bisect.state", "w", atomictemp=True)
+    f = repo.vfs(b"bisect.state", b"w", atomictemp=True)
     with repo.wlock():
         for kind in sorted(state):
             for node in state[kind]:
-                f.write("%s %s\n" % (kind, hex(node)))
+                f.write(b"%s %s\n" % (kind, hex(node)))
         f.close()
 
 
 def resetstate(repo):
     """remove any bisect state from the repository"""
-    if repo.vfs.exists("bisect.state"):
-        repo.vfs.unlink("bisect.state")
+    if repo.vfs.exists(b"bisect.state"):
+        repo.vfs.unlink(b"bisect.state")
 
 
 def checkstate(state):
     """check we have both 'good' and 'bad' to define a range
 
     Raise Abort exception otherwise."""
-    if state['good'] and state['bad']:
+    if state[b'good'] and state[b'bad']:
         return True
-    if not state['good']:
-        raise error.Abort(_('cannot bisect (no known good revisions)'))
+    if not state[b'good']:
+        raise error.Abort(_(b'cannot bisect (no known good revisions)'))
     else:
-        raise error.Abort(_('cannot bisect (no known bad revisions)'))
+        raise error.Abort(_(b'cannot bisect (no known bad revisions)'))
 
 
 def get(repo, status):
@@ -193,7 +193,7 @@
     - ``current``            : the cset currently being bisected
     """
     state = load_state(repo)
-    if status in ('good', 'bad', 'skip', 'current'):
+    if status in (b'good', b'bad', b'skip', b'current'):
         return map(repo.unfiltered().changelog.rev, state[status])
     else:
         # In the following sets, we do *not* call 'bisect()' with more
@@ -204,77 +204,77 @@
         # 'range' is all csets that make the bisection:
         #   - have a good ancestor and a bad descendant, or conversely
         # that's because the bisection can go either way
-        range = '( bisect(bad)::bisect(good) | bisect(good)::bisect(bad) )'
+        range = b'( bisect(bad)::bisect(good) | bisect(good)::bisect(bad) )'
 
-        _t = repo.revs('bisect(good)::bisect(bad)')
+        _t = repo.revs(b'bisect(good)::bisect(bad)')
         # The sets of topologically good or bad csets
         if len(_t) == 0:
             # Goods are topologically after bads
-            goods = 'bisect(good)::'  # Pruned good csets
-            bads = '::bisect(bad)'  # Pruned bad csets
+            goods = b'bisect(good)::'  # Pruned good csets
+            bads = b'::bisect(bad)'  # Pruned bad csets
         else:
             # Goods are topologically before bads
-            goods = '::bisect(good)'  # Pruned good csets
-            bads = 'bisect(bad)::'  # Pruned bad csets
+            goods = b'::bisect(good)'  # Pruned good csets
+            bads = b'bisect(bad)::'  # Pruned bad csets
 
         # 'pruned' is all csets whose fate is already known: good, bad, skip
-        skips = 'bisect(skip)'  # Pruned skipped csets
-        pruned = '( (%s) | (%s) | (%s) )' % (goods, bads, skips)
+        skips = b'bisect(skip)'  # Pruned skipped csets
+        pruned = b'( (%s) | (%s) | (%s) )' % (goods, bads, skips)
 
         # 'untested' is all csets that are in 'range', but not in 'pruned'
-        untested = '( (%s) - (%s) )' % (range, pruned)
+        untested = b'( (%s) - (%s) )' % (range, pruned)
 
         # 'ignored' is all csets that were not used during the bisection
         # due to DAG topology, but may nevertheless have had an impact.
         # E.g., a branch merged between bads and goods, but whose branch
         # point is outside of the range.
-        iba = '::bisect(bad) - ::bisect(good)'  # Ignored bads' ancestors
-        iga = '::bisect(good) - ::bisect(bad)'  # Ignored goods' ancestors
-        ignored = '( ( (%s) | (%s) ) - (%s) )' % (iba, iga, range)
+        iba = b'::bisect(bad) - ::bisect(good)'  # Ignored bads' ancestors
+        iga = b'::bisect(good) - ::bisect(bad)'  # Ignored goods' ancestors
+        ignored = b'( ( (%s) | (%s) ) - (%s) )' % (iba, iga, range)
 
-        if status == 'range':
+        if status == b'range':
             return repo.revs(range)
-        elif status == 'pruned':
+        elif status == b'pruned':
             return repo.revs(pruned)
-        elif status == 'untested':
+        elif status == b'untested':
             return repo.revs(untested)
-        elif status == 'ignored':
+        elif status == b'ignored':
             return repo.revs(ignored)
-        elif status == "goods":
+        elif status == b"goods":
             return repo.revs(goods)
-        elif status == "bads":
+        elif status == b"bads":
             return repo.revs(bads)
         else:
-            raise error.ParseError(_('invalid bisect state'))
+            raise error.ParseError(_(b'invalid bisect state'))
 
 
 def label(repo, node):
     rev = repo.changelog.rev(node)
 
     # Try explicit sets
-    if rev in get(repo, 'good'):
+    if rev in get(repo, b'good'):
         # i18n: bisect changeset status
-        return _('good')
-    if rev in get(repo, 'bad'):
+        return _(b'good')
+    if rev in get(repo, b'bad'):
         # i18n: bisect changeset status
-        return _('bad')
-    if rev in get(repo, 'skip'):
+        return _(b'bad')
+    if rev in get(repo, b'skip'):
         # i18n: bisect changeset status
-        return _('skipped')
-    if rev in get(repo, 'untested') or rev in get(repo, 'current'):
+        return _(b'skipped')
+    if rev in get(repo, b'untested') or rev in get(repo, b'current'):
         # i18n: bisect changeset status
-        return _('untested')
-    if rev in get(repo, 'ignored'):
+        return _(b'untested')
+    if rev in get(repo, b'ignored'):
         # i18n: bisect changeset status
-        return _('ignored')
+        return _(b'ignored')
 
     # Try implicit sets
-    if rev in get(repo, 'goods'):
+    if rev in get(repo, b'goods'):
         # i18n: bisect changeset status
-        return _('good (implicit)')
-    if rev in get(repo, 'bads'):
+        return _(b'good (implicit)')
+    if rev in get(repo, b'bads'):
         # i18n: bisect changeset status
-        return _('bad (implicit)')
+        return _(b'bad (implicit)')
 
     return None
 
@@ -284,17 +284,17 @@
     if len(nodes) == 1:
         # narrowed it down to a single revision
         if good:
-            ui.write(_("The first good revision is:\n"))
+            ui.write(_(b"The first good revision is:\n"))
         else:
-            ui.write(_("The first bad revision is:\n"))
+            ui.write(_(b"The first bad revision is:\n"))
         displayer.show(repo[nodes[0]])
         extendnode = extendrange(repo, state, nodes, good)
         if extendnode is not None:
             ui.write(
                 _(
-                    'Not all ancestors of this changeset have been'
-                    ' checked.\nUse bisect --extend to continue the '
-                    'bisection from\nthe common ancestor, %s.\n'
+                    b'Not all ancestors of this changeset have been'
+                    b' checked.\nUse bisect --extend to continue the '
+                    b'bisection from\nthe common ancestor, %s.\n'
                 )
                 % extendnode
             )
@@ -303,15 +303,15 @@
         if good:
             ui.write(
                 _(
-                    "Due to skipped revisions, the first "
-                    "good revision could be any of:\n"
+                    b"Due to skipped revisions, the first "
+                    b"good revision could be any of:\n"
                 )
             )
         else:
             ui.write(
                 _(
-                    "Due to skipped revisions, the first "
-                    "bad revision could be any of:\n"
+                    b"Due to skipped revisions, the first "
+                    b"bad revision could be any of:\n"
                 )
             )
         for n in nodes:
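
For reviewers tracing the hbisect changes: load_state() and save_state()
above exchange a simple line-oriented bisect.state file. A standalone
sketch of that "<kind> <hex-node>" format (parse_bisect_state and the
sample data are illustrative, not part of the patch):

    def parse_bisect_state(data):
        """Parse raw bisect.state bytes into a {kind: [node, ...]} dict."""
        state = {b'current': [], b'good': [], b'bad': [], b'skip': []}
        for line in data.splitlines():
            kind, node = line.split()
            if kind not in state:
                raise ValueError(b'unknown bisect kind %s' % kind)
            state[kind].append(node)
        return state

    # Round-trips with the save_state() format: one "<kind> <hex>" per
    # line. Assumes Python 3.5+ for bytes %-formatting.
    sample = b'bad %s\ngood %s\n' % (b'a' * 40, b'b' * 40)
    assert parse_bisect_state(sample)[b'bad'] == [b'a' * 40]
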
--- a/mercurial/help.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/help.py	Sun Oct 06 09:48:39 2019 -0400
@@ -38,15 +38,15 @@
 from .utils import compression
 
 _exclkeywords = {
-    "(ADVANCED)",
-    "(DEPRECATED)",
-    "(EXPERIMENTAL)",
+    b"(ADVANCED)",
+    b"(DEPRECATED)",
+    b"(EXPERIMENTAL)",
     # i18n: "(ADVANCED)" is a keyword, must be translated consistently
-    _("(ADVANCED)"),
+    _(b"(ADVANCED)"),
     # i18n: "(DEPRECATED)" is a keyword, must be translated consistently
-    _("(DEPRECATED)"),
+    _(b"(DEPRECATED)"),
     # i18n: "(EXPERIMENTAL)" is a keyword, must be translated consistently
-    _("(EXPERIMENTAL)"),
+    _(b"(EXPERIMENTAL)"),
 }
 
 # The order in which command categories will be displayed.
@@ -72,28 +72,28 @@
 # Human-readable category names. These are translated.
 # Extensions with custom categories should add their names here.
 CATEGORY_NAMES = {
-    registrar.command.CATEGORY_REPO_CREATION: 'Repository creation',
-    registrar.command.CATEGORY_REMOTE_REPO_MANAGEMENT: 'Remote repository management',
-    registrar.command.CATEGORY_COMMITTING: 'Change creation',
-    registrar.command.CATEGORY_CHANGE_NAVIGATION: 'Change navigation',
-    registrar.command.CATEGORY_CHANGE_MANAGEMENT: 'Change manipulation',
-    registrar.command.CATEGORY_CHANGE_ORGANIZATION: 'Change organization',
-    registrar.command.CATEGORY_WORKING_DIRECTORY: 'Working directory management',
-    registrar.command.CATEGORY_FILE_CONTENTS: 'File content management',
-    registrar.command.CATEGORY_IMPORT_EXPORT: 'Change import/export',
-    registrar.command.CATEGORY_MAINTENANCE: 'Repository maintenance',
-    registrar.command.CATEGORY_HELP: 'Help',
-    registrar.command.CATEGORY_MISC: 'Miscellaneous commands',
-    registrar.command.CATEGORY_NONE: 'Uncategorized commands',
+    registrar.command.CATEGORY_REPO_CREATION: b'Repository creation',
+    registrar.command.CATEGORY_REMOTE_REPO_MANAGEMENT: b'Remote repository management',
+    registrar.command.CATEGORY_COMMITTING: b'Change creation',
+    registrar.command.CATEGORY_CHANGE_NAVIGATION: b'Change navigation',
+    registrar.command.CATEGORY_CHANGE_MANAGEMENT: b'Change manipulation',
+    registrar.command.CATEGORY_CHANGE_ORGANIZATION: b'Change organization',
+    registrar.command.CATEGORY_WORKING_DIRECTORY: b'Working directory management',
+    registrar.command.CATEGORY_FILE_CONTENTS: b'File content management',
+    registrar.command.CATEGORY_IMPORT_EXPORT: b'Change import/export',
+    registrar.command.CATEGORY_MAINTENANCE: b'Repository maintenance',
+    registrar.command.CATEGORY_HELP: b'Help',
+    registrar.command.CATEGORY_MISC: b'Miscellaneous commands',
+    registrar.command.CATEGORY_NONE: b'Uncategorized commands',
 }
 
 # Topic categories.
-TOPIC_CATEGORY_IDS = 'ids'
-TOPIC_CATEGORY_OUTPUT = 'output'
-TOPIC_CATEGORY_CONFIG = 'config'
-TOPIC_CATEGORY_CONCEPTS = 'concepts'
-TOPIC_CATEGORY_MISC = 'misc'
-TOPIC_CATEGORY_NONE = 'none'
+TOPIC_CATEGORY_IDS = b'ids'
+TOPIC_CATEGORY_OUTPUT = b'output'
+TOPIC_CATEGORY_CONFIG = b'config'
+TOPIC_CATEGORY_CONCEPTS = b'concepts'
+TOPIC_CATEGORY_MISC = b'misc'
+TOPIC_CATEGORY_NONE = b'none'
 
 # The order in which topic categories will be displayed.
 # Extensions with custom categories should insert them into this list
@@ -110,12 +110,12 @@
 
 # Human-readable topic category names. These are translated.
 TOPIC_CATEGORY_NAMES = {
-    TOPIC_CATEGORY_IDS: 'Mercurial identifiers',
-    TOPIC_CATEGORY_OUTPUT: 'Mercurial output',
-    TOPIC_CATEGORY_CONFIG: 'Mercurial configuration',
-    TOPIC_CATEGORY_CONCEPTS: 'Concepts',
-    TOPIC_CATEGORY_MISC: 'Miscellaneous',
-    TOPIC_CATEGORY_NONE: 'Uncategorized topics',
+    TOPIC_CATEGORY_IDS: b'Mercurial identifiers',
+    TOPIC_CATEGORY_OUTPUT: b'Mercurial output',
+    TOPIC_CATEGORY_CONFIG: b'Mercurial configuration',
+    TOPIC_CATEGORY_CONCEPTS: b'Concepts',
+    TOPIC_CATEGORY_MISC: b'Miscellaneous',
+    TOPIC_CATEGORY_NONE: b'Uncategorized topics',
 }
 
 
@@ -126,27 +126,27 @@
         for name, desc in sorted(exts.iteritems()):
             if not showdeprecated and any(w in desc for w in _exclkeywords):
                 continue
-            rst.append('%s:%s: %s\n' % (' ' * indent, name, desc))
+            rst.append(b'%s:%s: %s\n' % (b' ' * indent, name, desc))
     if rst:
-        rst.insert(0, '\n%s\n\n' % header)
+        rst.insert(0, b'\n%s\n\n' % header)
     return rst
 
 
 def extshelp(ui):
-    rst = loaddoc('extensions')(ui).splitlines(True)
+    rst = loaddoc(b'extensions')(ui).splitlines(True)
     rst.extend(
         listexts(
-            _('enabled extensions:'), extensions.enabled(), showdeprecated=True
+            _(b'enabled extensions:'), extensions.enabled(), showdeprecated=True
         )
     )
     rst.extend(
         listexts(
-            _('disabled extensions:'),
+            _(b'disabled extensions:'),
             extensions.disabled(),
             showdeprecated=ui.verbose,
         )
     )
-    doc = ''.join(rst)
+    doc = b''.join(rst)
     return doc
 
 
@@ -158,17 +158,17 @@
             shortopt, longopt, default, desc, optlabel = option
         else:
             shortopt, longopt, default, desc = option
-            optlabel = _("VALUE")  # default label
+            optlabel = _(b"VALUE")  # default label
 
         if not verbose and any(w in desc for w in _exclkeywords):
             continue
 
-        so = ''
+        so = b''
         if shortopt:
-            so = '-' + shortopt
-        lo = '--' + longopt
+            so = b'-' + shortopt
+        lo = b'--' + longopt
         if default is True:
-            lo = '--[no-]' + longopt
+            lo = b'--[no-]' + longopt
 
         if isinstance(default, fancyopts.customopt):
             default = default.getdefaultvalue()
@@ -179,34 +179,34 @@
             # then convert it to bytes.
             defaultstr = pycompat.bytestr(default)
             if default is True:
-                defaultstr = _("on")
-            desc += _(" (default: %s)") % defaultstr
+                defaultstr = _(b"on")
+            desc += _(b" (default: %s)") % defaultstr
 
         if isinstance(default, list):
-            lo += " %s [+]" % optlabel
+            lo += b" %s [+]" % optlabel
             multioccur = True
         elif (default is not None) and not isinstance(default, bool):
-            lo += " %s" % optlabel
+            lo += b" %s" % optlabel
 
         data.append((so, lo, desc))
 
     if multioccur:
-        header += _(" ([+] can be repeated)")
+        header += _(b" ([+] can be repeated)")
 
-    rst = ['\n%s:\n\n' % header]
+    rst = [b'\n%s:\n\n' % header]
     rst.extend(minirst.maketable(data, 1))
 
-    return ''.join(rst)
+    return b''.join(rst)
 
 
 def indicateomitted(rst, omitted, notomitted=None):
-    rst.append('\n\n.. container:: omitted\n\n    %s\n\n' % omitted)
+    rst.append(b'\n\n.. container:: omitted\n\n    %s\n\n' % omitted)
     if notomitted:
-        rst.append('\n\n.. container:: notomitted\n\n    %s\n\n' % notomitted)
+        rst.append(b'\n\n.. container:: notomitted\n\n    %s\n\n' % notomitted)
 
 
 def filtercmd(ui, cmd, func, kw, doc):
-    if not ui.debugflag and cmd.startswith("debug") and kw != "debug":
+    if not ui.debugflag and cmd.startswith(b"debug") and kw != b"debug":
         # Debug command, and user is not looking for those.
         return True
     if not ui.verbose:
@@ -219,17 +219,17 @@
         if doc and any(w in doc for w in _exclkeywords):
             # Documentation has excluded keywords.
             return True
-    if kw == "shortlist" and not getattr(func, 'helpbasic', False):
+    if kw == b"shortlist" and not getattr(func, 'helpbasic', False):
         # We're presenting the short list but the command is not basic.
         return True
-    if ui.configbool('help', 'hidden-command.%s' % cmd):
+    if ui.configbool(b'help', b'hidden-command.%s' % cmd):
         # Configuration explicitly hides the command.
         return True
     return False
 
 
 def filtertopic(ui, topic):
-    return ui.configbool('help', 'hidden-topic.%s' % topic, False)
+    return ui.configbool(b'help', b'hidden-topic.%s' % topic, False)
 
 
 def topicmatch(ui, commands, kw):
@@ -244,10 +244,10 @@
         return kw in encoding.lower(container)  # translated in helptable
 
     results = {
-        'topics': [],
-        'commands': [],
-        'extensions': [],
-        'extensioncommands': [],
+        b'topics': [],
+        b'commands': [],
+        b'extensions': [],
+        b'extensioncommands': [],
     }
     for topic in helptable:
         names, header, doc = topic[0:3]
@@ -259,15 +259,15 @@
         ):
             name = names[0]
             if not filtertopic(ui, name):
-                results['topics'].append((names[0], header))
+                results[b'topics'].append((names[0], header))
     for cmd, entry in commands.table.iteritems():
         if len(entry) == 3:
             summary = entry[2]
         else:
-            summary = ''
+            summary = b''
         # translate docs *before* searching there
         func = entry[0]
-        docs = _(pycompat.getdoc(func)) or ''
+        docs = _(pycompat.getdoc(func)) or b''
         if kw in cmd or lowercontains(summary) or lowercontains(docs):
             doclines = docs.splitlines()
             if doclines:
@@ -275,18 +275,18 @@
             cmdname = cmdutil.parsealiases(cmd)[0]
             if filtercmd(ui, cmdname, func, kw, docs):
                 continue
-            results['commands'].append((cmdname, summary))
+            results[b'commands'].append((cmdname, summary))
     for name, docs in itertools.chain(
         extensions.enabled(False).iteritems(), extensions.disabled().iteritems()
     ):
         if not docs:
             continue
-        name = name.rpartition('.')[-1]
+        name = name.rpartition(b'.')[-1]
         if lowercontains(name) or lowercontains(docs):
             # extension docs are already translated
-            results['extensions'].append((name, docs.splitlines()[0]))
+            results[b'extensions'].append((name, docs.splitlines()[0]))
         try:
-            mod = extensions.load(ui, name, '')
+            mod = extensions.load(ui, name, b'')
         except ImportError:
             # debug message would be printed in extensions.load()
             continue
@@ -298,10 +298,10 @@
                 if cmddoc:
                     cmddoc = gettext(cmddoc).splitlines()[0]
                 else:
-                    cmddoc = _('(no help text available)')
+                    cmddoc = _(b'(no help text available)')
                 if filtercmd(ui, cmdname, func, kw, cmddoc):
                     continue
-                results['extensioncommands'].append((cmdname, cmddoc))
+                results[b'extensioncommands'].append((cmdname, cmddoc))
     return results
 
 
@@ -309,10 +309,10 @@
     """Return a delayed loader for help/topic.txt."""
 
     def loader(ui):
-        docdir = os.path.join(util.datapath, 'help')
+        docdir = os.path.join(util.datapath, b'help')
         if subdir:
             docdir = os.path.join(docdir, subdir)
-        path = os.path.join(docdir, topic + ".txt")
+        path = os.path.join(docdir, topic + b".txt")
         doc = gettext(util.readfile(path))
         for rewriter in helphooks.get(topic, []):
             doc = rewriter(ui, topic, doc)
@@ -323,54 +323,54 @@
 
 internalstable = sorted(
     [
-        (['bundle2'], _('Bundle2'), loaddoc('bundle2', subdir='internals')),
-        (['bundles'], _('Bundles'), loaddoc('bundles', subdir='internals')),
-        (['cbor'], _('CBOR'), loaddoc('cbor', subdir='internals')),
-        (['censor'], _('Censor'), loaddoc('censor', subdir='internals')),
+        ([b'bundle2'], _(b'Bundle2'), loaddoc(b'bundle2', subdir=b'internals')),
+        ([b'bundles'], _(b'Bundles'), loaddoc(b'bundles', subdir=b'internals')),
+        ([b'cbor'], _(b'CBOR'), loaddoc(b'cbor', subdir=b'internals')),
+        ([b'censor'], _(b'Censor'), loaddoc(b'censor', subdir=b'internals')),
         (
-            ['changegroups'],
-            _('Changegroups'),
-            loaddoc('changegroups', subdir='internals'),
+            [b'changegroups'],
+            _(b'Changegroups'),
+            loaddoc(b'changegroups', subdir=b'internals'),
         ),
         (
-            ['config'],
-            _('Config Registrar'),
-            loaddoc('config', subdir='internals'),
+            [b'config'],
+            _(b'Config Registrar'),
+            loaddoc(b'config', subdir=b'internals'),
         ),
         (
-            ['extensions', 'extension'],
-            _('Extension API'),
-            loaddoc('extensions', subdir='internals'),
+            [b'extensions', b'extension'],
+            _(b'Extension API'),
+            loaddoc(b'extensions', subdir=b'internals'),
         ),
         (
-            ['mergestate'],
-            _('Mergestate'),
-            loaddoc('mergestate', subdir='internals'),
+            [b'mergestate'],
+            _(b'Mergestate'),
+            loaddoc(b'mergestate', subdir=b'internals'),
         ),
         (
-            ['requirements'],
-            _('Repository Requirements'),
-            loaddoc('requirements', subdir='internals'),
+            [b'requirements'],
+            _(b'Repository Requirements'),
+            loaddoc(b'requirements', subdir=b'internals'),
         ),
         (
-            ['revlogs'],
-            _('Revision Logs'),
-            loaddoc('revlogs', subdir='internals'),
+            [b'revlogs'],
+            _(b'Revision Logs'),
+            loaddoc(b'revlogs', subdir=b'internals'),
         ),
         (
-            ['wireprotocol'],
-            _('Wire Protocol'),
-            loaddoc('wireprotocol', subdir='internals'),
+            [b'wireprotocol'],
+            _(b'Wire Protocol'),
+            loaddoc(b'wireprotocol', subdir=b'internals'),
         ),
         (
-            ['wireprotocolrpc'],
-            _('Wire Protocol RPC'),
-            loaddoc('wireprotocolrpc', subdir='internals'),
+            [b'wireprotocolrpc'],
+            _(b'Wire Protocol RPC'),
+            loaddoc(b'wireprotocolrpc', subdir=b'internals'),
         ),
         (
-            ['wireprotocolv2'],
-            _('Wire Protocol Version 2'),
-            loaddoc('wireprotocolv2', subdir='internals'),
+            [b'wireprotocolv2'],
+            _(b'Wire Protocol Version 2'),
+            loaddoc(b'wireprotocolv2', subdir=b'internals'),
         ),
     ]
 )
@@ -379,138 +379,155 @@
 def internalshelp(ui):
     """Generate the index for the "internals" topic."""
     lines = [
-        'To access a subtopic, use "hg help internals.{subtopic-name}"\n',
-        '\n',
+        b'To access a subtopic, use "hg help internals.{subtopic-name}"\n',
+        b'\n',
     ]
     for names, header, doc in internalstable:
-        lines.append(' :%s: %s\n' % (names[0], header))
+        lines.append(b' :%s: %s\n' % (names[0], header))
 
-    return ''.join(lines)
+    return b''.join(lines)
 
 
 helptable = sorted(
     [
         (
-            ['bundlespec'],
-            _("Bundle File Formats"),
-            loaddoc('bundlespec'),
+            [b'bundlespec'],
+            _(b"Bundle File Formats"),
+            loaddoc(b'bundlespec'),
             TOPIC_CATEGORY_CONCEPTS,
         ),
         (
-            ['color'],
-            _("Colorizing Outputs"),
-            loaddoc('color'),
+            [b'color'],
+            _(b"Colorizing Outputs"),
+            loaddoc(b'color'),
             TOPIC_CATEGORY_OUTPUT,
         ),
         (
-            ["config", "hgrc"],
-            _("Configuration Files"),
-            loaddoc('config'),
+            [b"config", b"hgrc"],
+            _(b"Configuration Files"),
+            loaddoc(b'config'),
             TOPIC_CATEGORY_CONFIG,
         ),
         (
-            ['deprecated'],
-            _("Deprecated Features"),
-            loaddoc('deprecated'),
+            [b'deprecated'],
+            _(b"Deprecated Features"),
+            loaddoc(b'deprecated'),
             TOPIC_CATEGORY_MISC,
         ),
-        (["dates"], _("Date Formats"), loaddoc('dates'), TOPIC_CATEGORY_OUTPUT),
         (
-            ["flags"],
-            _("Command-line flags"),
-            loaddoc('flags'),
+            [b"dates"],
+            _(b"Date Formats"),
+            loaddoc(b'dates'),
+            TOPIC_CATEGORY_OUTPUT,
+        ),
+        (
+            [b"flags"],
+            _(b"Command-line flags"),
+            loaddoc(b'flags'),
             TOPIC_CATEGORY_CONFIG,
         ),
         (
-            ["patterns"],
-            _("File Name Patterns"),
-            loaddoc('patterns'),
+            [b"patterns"],
+            _(b"File Name Patterns"),
+            loaddoc(b'patterns'),
             TOPIC_CATEGORY_IDS,
         ),
         (
-            ['environment', 'env'],
-            _('Environment Variables'),
-            loaddoc('environment'),
+            [b'environment', b'env'],
+            _(b'Environment Variables'),
+            loaddoc(b'environment'),
             TOPIC_CATEGORY_CONFIG,
         ),
         (
-            ['revisions', 'revs', 'revsets', 'revset', 'multirevs', 'mrevs'],
-            _('Specifying Revisions'),
-            loaddoc('revisions'),
+            [
+                b'revisions',
+                b'revs',
+                b'revsets',
+                b'revset',
+                b'multirevs',
+                b'mrevs',
+            ],
+            _(b'Specifying Revisions'),
+            loaddoc(b'revisions'),
             TOPIC_CATEGORY_IDS,
         ),
         (
-            ['filesets', 'fileset'],
-            _("Specifying File Sets"),
-            loaddoc('filesets'),
+            [b'filesets', b'fileset'],
+            _(b"Specifying File Sets"),
+            loaddoc(b'filesets'),
             TOPIC_CATEGORY_IDS,
         ),
-        (['diffs'], _('Diff Formats'), loaddoc('diffs'), TOPIC_CATEGORY_OUTPUT),
         (
-            ['merge-tools', 'mergetools', 'mergetool'],
-            _('Merge Tools'),
-            loaddoc('merge-tools'),
+            [b'diffs'],
+            _(b'Diff Formats'),
+            loaddoc(b'diffs'),
+            TOPIC_CATEGORY_OUTPUT,
+        ),
+        (
+            [b'merge-tools', b'mergetools', b'mergetool'],
+            _(b'Merge Tools'),
+            loaddoc(b'merge-tools'),
             TOPIC_CATEGORY_CONFIG,
         ),
         (
-            ['templating', 'templates', 'template', 'style'],
-            _('Template Usage'),
-            loaddoc('templates'),
+            [b'templating', b'templates', b'template', b'style'],
+            _(b'Template Usage'),
+            loaddoc(b'templates'),
             TOPIC_CATEGORY_OUTPUT,
         ),
-        (['urls'], _('URL Paths'), loaddoc('urls'), TOPIC_CATEGORY_IDS),
+        ([b'urls'], _(b'URL Paths'), loaddoc(b'urls'), TOPIC_CATEGORY_IDS),
         (
-            ["extensions"],
-            _("Using Additional Features"),
+            [b"extensions"],
+            _(b"Using Additional Features"),
             extshelp,
             TOPIC_CATEGORY_CONFIG,
         ),
         (
-            ["subrepos", "subrepo"],
-            _("Subrepositories"),
-            loaddoc('subrepos'),
+            [b"subrepos", b"subrepo"],
+            _(b"Subrepositories"),
+            loaddoc(b'subrepos'),
             TOPIC_CATEGORY_CONCEPTS,
         ),
         (
-            ["hgweb"],
-            _("Configuring hgweb"),
-            loaddoc('hgweb'),
+            [b"hgweb"],
+            _(b"Configuring hgweb"),
+            loaddoc(b'hgweb'),
             TOPIC_CATEGORY_CONFIG,
         ),
         (
-            ["glossary"],
-            _("Glossary"),
-            loaddoc('glossary'),
+            [b"glossary"],
+            _(b"Glossary"),
+            loaddoc(b'glossary'),
             TOPIC_CATEGORY_CONCEPTS,
         ),
         (
-            ["hgignore", "ignore"],
-            _("Syntax for Mercurial Ignore Files"),
-            loaddoc('hgignore'),
+            [b"hgignore", b"ignore"],
+            _(b"Syntax for Mercurial Ignore Files"),
+            loaddoc(b'hgignore'),
             TOPIC_CATEGORY_IDS,
         ),
         (
-            ["phases"],
-            _("Working with Phases"),
-            loaddoc('phases'),
+            [b"phases"],
+            _(b"Working with Phases"),
+            loaddoc(b'phases'),
             TOPIC_CATEGORY_CONCEPTS,
         ),
         (
-            ['scripting'],
-            _('Using Mercurial from scripts and automation'),
-            loaddoc('scripting'),
+            [b'scripting'],
+            _(b'Using Mercurial from scripts and automation'),
+            loaddoc(b'scripting'),
             TOPIC_CATEGORY_MISC,
         ),
         (
-            ['internals'],
-            _("Technical implementation topics"),
+            [b'internals'],
+            _(b"Technical implementation topics"),
             internalshelp,
             TOPIC_CATEGORY_MISC,
         ),
         (
-            ['pager'],
-            _("Pager Support"),
-            loaddoc('pager'),
+            [b'pager'],
+            _(b"Pager Support"),
+            loaddoc(b'pager'),
             TOPIC_CATEGORY_CONFIG,
         ),
     ]
@@ -518,7 +535,7 @@
 
 # Maps topics with sub-topics to a list of their sub-topics.
 subtopics = {
-    'internals': internalstable,
+    b'internals': internalstable,
 }
 
 # Map topics to lists of callable taking the current topic help and
@@ -536,7 +553,7 @@
     """
     entries = []
     for name in sorted(items):
-        text = (pycompat.getdoc(items[name]) or '').rstrip()
+        text = (pycompat.getdoc(items[name]) or b'').rstrip()
         if not text or not ui.verbose and any(w in text for w in _exclkeywords):
             continue
         text = gettext(text)
@@ -547,14 +564,14 @@
         doclines = [(lines[0])]
         for l in lines[1:]:
             # Stop once we find some Python doctest
-            if l.strip().startswith('>>>'):
+            if l.strip().startswith(b'>>>'):
                 break
             if dedent:
                 doclines.append(l.rstrip())
             else:
-                doclines.append('  ' + l.strip())
-        entries.append('\n'.join(doclines))
-    entries = '\n\n'.join(entries)
+                doclines.append(b'  ' + l.strip())
+        entries.append(b'\n'.join(doclines))
+    entries = b'\n\n'.join(entries)
     return doc.replace(marker, entries)
 
 
@@ -566,33 +583,35 @@
 
 
 addtopicsymbols(
-    'bundlespec',
-    '.. bundlecompressionmarker',
+    b'bundlespec',
+    b'.. bundlecompressionmarker',
     compression.bundlecompressiontopics(),
 )
-addtopicsymbols('filesets', '.. predicatesmarker', fileset.symbols)
-addtopicsymbols('merge-tools', '.. internaltoolsmarker', filemerge.internalsdoc)
-addtopicsymbols('revisions', '.. predicatesmarker', revset.symbols)
-addtopicsymbols('templates', '.. keywordsmarker', templatekw.keywords)
-addtopicsymbols('templates', '.. filtersmarker', templatefilters.filters)
-addtopicsymbols('templates', '.. functionsmarker', templatefuncs.funcs)
+addtopicsymbols(b'filesets', b'.. predicatesmarker', fileset.symbols)
 addtopicsymbols(
-    'hgweb', '.. webcommandsmarker', webcommands.commands, dedent=True
+    b'merge-tools', b'.. internaltoolsmarker', filemerge.internalsdoc
+)
+addtopicsymbols(b'revisions', b'.. predicatesmarker', revset.symbols)
+addtopicsymbols(b'templates', b'.. keywordsmarker', templatekw.keywords)
+addtopicsymbols(b'templates', b'.. filtersmarker', templatefilters.filters)
+addtopicsymbols(b'templates', b'.. functionsmarker', templatefuncs.funcs)
+addtopicsymbols(
+    b'hgweb', b'.. webcommandsmarker', webcommands.commands, dedent=True
 )
 
 
 def inserttweakrc(ui, topic, doc):
-    marker = '.. tweakdefaultsmarker'
+    marker = b'.. tweakdefaultsmarker'
     repl = uimod.tweakrc
 
     def sub(m):
         lines = [m.group(1) + s for s in repl.splitlines()]
-        return '\n'.join(lines)
+        return b'\n'.join(lines)
 
     return re.sub(br'( *)%s' % re.escape(marker), sub, doc)
 
 
-addtopichook('config', inserttweakrc)
+addtopichook(b'config', inserttweakrc)
 
 
 def help_(
@@ -629,7 +648,7 @@
 
         # check if it's an invalid alias and display its error if it is
         if getattr(entry[0], 'badalias', None):
-            rst.append(entry[0].badalias + '\n')
+            rst.append(entry[0].badalias + b'\n')
             if entry[0].unknowncmd:
                 try:
                     rst.extend(helpextcmd(entry[0].cmdname))
@@ -639,31 +658,31 @@
 
         # synopsis
         if len(entry) > 2:
-            if entry[2].startswith('hg'):
-                rst.append("%s\n" % entry[2])
+            if entry[2].startswith(b'hg'):
+                rst.append(b"%s\n" % entry[2])
             else:
-                rst.append('hg %s %s\n' % (aliases[0], entry[2]))
+                rst.append(b'hg %s %s\n' % (aliases[0], entry[2]))
         else:
-            rst.append('hg %s\n' % aliases[0])
+            rst.append(b'hg %s\n' % aliases[0])
         # aliases
         if full and not ui.quiet and len(aliases) > 1:
-            rst.append(_("\naliases: %s\n") % ', '.join(aliases[1:]))
-        rst.append('\n')
+            rst.append(_(b"\naliases: %s\n") % b', '.join(aliases[1:]))
+        rst.append(b'\n')
 
         # description
         doc = gettext(pycompat.getdoc(entry[0]))
         if not doc:
-            doc = _("(no help text available)")
-        if util.safehasattr(entry[0], 'definition'):  # aliased command
+            doc = _(b"(no help text available)")
+        if util.safehasattr(entry[0], b'definition'):  # aliased command
             source = entry[0].source
-            if entry[0].definition.startswith('!'):  # shell alias
-                doc = _('shell alias for: %s\n\n%s\n\ndefined by: %s\n') % (
+            if entry[0].definition.startswith(b'!'):  # shell alias
+                doc = _(b'shell alias for: %s\n\n%s\n\ndefined by: %s\n') % (
                     entry[0].definition[1:],
                     doc,
                     source,
                 )
             else:
-                doc = _('alias for: hg %s\n\n%s\n\ndefined by: %s\n') % (
+                doc = _(b'alias for: hg %s\n\n%s\n\ndefined by: %s\n') % (
                     entry[0].definition,
                     doc,
                     source,
@@ -673,38 +692,39 @@
             rst.append(doc[0])
         else:
             rst.extend(doc)
-        rst.append('\n')
+        rst.append(b'\n')
 
         # check if this command shadows a non-trivial (multi-line)
         # extension help text
         try:
             mod = extensions.find(name)
-            doc = gettext(pycompat.getdoc(mod)) or ''
-            if '\n' in doc.strip():
+            doc = gettext(pycompat.getdoc(mod)) or b''
+            if b'\n' in doc.strip():
                 msg = _(
-                    "(use 'hg help -e %s' to show help for " "the %s extension)"
+                    b"(use 'hg help -e %s' to show help for "
+                    b"the %s extension)"
                 ) % (name, name)
-                rst.append('\n%s\n' % msg)
+                rst.append(b'\n%s\n' % msg)
         except KeyError:
             pass
 
         # options
         if not ui.quiet and entry[1]:
-            rst.append(optrst(_("options"), entry[1], ui.verbose))
+            rst.append(optrst(_(b"options"), entry[1], ui.verbose))
 
         if ui.verbose:
             rst.append(
-                optrst(_("global options"), commands.globalopts, ui.verbose)
+                optrst(_(b"global options"), commands.globalopts, ui.verbose)
             )
 
         if not ui.verbose:
             if not full:
-                rst.append(_("\n(use 'hg %s -h' to show more help)\n") % name)
+                rst.append(_(b"\n(use 'hg %s -h' to show more help)\n") % name)
             elif not ui.quiet:
                 rst.append(
                     _(
-                        '\n(some details hidden, use --verbose '
-                        'to show complete help)'
+                        b'\n(some details hidden, use --verbose '
+                        b'to show complete help)'
                     )
                 )
 
@@ -720,7 +740,7 @@
         for c, e in commands.table.iteritems():
             fs = cmdutil.parsealiases(c)
             f = fs[0]
-            syns[f] = ', '.join(fs)
+            syns[f] = b', '.join(fs)
             func = e[0]
             if select and not select(f):
                 continue
@@ -729,7 +749,7 @@
                 continue
             doc = gettext(doc)
             if not doc:
-                doc = _("(no help text available)")
+                doc = _(b"(no help text available)")
             h[f] = doc.splitlines()[0].rstrip()
 
             cat = getattr(func, 'helpcategory', None) or (
@@ -740,27 +760,27 @@
         rst = []
         if not h:
             if not ui.quiet:
-                rst.append(_('no commands defined\n'))
+                rst.append(_(b'no commands defined\n'))
             return rst
 
         # Output top header.
         if not ui.quiet:
-            if name == "shortlist":
-                rst.append(_('basic commands:\n\n'))
-            elif name == "debug":
-                rst.append(_('debug commands (internal and unsupported):\n\n'))
+            if name == b"shortlist":
+                rst.append(_(b'basic commands:\n\n'))
+            elif name == b"debug":
+                rst.append(_(b'debug commands (internal and unsupported):\n\n'))
             else:
-                rst.append(_('list of commands:\n'))
+                rst.append(_(b'list of commands:\n'))
 
         def appendcmds(cmds):
             cmds = sorted(cmds)
             for c in cmds:
                 if ui.verbose:
-                    rst.append(" :%s: %s\n" % (syns[c], h[c]))
+                    rst.append(b" :%s: %s\n" % (syns[c], h[c]))
                 else:
-                    rst.append(' :%s: %s\n' % (c, h[c]))
+                    rst.append(b' :%s: %s\n' % (c, h[c]))
 
-        if name in ('shortlist', 'debug'):
+        if name in (b'shortlist', b'debug'):
             # List without categories.
             appendcmds(h)
         else:
@@ -768,7 +788,7 @@
             missing_order = set(cats.keys()) - set(CATEGORY_ORDER)
             if missing_order:
                 ui.develwarn(
-                    'help categories missing from CATEGORY_ORDER: %s'
+                    b'help categories missing from CATEGORY_ORDER: %s'
                     % missing_order
                 )
 
@@ -778,23 +798,23 @@
                 if catfns:
                     if len(cats) > 1:
                         catname = gettext(CATEGORY_NAMES[cat])
-                        rst.append("\n%s:\n" % catname)
-                    rst.append("\n")
+                        rst.append(b"\n%s:\n" % catname)
+                    rst.append(b"\n")
                     appendcmds(catfns)
 
         ex = opts.get
         anyopts = ex(r'keyword') or not (ex(r'command') or ex(r'extension'))
         if not name and anyopts:
             exts = listexts(
-                _('enabled extensions:'),
+                _(b'enabled extensions:'),
                 extensions.enabled(),
                 showdeprecated=ui.verbose,
             )
             if exts:
-                rst.append('\n')
+                rst.append(b'\n')
                 rst.extend(exts)
 
-            rst.append(_("\nadditional help topics:\n"))
+            rst.append(_(b"\nadditional help topics:\n"))
             # Group commands by category.
             topiccats = {}
             for topic in helptable:
@@ -814,7 +834,7 @@
             missing_order = set(topiccats.keys()) - set(TOPIC_CATEGORY_ORDER)
             if missing_order:
                 ui.develwarn(
-                    'help categories missing from TOPIC_CATEGORY_ORDER: %s'
+                    b'help categories missing from TOPIC_CATEGORY_ORDER: %s'
                     % missing_order
                 )
 
@@ -824,50 +844,50 @@
                 if topics:
                     if len(topiccats) > 1:
                         catname = gettext(TOPIC_CATEGORY_NAMES[cat])
-                        rst.append("\n%s:\n" % catname)
-                    rst.append("\n")
+                        rst.append(b"\n%s:\n" % catname)
+                    rst.append(b"\n")
                     for t, desc in topics:
-                        rst.append(" :%s: %s\n" % (t, desc))
+                        rst.append(b" :%s: %s\n" % (t, desc))
 
         if ui.quiet:
             pass
         elif ui.verbose:
             rst.append(
-                '\n%s\n'
-                % optrst(_("global options"), commands.globalopts, ui.verbose)
+                b'\n%s\n'
+                % optrst(_(b"global options"), commands.globalopts, ui.verbose)
             )
-            if name == 'shortlist':
+            if name == b'shortlist':
                 rst.append(
-                    _("\n(use 'hg help' for the full list " "of commands)\n")
+                    _(b"\n(use 'hg help' for the full list " b"of commands)\n")
                 )
         else:
-            if name == 'shortlist':
+            if name == b'shortlist':
                 rst.append(
                     _(
-                        "\n(use 'hg help' for the full list of commands "
-                        "or 'hg -v' for details)\n"
+                        b"\n(use 'hg help' for the full list of commands "
+                        b"or 'hg -v' for details)\n"
                     )
                 )
             elif name and not full:
                 rst.append(
-                    _("\n(use 'hg help %s' to show the full help " "text)\n")
+                    _(b"\n(use 'hg help %s' to show the full help " b"text)\n")
                     % name
                 )
             elif name and syns and name in syns.keys():
                 rst.append(
                     _(
-                        "\n(use 'hg help -v -e %s' to show built-in "
-                        "aliases and global options)\n"
+                        b"\n(use 'hg help -v -e %s' to show built-in "
+                        b"aliases and global options)\n"
                     )
                     % name
                 )
             else:
                 rst.append(
                     _(
-                        "\n(use 'hg help -v%s' to show built-in aliases "
-                        "and global options)\n"
+                        b"\n(use 'hg help -v%s' to show built-in aliases "
+                        b"and global options)\n"
                     )
-                    % (name and " " + name or "")
+                    % (name and b" " + name or b"")
                 )
         return rst
 
@@ -893,20 +913,21 @@
 
         # description
         if not doc:
-            rst.append("    %s\n" % _("(no help text available)"))
+            rst.append(b"    %s\n" % _(b"(no help text available)"))
         if callable(doc):
-            rst += ["    %s\n" % l for l in doc(ui).splitlines()]
+            rst += [b"    %s\n" % l for l in doc(ui).splitlines()]
 
         if not ui.verbose:
             omitted = _(
-                '(some details hidden, use --verbose' ' to show complete help)'
+                b'(some details hidden, use --verbose'
+                b' to show complete help)'
             )
             indicateomitted(rst, omitted)
 
         try:
             cmdutil.findcmd(name, commands.table)
             rst.append(
-                _("\nuse 'hg help -c %s' to see help for " "the %s command\n")
+                _(b"\nuse 'hg help -c %s' to see help for " b"the %s command\n")
                 % (name, name)
             )
         except error.UnknownCommand:
@@ -916,25 +937,26 @@
     def helpext(name, subtopic=None):
         try:
             mod = extensions.find(name)
-            doc = gettext(pycompat.getdoc(mod)) or _('no help text available')
+            doc = gettext(pycompat.getdoc(mod)) or _(b'no help text available')
         except KeyError:
             mod = None
             doc = extensions.disabledext(name)
             if not doc:
                 raise error.UnknownCommand(name)
 
-        if '\n' not in doc:
-            head, tail = doc, ""
+        if b'\n' not in doc:
+            head, tail = doc, b""
         else:
-            head, tail = doc.split('\n', 1)
-        rst = [_('%s extension - %s\n\n') % (name.rpartition('.')[-1], head)]
+            head, tail = doc.split(b'\n', 1)
+        rst = [_(b'%s extension - %s\n\n') % (name.rpartition(b'.')[-1], head)]
         if tail:
             rst.extend(tail.splitlines(True))
-            rst.append('\n')
+            rst.append(b'\n')
 
         if not ui.verbose:
             omitted = _(
-                '(some details hidden, use --verbose' ' to show complete help)'
+                b'(some details hidden, use --verbose'
+                b' to show complete help)'
             )
             indicateomitted(rst, omitted)
 
@@ -943,70 +965,70 @@
                 ct = mod.cmdtable
             except AttributeError:
                 ct = {}
-            modcmds = {c.partition('|')[0] for c in ct}
+            modcmds = {c.partition(b'|')[0] for c in ct}
             rst.extend(helplist(modcmds.__contains__))
         else:
             rst.append(
                 _(
-                    "(use 'hg help extensions' for information on enabling"
-                    " extensions)\n"
+                    b"(use 'hg help extensions' for information on enabling"
+                    b" extensions)\n"
                 )
             )
         return rst
 
     def helpextcmd(name, subtopic=None):
         cmd, ext, doc = extensions.disabledcmd(
-            ui, name, ui.configbool('ui', 'strict')
+            ui, name, ui.configbool(b'ui', b'strict')
         )
         doc = doc.splitlines()[0]
 
         rst = listexts(
-            _("'%s' is provided by the following " "extension:") % cmd,
+            _(b"'%s' is provided by the following " b"extension:") % cmd,
             {ext: doc},
             indent=4,
             showdeprecated=True,
         )
-        rst.append('\n')
+        rst.append(b'\n')
         rst.append(
             _(
-                "(use 'hg help extensions' for information on enabling "
-                "extensions)\n"
+                b"(use 'hg help extensions' for information on enabling "
+                b"extensions)\n"
             )
         )
         return rst
 
     rst = []
-    kw = opts.get('keyword')
+    kw = opts.get(b'keyword')
     if kw or name is None and any(opts[o] for o in opts):
-        matches = topicmatch(ui, commands, name or '')
+        matches = topicmatch(ui, commands, name or b'')
         helpareas = []
-        if opts.get('extension'):
-            helpareas += [('extensions', _('Extensions'))]
-        if opts.get('command'):
-            helpareas += [('commands', _('Commands'))]
+        if opts.get(b'extension'):
+            helpareas += [(b'extensions', _(b'Extensions'))]
+        if opts.get(b'command'):
+            helpareas += [(b'commands', _(b'Commands'))]
         if not helpareas:
             helpareas = [
-                ('topics', _('Topics')),
-                ('commands', _('Commands')),
-                ('extensions', _('Extensions')),
-                ('extensioncommands', _('Extension Commands')),
+                (b'topics', _(b'Topics')),
+                (b'commands', _(b'Commands')),
+                (b'extensions', _(b'Extensions')),
+                (b'extensioncommands', _(b'Extension Commands')),
             ]
         for t, title in helpareas:
             if matches[t]:
-                rst.append('%s:\n\n' % title)
+                rst.append(b'%s:\n\n' % title)
                 rst.extend(minirst.maketable(sorted(matches[t]), 1))
-                rst.append('\n')
+                rst.append(b'\n')
         if not rst:
-            msg = _('no matches')
-            hint = _("try 'hg help' for a list of topics")
+            msg = _(b'no matches')
+            hint = _(b"try 'hg help' for a list of topics")
             raise error.Abort(msg, hint=hint)
-    elif name and name != 'shortlist':
+    elif name and name != b'shortlist':
         queries = []
         if unknowncmd:
             queries += [helpextcmd]
-        if opts.get('extension'):
+        if opts.get(b'extension'):
             queries += [helpext]
-        if opts.get('command'):
+        if opts.get(b'command'):
             queries += [helpcmd]
         if not queries:
             queries = (helptopic, helpcmd, helpext, helpextcmd)
@@ -1028,16 +1050,16 @@
                     hintname = subtopic
                 else:
                     hintname = name
-                msg = _('no such help topic: %s') % formatname
-                hint = _("try 'hg help --keyword %s'") % hintname
+                msg = _(b'no such help topic: %s') % formatname
+                hint = _(b"try 'hg help --keyword %s'") % hintname
                 raise error.Abort(msg, hint=hint)
     else:
         # program name
         if not ui.quiet:
-            rst = [_("Mercurial Distributed SCM\n"), '\n']
+            rst = [_(b"Mercurial Distributed SCM\n"), b'\n']
         rst.extend(helplist(None, **pycompat.strkwargs(opts)))
 
-    return ''.join(rst)
+    return b''.join(rst)
 
 
 def formattedhelp(
@@ -1055,14 +1077,14 @@
     # <fullname> := <name>[.<subtopic][.<section>]
     name = subtopic = section = None
     if fullname is not None:
-        nameparts = fullname.split('.')
+        nameparts = fullname.split(b'.')
         name = nameparts.pop(0)
         if nameparts and name in subtopics:
             subtopic = nameparts.pop(0)
         if nameparts:
-            section = encoding.lower('.'.join(nameparts))
+            section = encoding.lower(b'.'.join(nameparts))
 
-    textwidth = ui.configint('ui', 'textwidth')
+    textwidth = ui.configint(b'ui', b'textwidth')
     termwidth = ui.termwidth() - 2
     if textwidth <= 0 or termwidth < textwidth:
         textwidth = termwidth
@@ -1078,10 +1100,10 @@
     )
 
     blocks, pruned = minirst.parse(text, keep=keep)
-    if 'verbose' in pruned:
-        keep.append('omitted')
+    if b'verbose' in pruned:
+        keep.append(b'omitted')
     else:
-        keep.append('notomitted')
+        keep.append(b'notomitted')
     blocks, pruned = minirst.parse(text, keep=keep)
     if section:
         blocks = minirst.filtersections(blocks, section)
@@ -1090,6 +1112,6 @@
     # to look for, or we could have simply failed to find "foo.bar"
     # because bar isn't a section of foo
     if section and not (blocks and name):
-        raise error.Abort(_("help section not found: %s") % fullname)
+        raise error.Abort(_(b"help section not found: %s") % fullname)
 
     return minirst.formatplain(blocks, textwidth)
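
The formattedhelp() hunk above splits a dotted help query into topic,
subtopic and section. A hedged sketch of that <name>[.<subtopic>][.<section>]
parsing on bytes (the helper name and the trivial subtopics table are
illustrative; the real code also routes lowercasing through encoding.lower):

    def splithelptopic(fullname, subtopics):
        # First dotted part is the topic name.
        nameparts = fullname.split(b'.')
        name = nameparts.pop(0)
        subtopic = section = None
        # Second part is a subtopic only for topics that have subtopics.
        if nameparts and name in subtopics:
            subtopic = nameparts.pop(0)
        # Anything left over names a section inside the topic text.
        if nameparts:
            section = b'.'.join(nameparts).lower()
        return name, subtopic, section

    subtopics = {b'internals': []}
    assert splithelptopic(b'internals.revlogs', subtopics) == (
        b'internals', b'revlogs', None)
    assert splithelptopic(b'config.files', subtopics) == (
        b'config', None, b'files')
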
--- a/mercurial/hg.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/hg.py	Sun Oct 06 09:48:39 2019 -0400
@@ -53,7 +53,7 @@
 release = lock.release
 
 # shared features
-sharedbookmarks = 'bookmarks'
+sharedbookmarks = b'bookmarks'
 
 
 def _local(path):
@@ -64,7 +64,7 @@
     # Python 2 raises TypeError, Python 3 ValueError.
     except (TypeError, ValueError) as e:
         raise error.Abort(
-            _('invalid path %s: %s') % (path, pycompat.bytestr(e))
+            _(b'invalid path %s: %s') % (path, pycompat.bytestr(e))
         )
 
     return isfile and bundlerepo or localrepo
@@ -85,19 +85,19 @@
     else:
         revs = []
 
-    if not peer.capable('branchmap'):
+    if not peer.capable(b'branchmap'):
         if branches:
-            raise error.Abort(_("remote branch lookup not supported"))
+            raise error.Abort(_(b"remote branch lookup not supported"))
         revs.append(hashbranch)
         return revs, revs[0]
 
     with peer.commandexecutor() as e:
-        branchmap = e.callcommand('branchmap', {}).result()
+        branchmap = e.callcommand(b'branchmap', {}).result()
 
     def primary(branch):
-        if branch == '.':
+        if branch == b'.':
             if not lrepo:
-                raise error.Abort(_("dirstate branch not accessible"))
+                raise error.Abort(_(b"dirstate branch not accessible"))
             branch = lrepo.dirstate.branch()
         if branch in branchmap:
             revs.extend(node.hex(r) for r in reversed(branchmap[branch]))
@@ -107,7 +107,7 @@
 
     for branch in branches:
         if not primary(branch):
-            raise error.RepoLookupError(_("unknown branch '%s'") % branch)
+            raise error.RepoLookupError(_(b"unknown branch '%s'") % branch)
     if hashbranch:
         if not primary(hashbranch):
             revs.append(hashbranch)
@@ -126,26 +126,26 @@
 
 
 schemes = {
-    'bundle': bundlerepo,
-    'union': unionrepo,
-    'file': _local,
-    'http': httppeer,
-    'https': httppeer,
-    'ssh': sshpeer,
-    'static-http': statichttprepo,
+    b'bundle': bundlerepo,
+    b'union': unionrepo,
+    b'file': _local,
+    b'http': httppeer,
+    b'https': httppeer,
+    b'ssh': sshpeer,
+    b'static-http': statichttprepo,
 }
 
 
 def _peerlookup(path):
     u = util.url(path)
-    scheme = u.scheme or 'file'
-    thing = schemes.get(scheme) or schemes['file']
+    scheme = u.scheme or b'file'
+    thing = schemes.get(scheme) or schemes[b'file']
     try:
         return thing(path)
     except TypeError:
         # we can't test callable(thing) because 'thing' can be an unloaded
         # module that implements __call__
-        if not util.safehasattr(thing, 'instance'):
+        if not util.safehasattr(thing, b'instance'):
             raise
         return thing
 
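The byteified keys of the schemes table have to match whatever util.url()
reports as the scheme, which is also bytes after this change, with b'file' as
the fallback. A toy version of the dispatch, with string placeholders standing
in for the repo/peer modules (illustration only):

    handlers = {b'file': 'local', b'http': 'httppeer', b'ssh': 'sshpeer'}

    def lookup(path):
        # crude scheme extraction standing in for util.url(path).scheme
        scheme, sep, _rest = path.partition(b'://')
        return handlers.get(scheme if sep else b'file') or handlers[b'file']

    print(lookup(b'ssh://example.com/repo'))  # 'sshpeer'
    print(lookup(b'/srv/repo'))               # 'local'
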
@@ -164,7 +164,7 @@
     '''open path with open if local, url.open if remote'''
     pathurl = util.url(path, parsequery=False, parsefragment=False)
     if pathurl.islocal():
-        return util.posixfile(pathurl.localpath(), 'rb')
+        return util.posixfile(pathurl.localpath(), b'rb')
     else:
         return url.open(ui, path, sendaccept=sendaccept)
 
@@ -184,12 +184,12 @@
     for f in presetupfuncs or []:
         f(ui, obj)
     ui.log(b'extension', b'- executing reposetup hooks\n')
-    with util.timedcm('all reposetup') as allreposetupstats:
+    with util.timedcm(b'all reposetup') as allreposetupstats:
         for name, module in extensions.extensions(ui):
             ui.log(b'extension', b'  - running reposetup for %s\n', name)
             hook = getattr(module, 'reposetup', None)
             if hook:
-                with util.timedcm('reposetup %r', name) as stats:
+                with util.timedcm(b'reposetup %r', name) as stats:
                     hook(ui, obj)
                 ui.log(
                     b'extension', b'  > reposetup for %s took %s\n', name, stats
@@ -202,7 +202,12 @@
 
 
 def repository(
-    ui, path='', create=False, presetupfuncs=None, intents=None, createopts=None
+    ui,
+    path=b'',
+    create=False,
+    presetupfuncs=None,
+    intents=None,
+    createopts=None,
 ):
     """return a repository object for the specified path"""
     peer = _peerorrepo(
@@ -216,9 +221,9 @@
     repo = peer.local()
     if not repo:
         raise error.Abort(
-            _("repository '%s' is not local") % (path or peer.url())
+            _(b"repository '%s' is not local") % (path or peer.url())
         )
-    return repo.filtered('visible')
+    return repo.filtered(b'visible')
 
 
 def peer(uiorrepo, opts, path, create=False, intents=None, createopts=None):
@@ -247,7 +252,7 @@
     '''
     path = util.url(source).path
     if not path:
-        return ''
+        return b''
     return os.path.basename(os.path.normpath(path))
 
 
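defaultdest() reduces to "last path component of the source URL, or b'' when
the URL has no usable path"; os.path.basename and os.path.normpath both accept
bytes on Python 3, which is what keeps this function working after the
byteification. A sketch, assuming the URL path was already extracted the way
util.url() would:

    import os.path

    def defaultdest(urlpath):
        if not urlpath:
            return b''
        return os.path.basename(os.path.normpath(urlpath))

    print(defaultdest(b'/hg/mercurial/'))  # b'mercurial'
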
@@ -259,7 +264,7 @@
     if repo.sharedpath == repo.path:
         return None
 
-    if util.safehasattr(repo, 'srcrepo') and repo.srcrepo:
+    if util.safehasattr(repo, b'srcrepo') and repo.srcrepo:
         return repo.srcrepo
 
     # the sharedpath always ends in the .hg; we want the path to the repo
@@ -282,7 +287,7 @@
     '''create a shared repository'''
 
     if not islocal(source):
-        raise error.Abort(_('can only share local repositories'))
+        raise error.Abort(_(b'can only share local repositories'))
 
     if not dest:
         dest = defaultdest(source)
@@ -307,9 +312,9 @@
         dest,
         create=True,
         createopts={
-            'sharedrepo': srcrepo,
-            'sharedrelative': relative,
-            'shareditems': shareditems,
+            b'sharedrepo': srcrepo,
+            b'sharedrelative': relative,
+            b'shareditems': shareditems,
         },
     )
 
@@ -338,11 +343,11 @@
         destlock = copystore(ui, repo, repo.path)
         with destlock or util.nullcontextmanager():
 
-            sharefile = repo.vfs.join('sharedpath')
-            util.rename(sharefile, sharefile + '.old')
+            sharefile = repo.vfs.join(b'sharedpath')
+            util.rename(sharefile, sharefile + b'.old')
 
-            repo.requirements.discard('shared')
-            repo.requirements.discard('relshared')
+            repo.requirements.discard(b'shared')
+            repo.requirements.discard(b'relshared')
             repo._writerequirements()
 
     # Removing share changes some fundamental properties of the repo instance.
@@ -352,7 +357,7 @@
 
     # TODO: figure out how to access subrepos that exist, but were previously
     #       removed from .hgsub
-    c = newrepo['.']
+    c = newrepo[b'.']
     subs = c.substate
     for s in sorted(subs):
         c.sub(s).unshare()
@@ -371,10 +376,10 @@
     Extensions can wrap this function and write additional entries to
     destrepo/.hg/shared to indicate additional pieces of data to be shared.
     """
-    default = defaultpath or sourcerepo.ui.config('paths', 'default')
+    default = defaultpath or sourcerepo.ui.config(b'paths', b'default')
     if default:
-        template = '[paths]\n' 'default = %s\n'
-        destrepo.vfs.write('hgrc', util.tonativeeol(template % default))
+        template = b'[paths]\n' b'default = %s\n'
+        destrepo.vfs.write(b'hgrc', util.tonativeeol(template % default))
     if repositorymod.NARROW_REQUIREMENT in sourcerepo.requirements:
         with destrepo.wlock():
             narrowspec.copytoworkingcopy(destrepo)
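
The template assignment above looks odd but is just adjacent-literal
concatenation (an artifact of black splitting the line): the two fragments fuse
into a single bytes constant at compile time, and both must carry the b prefix,
since mixing bytes and str literals in one concatenation is a SyntaxError on
Python 3. A quick demonstration:

    template = b'[paths]\n' b'default = %s\n'
    assert template == b'[paths]\ndefault = %s\n'
    print(template % b'https://example.com/repo')
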
@@ -388,10 +393,10 @@
     if not update:
         return
 
-    repo.ui.status(_("updating working directory\n"))
+    repo.ui.status(_(b"updating working directory\n"))
     if update is not True:
         checkout = update
-    for test in (checkout, 'default', 'tip'):
+    for test in (checkout, b'default', b'tip'):
         if test is None:
             continue
         try:
@@ -410,22 +415,22 @@
     destlock = None
     try:
         hardlink = None
-        topic = _('linking') if hardlink else _('copying')
-        with ui.makeprogress(topic, unit=_('files')) as progress:
+        topic = _(b'linking') if hardlink else _(b'copying')
+        with ui.makeprogress(topic, unit=_(b'files')) as progress:
             num = 0
             srcpublishing = srcrepo.publishing()
             srcvfs = vfsmod.vfs(srcrepo.sharedpath)
             dstvfs = vfsmod.vfs(destpath)
             for f in srcrepo.store.copylist():
-                if srcpublishing and f.endswith('phaseroots'):
+                if srcpublishing and f.endswith(b'phaseroots'):
                     continue
                 dstbase = os.path.dirname(f)
                 if dstbase and not dstvfs.exists(dstbase):
                     dstvfs.mkdir(dstbase)
                 if srcvfs.exists(f):
-                    if f.endswith('data'):
+                    if f.endswith(b'data'):
                         # 'dstbase' may be empty (e.g. revlog format 0)
-                        lockfile = os.path.join(dstbase, "lock")
+                        lockfile = os.path.join(dstbase, b"lock")
                         # lock to avoid premature writing to the target
                         destlock = lock.lock(dstvfs, lockfile)
                     hardlink, n = util.copyfiles(
@@ -433,9 +438,9 @@
                     )
                     num += n
             if hardlink:
-                ui.debug("linked %d files\n" % num)
+                ui.debug(b"linked %d files\n" % num)
             else:
-                ui.debug("copied %d files\n" % num)
+                ui.debug(b"copied %d files\n" % num)
         return destlock
     except:  # re-raises
         release(destlock)
@@ -463,12 +468,12 @@
     """
     revs = None
     if rev:
-        if not srcpeer.capable('lookup'):
+        if not srcpeer.capable(b'lookup'):
             raise error.Abort(
                 _(
-                    "src repository does not support "
-                    "revision lookup and so doesn't "
-                    "support clone by revision"
+                    b"src repository does not support "
+                    b"revision lookup and so doesn't "
+                    b"support clone by revision"
                 )
             )
 
@@ -476,7 +481,9 @@
         remoterevs = []
         for r in rev:
             with srcpeer.commandexecutor() as e:
-                remoterevs.append(e.callcommand('lookup', {'key': r,}).result())
+                remoterevs.append(
+                    e.callcommand(b'lookup', {b'key': r,}).result()
+                )
         revs = remoterevs
 
     # Obtain a lock before checking for or cloning the pooled repo otherwise
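
The callcommand() rewrites above show a pattern this patch repeats many times:
command names and argument keys become bytes together, since both travel over
the wire protocol. A toy stand-in for the executor plumbing, with a fake future
and peer (not the real wireproto machinery):

    import contextlib

    class fakefuture(object):
        def __init__(self, value):
            self._value = value

        def result(self):
            return self._value

    class fakepeer(object):
        @contextlib.contextmanager
        def commandexecutor(self):
            yield self

        def callcommand(self, name, args):
            if name == b'lookup':
                return fakefuture(b'\x00' * 20)  # pretend binary node
            raise KeyError(name)

    with fakepeer().commandexecutor() as e:
        node = e.callcommand(b'lookup', {b'key': b'0'}).result()
    print(len(node))  # 20
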
@@ -492,13 +499,15 @@
     poolvfs = vfsmod.vfs(pooldir)
     basename = os.path.basename(sharepath)
 
-    with lock.lock(poolvfs, '%s.lock' % basename):
+    with lock.lock(poolvfs, b'%s.lock' % basename):
         if os.path.exists(sharepath):
             ui.status(
-                _('(sharing from existing pooled repository %s)\n') % basename
+                _(b'(sharing from existing pooled repository %s)\n') % basename
             )
         else:
-            ui.status(_('(sharing from new pooled repository %s)\n') % basename)
+            ui.status(
+                _(b'(sharing from new pooled repository %s)\n') % basename
+            )
             # Always use pull mode because hardlinks in share mode don't work
             # well. Never update because working copies aren't necessary in
             # share mode.
@@ -545,7 +554,7 @@
 # so just copy it
 def _copycache(srcrepo, dstcachedir, fname):
     """copy a cache from srcrepo to destcachedir (if it exists)"""
-    srcbranchcache = srcrepo.vfs.join('cache/%s' % fname)
+    srcbranchcache = srcrepo.vfs.join(b'cache/%s' % fname)
     dstbranchcache = os.path.join(dstcachedir, fname)
     if os.path.exists(srcbranchcache):
         if not os.path.exists(dstcachedir):
@@ -631,7 +640,7 @@
     if dest is None:
         dest = defaultdest(source)
         if dest:
-            ui.status(_("destination directory: %s\n") % dest)
+            ui.status(_(b"destination directory: %s\n") % dest)
     else:
         dest = ui.expandpath(dest)
 
@@ -639,14 +648,14 @@
     source = util.urllocalpath(source)
 
     if not dest:
-        raise error.Abort(_("empty destination path is not valid"))
+        raise error.Abort(_(b"empty destination path is not valid"))
 
     destvfs = vfsmod.vfs(dest, expandpath=True)
     if destvfs.lexists():
         if not destvfs.isdir():
-            raise error.Abort(_("destination '%s' already exists") % dest)
+            raise error.Abort(_(b"destination '%s' already exists") % dest)
         elif destvfs.listdir():
-            raise error.Abort(_("destination '%s' is not empty") % dest)
+            raise error.Abort(_(b"destination '%s' is not empty") % dest)
 
     createopts = {}
     narrow = False
@@ -662,12 +671,12 @@
     if narrow:
         # Include everything by default if only exclusion patterns are defined.
         if storeexcludepats and not storeincludepats:
-            storeincludepats = {'path:.'}
+            storeincludepats = {b'path:.'}
 
-        createopts['narrowfiles'] = True
+        createopts[b'narrowfiles'] = True
 
     if depth:
-        createopts['shallowfilestore'] = True
+        createopts[b'shallowfilestore'] = True
 
     if srcpeer.capable(b'lfs-serve'):
         # Repository creation honors the config if it disabled the extension, so
@@ -677,65 +686,67 @@
         # requirement is added.  If the extension is explicitly disabled but the
         # requirement is set, the clone aborts early, before transferring any
         # data.
-        createopts['lfs'] = True
+        createopts[b'lfs'] = True
 
-        if extensions.disabledext('lfs'):
+        if extensions.disabledext(b'lfs'):
             ui.status(
                 _(
-                    '(remote is using large file support (lfs), but it is '
-                    'explicitly disabled in the local configuration)\n'
+                    b'(remote is using large file support (lfs), but it is '
+                    b'explicitly disabled in the local configuration)\n'
                 )
             )
         else:
             ui.status(
                 _(
-                    '(remote is using large file support (lfs); lfs will '
-                    'be enabled for this repository)\n'
+                    b'(remote is using large file support (lfs); lfs will '
+                    b'be enabled for this repository)\n'
                 )
             )
 
     shareopts = shareopts or {}
-    sharepool = shareopts.get('pool')
-    sharenamemode = shareopts.get('mode')
+    sharepool = shareopts.get(b'pool')
+    sharenamemode = shareopts.get(b'mode')
     if sharepool and islocal(dest):
         sharepath = None
-        if sharenamemode == 'identity':
+        if sharenamemode == b'identity':
             # Resolve the name from the initial changeset in the remote
             # repository. This returns nullid when the remote is empty. It
             # raises RepoLookupError if revision 0 is filtered or otherwise
             # not available. If we fail to resolve, sharing is not enabled.
             try:
                 with srcpeer.commandexecutor() as e:
-                    rootnode = e.callcommand('lookup', {'key': '0',}).result()
+                    rootnode = e.callcommand(
+                        b'lookup', {b'key': b'0',}
+                    ).result()
 
                 if rootnode != node.nullid:
                     sharepath = os.path.join(sharepool, node.hex(rootnode))
                 else:
                     ui.status(
                         _(
-                            '(not using pooled storage: '
-                            'remote appears to be empty)\n'
+                            b'(not using pooled storage: '
+                            b'remote appears to be empty)\n'
                         )
                     )
             except error.RepoLookupError:
                 ui.status(
                     _(
-                        '(not using pooled storage: '
-                        'unable to resolve identity of remote)\n'
+                        b'(not using pooled storage: '
+                        b'unable to resolve identity of remote)\n'
                     )
                 )
-        elif sharenamemode == 'remote':
+        elif sharenamemode == b'remote':
             sharepath = os.path.join(
                 sharepool, node.hex(hashlib.sha1(source).digest())
             )
         else:
             raise error.Abort(
-                _('unknown share naming mode: %s') % sharenamemode
+                _(b'unknown share naming mode: %s') % sharenamemode
             )
 
         # TODO this is a somewhat arbitrary restriction.
         if narrow:
-            ui.status(_('(pooled storage not supported for narrow clones)\n'))
+            ui.status(_(b'(pooled storage not supported for narrow clones)\n'))
             sharepath = None
 
         if sharepath:
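
For 'remote' mode, the pool directory name above is the hex SHA-1 of the source
URL, which is why source must be bytes by the time it reaches hashlib. A sketch
with a hypothetical pool root, using hexdigest() in place of
node.hex(...digest()):

    import hashlib
    import os.path

    sharepool = b'/srv/hg-pool'  # hypothetical share pool root
    source = b'https://example.com/repo'
    name = hashlib.sha1(source).hexdigest().encode('ascii')
    print(os.path.join(sharepool, name))
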
@@ -786,8 +797,8 @@
                 copy = False
 
         if copy:
-            srcrepo.hook('preoutgoing', throw=True, source='clone')
-            hgdir = os.path.realpath(os.path.join(dest, ".hg"))
+            srcrepo.hook(b'preoutgoing', throw=True, source=b'clone')
+            hgdir = os.path.realpath(os.path.join(dest, b".hg"))
             if not os.path.exists(dest):
                 util.makedirs(dest)
             else:
@@ -800,25 +811,27 @@
                 if inst.errno == errno.EEXIST:
                     cleandir = None
                     raise error.Abort(
-                        _("destination '%s' already exists") % dest
+                        _(b"destination '%s' already exists") % dest
                     )
                 raise
 
             destlock = copystore(ui, srcrepo, destpath)
             # copy bookmarks over
-            srcbookmarks = srcrepo.vfs.join('bookmarks')
-            dstbookmarks = os.path.join(destpath, 'bookmarks')
+            srcbookmarks = srcrepo.vfs.join(b'bookmarks')
+            dstbookmarks = os.path.join(destpath, b'bookmarks')
             if os.path.exists(srcbookmarks):
                 util.copyfile(srcbookmarks, dstbookmarks)
 
-            dstcachedir = os.path.join(destpath, 'cache')
+            dstcachedir = os.path.join(destpath, b'cache')
             for cache in cacheutil.cachetocopy(srcrepo):
                 _copycache(srcrepo, dstcachedir, cache)
 
             # we need to re-init the repo after manually copying the data
             # into it
             destpeer = peer(srcrepo, peeropts, dest)
-            srcrepo.hook('outgoing', source='clone', node=node.hex(node.nullid))
+            srcrepo.hook(
+                b'outgoing', source=b'clone', node=node.hex(node.nullid)
+            )
         else:
             try:
                 # only pass ui when no srcrepo
@@ -833,17 +846,17 @@
                 if inst.errno == errno.EEXIST:
                     cleandir = None
                     raise error.Abort(
-                        _("destination '%s' already exists") % dest
+                        _(b"destination '%s' already exists") % dest
                     )
                 raise
 
             if revs:
-                if not srcpeer.capable('lookup'):
+                if not srcpeer.capable(b'lookup'):
                     raise error.Abort(
                         _(
-                            "src repository does not support "
-                            "revision lookup and so doesn't "
-                            "support clone by revision"
+                            b"src repository does not support "
+                            b"revision lookup and so doesn't "
+                            b"support clone by revision"
                         )
                     )
 
@@ -852,7 +865,7 @@
                 for rev in revs:
                     with srcpeer.commandexecutor() as e:
                         remoterevs.append(
-                            e.callcommand('lookup', {'key': rev,}).result()
+                            e.callcommand(b'lookup', {b'key': rev,}).result()
                         )
                 revs = remoterevs
 
@@ -868,15 +881,15 @@
 
                 u = util.url(abspath)
                 defaulturl = bytes(u)
-                local.ui.setconfig('paths', 'default', defaulturl, 'clone')
+                local.ui.setconfig(b'paths', b'default', defaulturl, b'clone')
                 if not stream:
                     if pull:
                         stream = False
                     else:
                         stream = None
                 # internal config: ui.quietbookmarkmove
-                overrides = {('ui', 'quietbookmarkmove'): True}
-                with local.ui.configoverride(overrides, 'clone'):
+                overrides = {(b'ui', b'quietbookmarkmove'): True}
+                with local.ui.configoverride(overrides, b'clone'):
                     exchange.pull(
                         local,
                         srcpeer,
@@ -892,8 +905,8 @@
                 if narrow:
                     raise error.Abort(
                         _(
-                            'narrow clone not available for '
-                            'remote destinations'
+                            b'narrow clone not available for '
+                            b'remote destinations'
                         )
                     )
 
@@ -905,28 +918,28 @@
                 )
             else:
                 raise error.Abort(
-                    _("clone from remote to remote not supported")
+                    _(b"clone from remote to remote not supported")
                 )
 
         cleandir = None
 
         destrepo = destpeer.local()
         if destrepo:
-            template = uimod.samplehgrcs['cloned']
+            template = uimod.samplehgrcs[b'cloned']
             u = util.url(abspath)
             u.passwd = None
             defaulturl = bytes(u)
-            destrepo.vfs.write('hgrc', util.tonativeeol(template % defaulturl))
-            destrepo.ui.setconfig('paths', 'default', defaulturl, 'clone')
+            destrepo.vfs.write(b'hgrc', util.tonativeeol(template % defaulturl))
+            destrepo.ui.setconfig(b'paths', b'default', defaulturl, b'clone')
 
-            if ui.configbool('experimental', 'remotenames'):
+            if ui.configbool(b'experimental', b'remotenames'):
                 logexchange.pullremotenames(destrepo, srcpeer)
 
             if update:
                 if update is not True:
                     with srcpeer.commandexecutor() as e:
                         checkout = e.callcommand(
-                            'lookup', {'key': update,}
+                            b'lookup', {b'key': update,}
                         ).result()
 
                 uprev = None
@@ -948,23 +961,23 @@
                                 pass
                 if uprev is None:
                     try:
-                        uprev = destrepo._bookmarks['@']
-                        update = '@'
+                        uprev = destrepo._bookmarks[b'@']
+                        update = b'@'
                         bn = destrepo[uprev].branch()
-                        if bn == 'default':
-                            status = _("updating to bookmark @\n")
+                        if bn == b'default':
+                            status = _(b"updating to bookmark @\n")
                         else:
                             status = (
-                                _("updating to bookmark @ on branch %s\n") % bn
+                                _(b"updating to bookmark @ on branch %s\n") % bn
                             )
                     except KeyError:
                         try:
-                            uprev = destrepo.branchtip('default')
+                            uprev = destrepo.branchtip(b'default')
                         except error.RepoLookupError:
-                            uprev = destrepo.lookup('tip')
+                            uprev = destrepo.lookup(b'tip')
                 if not status:
                     bn = destrepo[uprev].branch()
-                    status = _("updating to branch %s\n") % bn
+                    status = _(b"updating to branch %s\n") % bn
                 destrepo.ui.status(status)
                 _update(destrepo, uprev)
                 if update in destrepo._bookmarks:
@@ -983,8 +996,8 @@
         return
     repo.ui.status(
         _(
-            "%d files updated, %d files merged, "
-            "%d files removed, %d files unresolved\n"
+            b"%d files updated, %d files merged, "
+            b"%d files removed, %d files unresolved\n"
         )
         % (
             stats.updatedcount,
@@ -1006,7 +1019,7 @@
         node,
         branchmerge=False,
         force=overwrite,
-        labels=['working copy', 'destination'],
+        labels=[b'working copy', b'destination'],
         updatecheck=updatecheck,
     )
 
@@ -1016,7 +1029,7 @@
     stats = updaterepo(repo, node, False, updatecheck=updatecheck)
     _showstats(repo, stats, quietempty)
     if stats.unresolvedcount:
-        repo.ui.status(_("use 'hg resolve' to retry unresolved file merges\n"))
+        repo.ui.status(_(b"use 'hg resolve' to retry unresolved file merges\n"))
     return stats.unresolvedcount > 0
 
 
@@ -1027,7 +1040,7 @@
 def clean(repo, node, show_stats=True, quietempty=False):
     """forcibly switch the working directory to node, clobbering changes"""
     stats = updaterepo(repo, node, True)
-    repo.vfs.unlinkpath('graftstate', ignoremissing=True)
+    repo.vfs.unlinkpath(b'graftstate', ignoremissing=True)
     if show_stats:
         _showstats(repo, stats, quietempty)
     return stats.unresolvedcount > 0
@@ -1071,7 +1084,7 @@
     This returns whether conflict is detected at updating or not.
     """
     if updatecheck is None:
-        updatecheck = ui.config('commands', 'update.check')
+        updatecheck = ui.config(b'commands', b'update.check')
         if updatecheck not in _VALID_UPDATECHECKS:
             # If not configured, or invalid value configured
             updatecheck = mergemod.UPDATECHECK_LINEAR
@@ -1097,25 +1110,25 @@
             ret = _update(repo, checkout, updatecheck=updatecheck)
 
         if not ret and movemarkfrom:
-            if movemarkfrom == repo['.'].node():
+            if movemarkfrom == repo[b'.'].node():
                 pass  # no-op update
-            elif bookmarks.update(repo, [movemarkfrom], repo['.'].node()):
-                b = ui.label(repo._activebookmark, 'bookmarks.active')
-                ui.status(_("updating bookmark %s\n") % b)
+            elif bookmarks.update(repo, [movemarkfrom], repo[b'.'].node()):
+                b = ui.label(repo._activebookmark, b'bookmarks.active')
+                ui.status(_(b"updating bookmark %s\n") % b)
             else:
                 # this can happen with a non-linear update
-                b = ui.label(repo._activebookmark, 'bookmarks')
-                ui.status(_("(leaving bookmark %s)\n") % b)
+                b = ui.label(repo._activebookmark, b'bookmarks')
+                ui.status(_(b"(leaving bookmark %s)\n") % b)
                 bookmarks.deactivate(repo)
         elif brev in repo._bookmarks:
             if brev != repo._activebookmark:
-                b = ui.label(brev, 'bookmarks.active')
-                ui.status(_("(activating bookmark %s)\n") % b)
+                b = ui.label(brev, b'bookmarks.active')
+                ui.status(_(b"(activating bookmark %s)\n") % b)
             bookmarks.activate(repo, brev)
         elif brev:
             if repo._activebookmark:
-                b = ui.label(repo._activebookmark, 'bookmarks')
-                ui.status(_("(leaving bookmark %s)\n") % b)
+                b = ui.label(repo._activebookmark, b'bookmarks')
+                ui.status(_(b"(leaving bookmark %s)\n") % b)
             bookmarks.deactivate(repo)
 
         if warndest:
@@ -1150,12 +1163,12 @@
     if stats.unresolvedcount:
         repo.ui.status(
             _(
-                "use 'hg resolve' to retry unresolved file merges "
-                "or 'hg merge --abort' to abandon\n"
+                b"use 'hg resolve' to retry unresolved file merges "
+                b"or 'hg merge --abort' to abandon\n"
             )
         )
     elif remind:
-        repo.ui.status(_("(branch merge, don't forget to commit)\n"))
+        repo.ui.status(_(b"(branch merge, don't forget to commit)\n"))
     return stats.unresolvedcount > 0
 
 
@@ -1166,10 +1179,10 @@
         node = ms.localctx.hex()
     else:
         # there were no conflicts, mergestate was not stored
-        node = repo['.'].hex()
+        node = repo[b'.'].hex()
 
     repo.ui.status(
-        _("aborting the merge, updating back to" " %s\n") % node[:12]
+        _(b"aborting the merge, updating back to" b" %s\n") % node[:12]
     )
     stats = mergemod.update(repo, node, branchmerge=False, force=True)
     _showstats(repo, stats)
@@ -1185,21 +1198,21 @@
         (remoterepo, incomingchangesetlist, displayer) parameters,
     and is supposed to contain only code that can't be unified.
     """
-    source, branches = parseurl(ui.expandpath(source), opts.get('branch'))
+    source, branches = parseurl(ui.expandpath(source), opts.get(b'branch'))
     other = peer(repo, opts, source)
-    ui.status(_('comparing with %s\n') % util.hidepassword(source))
-    revs, checkout = addbranchrevs(repo, other, branches, opts.get('rev'))
+    ui.status(_(b'comparing with %s\n') % util.hidepassword(source))
+    revs, checkout = addbranchrevs(repo, other, branches, opts.get(b'rev'))
 
     if revs:
         revs = [other.lookup(rev) for rev in revs]
     other, chlist, cleanupfn = bundlerepo.getremotechanges(
-        ui, repo, other, revs, opts["bundle"], opts["force"]
+        ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
     )
     try:
         if not chlist:
-            ui.status(_("no changes found\n"))
+            ui.status(_(b"no changes found\n"))
             return subreporecurse()
-        ui.pager('incoming')
+        ui.pager(b'incoming')
         displayer = logcmdutil.changesetdisplayer(
             ui, other, opts, buffered=buffered
         )
@@ -1214,7 +1227,7 @@
 def incoming(ui, repo, source, opts):
     def subreporecurse():
         ret = 1
-        if opts.get('subrepos'):
+        if opts.get(b'subrepos'):
             ctx = repo[None]
             for subpath in sorted(ctx.substate):
                 sub = ctx.sub(subpath)
@@ -1223,14 +1236,14 @@
 
     def display(other, chlist, displayer):
         limit = logcmdutil.getlimit(opts)
-        if opts.get('newest_first'):
+        if opts.get(b'newest_first'):
             chlist.reverse()
         count = 0
         for n in chlist:
             if limit is not None and count >= limit:
                 break
             parents = [p for p in other.changelog.parents(n) if p != nullid]
-            if opts.get('no_merges') and len(parents) == 2:
+            if opts.get(b'no_merges') and len(parents) == 2:
                 continue
             count += 1
             displayer.show(other[n])
@@ -1239,23 +1252,23 @@
 
 
 def _outgoing(ui, repo, dest, opts):
-    path = ui.paths.getpath(dest, default=('default-push', 'default'))
+    path = ui.paths.getpath(dest, default=(b'default-push', b'default'))
     if not path:
         raise error.Abort(
-            _('default repository not configured!'),
-            hint=_("see 'hg help config.paths'"),
+            _(b'default repository not configured!'),
+            hint=_(b"see 'hg help config.paths'"),
         )
     dest = path.pushloc or path.loc
-    branches = path.branch, opts.get('branch') or []
+    branches = path.branch, opts.get(b'branch') or []
 
-    ui.status(_('comparing with %s\n') % util.hidepassword(dest))
-    revs, checkout = addbranchrevs(repo, repo, branches, opts.get('rev'))
+    ui.status(_(b'comparing with %s\n') % util.hidepassword(dest))
+    revs, checkout = addbranchrevs(repo, repo, branches, opts.get(b'rev'))
     if revs:
         revs = [repo[rev].node() for rev in scmutil.revrange(repo, revs)]
 
     other = peer(repo, opts, dest)
     outgoing = discovery.findcommonoutgoing(
-        repo, other, revs, force=opts.get('force')
+        repo, other, revs, force=opts.get(b'force')
     )
     o = outgoing.missing
     if not o:
@@ -1266,7 +1279,7 @@
 def outgoing(ui, repo, dest, opts):
     def recurse():
         ret = 1
-        if opts.get('subrepos'):
+        if opts.get(b'subrepos'):
             ctx = repo[None]
             for subpath in sorted(ctx.substate):
                 sub = ctx.sub(subpath)
@@ -1279,16 +1292,16 @@
         cmdutil.outgoinghooks(ui, repo, other, opts, o)
         return recurse()
 
-    if opts.get('newest_first'):
+    if opts.get(b'newest_first'):
         o.reverse()
-    ui.pager('outgoing')
+    ui.pager(b'outgoing')
     displayer = logcmdutil.changesetdisplayer(ui, repo, opts)
     count = 0
     for n in o:
         if limit is not None and count >= limit:
             break
         parents = [p for p in repo.changelog.parents(n) if p != nullid]
-        if opts.get('no_merges') and len(parents) == 2:
+        if opts.get(b'no_merges') and len(parents) == 2:
             continue
         count += 1
         displayer.show(repo[n])
@@ -1308,11 +1321,11 @@
 
     # pathto() is needed for -R case
     revs = repo.revs(
-        "filelog(%s)", util.pathto(repo.root, repo.getcwd(), '.hgsubstate')
+        b"filelog(%s)", util.pathto(repo.root, repo.getcwd(), b'.hgsubstate')
     )
 
     if revs:
-        repo.ui.status(_('checking subrepo links\n'))
+        repo.ui.status(_(b'checking subrepo links\n'))
         for rev in revs:
             ctx = repo[rev]
             try:
@@ -1322,10 +1335,10 @@
                             ctx.sub(subpath, allowcreate=False).verify() or ret
                         )
                     except error.RepoError as e:
-                        repo.ui.warn('%d: %s\n' % (rev, e))
+                        repo.ui.warn(b'%d: %s\n' % (rev, e))
             except Exception:
                 repo.ui.warn(
-                    _('.hgsubstate is corrupt in revision %s\n')
+                    _(b'.hgsubstate is corrupt in revision %s\n')
                     % node.short(ctx.node())
                 )
 
@@ -1333,31 +1346,31 @@
 
 
 def remoteui(src, opts):
     'build a remote ui from ui or repo and opts'
-    if util.safehasattr(src, 'baseui'):  # looks like a repository
+    if util.safehasattr(src, b'baseui'):  # looks like a repository
         dst = src.baseui.copy()  # drop repo-specific config
         src = src.ui  # copy target options from repo
     else:  # assume it's a global ui object
         dst = src.copy()  # keep all global options
 
     # copy ssh-specific options
-    for o in 'ssh', 'remotecmd':
-        v = opts.get(o) or src.config('ui', o)
+    for o in b'ssh', b'remotecmd':
+        v = opts.get(o) or src.config(b'ui', o)
         if v:
-            dst.setconfig("ui", o, v, 'copied')
+            dst.setconfig(b"ui", o, v, b'copied')
 
     # copy bundle-specific options
-    r = src.config('bundle', 'mainreporoot')
+    r = src.config(b'bundle', b'mainreporoot')
     if r:
-        dst.setconfig('bundle', 'mainreporoot', r, 'copied')
+        dst.setconfig(b'bundle', b'mainreporoot', r, b'copied')
 
     # copy selected local settings to the remote ui
-    for sect in ('auth', 'hostfingerprints', 'hostsecurity', 'http_proxy'):
+    for sect in (b'auth', b'hostfingerprints', b'hostsecurity', b'http_proxy'):
         for key, val in src.configitems(sect):
-            dst.setconfig(sect, key, val, 'copied')
-    v = src.config('web', 'cacerts')
+            dst.setconfig(sect, key, val, b'copied')
+    v = src.config(b'web', b'cacerts')
     if v:
-        dst.setconfig('web', 'cacerts', util.expandpath(v), 'copied')
+        dst.setconfig(b'web', b'cacerts', util.expandpath(v), b'copied')
 
     return dst
 
@@ -1366,10 +1379,10 @@
 # Used to check if the repository has changed looking at mtime and size of
 # these files.
 foi = [
-    ('spath', '00changelog.i'),
-    ('spath', 'phaseroots'),  # ! phase can change content at the same size
-    ('spath', 'obsstore'),
-    ('path', 'bookmarks'),  # ! bookmark can change content at the same size
+    (b'spath', b'00changelog.i'),
+    (b'spath', b'phaseroots'),  # ! phase can change content at the same size
+    (b'spath', b'obsstore'),
+    (b'path', b'bookmarks'),  # ! bookmark can change content at the same size
 ]
 
 
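The foi table feeds a cheap "did the repository change?" test: stat each file
of interest and compare (mtime, size) pairs, with the caveats in the comments
that phase and bookmark data can change content without changing size. A
standalone sketch of that check (hypothetical paths):

    import os

    def snapshot(paths):
        state = {}
        for p in paths:
            try:
                st = os.stat(p)
                state[p] = (st.st_mtime, st.st_size)
            except OSError:
                state[p] = None
        return state

    before = snapshot([b'/tmp/repo/.hg/store/00changelog.i'])
    # ... repository activity happens here ...
    after = snapshot([b'/tmp/repo/.hg/store/00changelog.i'])
    print(after != before)
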
--- a/mercurial/hgweb/__init__.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/hgweb/__init__.py	Sun Oct 06 09:48:39 2019 -0400
@@ -39,7 +39,7 @@
 
     if isinstance(config, pycompat.unicode):
         raise error.ProgrammingError(
-            'Mercurial only supports encoded strings: %r' % config
+            b'Mercurial only supports encoded strings: %r' % config
         )
     if (
         (isinstance(config, bytes) and not os.path.isdir(config))
@@ -66,16 +66,16 @@
         self.httpd = server.create_server(self.ui, self.app)
 
         if (
-            self.opts['port']
+            self.opts[b'port']
             and not self.ui.verbose
-            and not self.opts['print_url']
+            and not self.opts[b'print_url']
         ):
             return
 
         if self.httpd.prefix:
-            prefix = self.httpd.prefix.strip('/') + '/'
+            prefix = self.httpd.prefix.strip(b'/') + b'/'
         else:
-            prefix = ''
+            prefix = b''
 
         port = r':%d' % self.httpd.port
         if port == r':80':
@@ -91,20 +91,20 @@
         if r':' in fqaddr:
             fqaddr = r'[%s]' % fqaddr
 
-        url = 'http://%s%s/%s' % (
+        url = b'http://%s%s/%s' % (
             pycompat.sysbytes(fqaddr),
             pycompat.sysbytes(port),
             prefix,
         )
-        if self.opts['print_url']:
-            self.ui.write('%s\n' % url)
+        if self.opts[b'print_url']:
+            self.ui.write(b'%s\n' % url)
         else:
-            if self.opts['port']:
+            if self.opts[b'port']:
                 write = self.ui.status
             else:
                 write = self.ui.write
             write(
-                _('listening at %s (bound to %s:%d)\n')
+                _(b'listening at %s (bound to %s:%d)\n')
                 % (url, pycompat.sysbytes(bindaddr), self.httpd.port)
             )
         self.ui.flush()  # avoid buffering of status message
@@ -119,6 +119,6 @@
     else:
         if not repo:
             raise error.RepoError(
-                _("there is no Mercurial repository" " here (.hg not found)")
+                _(b"there is no Mercurial repository" b" here (.hg not found)")
             )
         return hgweb_mod.hgweb(repo, baseui=baseui)
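
Note the mixed types the URL assembly above has to reconcile: fqaddr and port
come back from the socket layer as native (r'') strings and are converted with
pycompat.sysbytes before the bytes-formatting, while prefix is already bytes. A
sketch of that boundary, with a stand-in for sysbytes:

    def sysbytes(s):  # mimics pycompat.sysbytes on Python 3
        return s if isinstance(s, bytes) else s.encode('utf-8')

    fqaddr, port, prefix = 'example.com', ':8000', b''
    url = b'http://%s%s/%s' % (sysbytes(fqaddr), sysbytes(port), prefix)
    print(url.decode('ascii'))  # http://example.com:8000/
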
--- a/mercurial/hgweb/common.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/hgweb/common.py	Sun Oct 06 09:48:39 2019 -0400
@@ -42,7 +42,7 @@
     Can be overridden by extensions to provide more complex authorization
     schemes.
     """
-    return userlist == ['*'] or username in userlist
+    return userlist == [b'*'] or username in userlist
 
 
 def checkauthz(hgweb, req, op):
@@ -52,41 +52,41 @@
 
     user = req.remoteuser
 
-    deny_read = hgweb.configlist('web', 'deny_read')
+    deny_read = hgweb.configlist(b'web', b'deny_read')
     if deny_read and (not user or ismember(hgweb.repo.ui, user, deny_read)):
-        raise ErrorResponse(HTTP_UNAUTHORIZED, 'read not authorized')
+        raise ErrorResponse(HTTP_UNAUTHORIZED, b'read not authorized')
 
-    allow_read = hgweb.configlist('web', 'allow_read')
+    allow_read = hgweb.configlist(b'web', b'allow_read')
     if allow_read and (not ismember(hgweb.repo.ui, user, allow_read)):
-        raise ErrorResponse(HTTP_UNAUTHORIZED, 'read not authorized')
+        raise ErrorResponse(HTTP_UNAUTHORIZED, b'read not authorized')
 
-    if op == 'pull' and not hgweb.allowpull:
-        raise ErrorResponse(HTTP_UNAUTHORIZED, 'pull not authorized')
-    elif op == 'pull' or op is None:  # op is None for interface requests
+    if op == b'pull' and not hgweb.allowpull:
+        raise ErrorResponse(HTTP_UNAUTHORIZED, b'pull not authorized')
+    elif op == b'pull' or op is None:  # op is None for interface requests
         return
 
     # Allow LFS uploading via PUT requests
-    if op == 'upload':
-        if req.method != 'PUT':
-            msg = 'upload requires PUT request'
+    if op == b'upload':
+        if req.method != b'PUT':
+            msg = b'upload requires PUT request'
             raise ErrorResponse(HTTP_METHOD_NOT_ALLOWED, msg)
     # enforce that you can only push using POST requests
-    elif req.method != 'POST':
-        msg = 'push requires POST request'
+    elif req.method != b'POST':
+        msg = b'push requires POST request'
         raise ErrorResponse(HTTP_METHOD_NOT_ALLOWED, msg)
 
     # require ssl by default for pushing, auth info cannot be sniffed
     # and replayed
-    if hgweb.configbool('web', 'push_ssl') and req.urlscheme != 'https':
-        raise ErrorResponse(HTTP_FORBIDDEN, 'ssl required')
+    if hgweb.configbool(b'web', b'push_ssl') and req.urlscheme != b'https':
+        raise ErrorResponse(HTTP_FORBIDDEN, b'ssl required')
 
-    deny = hgweb.configlist('web', 'deny_push')
+    deny = hgweb.configlist(b'web', b'deny_push')
     if deny and (not user or ismember(hgweb.repo.ui, user, deny)):
-        raise ErrorResponse(HTTP_UNAUTHORIZED, 'push not authorized')
+        raise ErrorResponse(HTTP_UNAUTHORIZED, b'push not authorized')
 
-    allow = hgweb.configlist('web', 'allow-push')
+    allow = hgweb.configlist(b'web', b'allow-push')
     if not (allow and ismember(hgweb.repo.ui, user, allow)):
-        raise ErrorResponse(HTTP_UNAUTHORIZED, 'push not authorized')
+        raise ErrorResponse(HTTP_UNAUTHORIZED, b'push not authorized')
 
 
 # Hooks for hgweb permission checks; extensions can add hooks here.
@@ -128,11 +128,11 @@
     def read(self, amt=-1):
         if not self.continued:
             self.continued = True
-            self._write('HTTP/1.1 100 Continue\r\n\r\n')
+            self._write(b'HTTP/1.1 100 Continue\r\n\r\n')
         return self.f.read(amt)
 
     def __getattr__(self, attr):
         if attr in ('close', 'readline', 'readlines', '__iter__'):
             return getattr(self.f, attr)
         raise AttributeError
 
@@ -145,7 +145,7 @@
 
 
 def statusmessage(code, message=None):
-    return '%d %s' % (code, message or _statusmessage(code))
+    return b'%d %s' % (code, message or _statusmessage(code))
 
 
 def get_stat(spath, fn):
@@ -158,15 +158,15 @@
 
 
 def get_mtime(spath):
-    return get_stat(spath, "00changelog.i")[stat.ST_MTIME]
+    return get_stat(spath, b"00changelog.i")[stat.ST_MTIME]
 
 
 def ispathsafe(path):
     """Determine if a path is safe to use for filesystem access."""
-    parts = path.split('/')
+    parts = path.split(b'/')
     for part in parts:
         if (
-            part in ('', pycompat.oscurdir, pycompat.ospardir)
+            part in (b'', pycompat.oscurdir, pycompat.ospardir)
             or pycompat.ossep in part
             or pycompat.osaltsep is not None
             and pycompat.osaltsep in part
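
ispathsafe() rejects any segment that is empty, '.', '..', or that smuggles in
an OS separator; since it splits on b'/', the literals it compares against must
be bytes too. A standalone copy with the POSIX values pycompat would supply
(on Windows the ossep/osaltsep checks do real work):

    oscurdir, ospardir, ossep, osaltsep = b'.', b'..', b'/', None

    def ispathsafe(path):
        for part in path.split(b'/'):
            if (part in (b'', oscurdir, ospardir)
                or ossep in part
                or (osaltsep is not None and osaltsep in part)):
                return False
        return True

    print(ispathsafe(b'static/style.css'))  # True
    print(ispathsafe(b'../../etc/passwd'))  # False
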
@@ -188,7 +188,7 @@
     if not ispathsafe(fname):
         return
 
-    fpath = os.path.join(*fname.split('/'))
+    fpath = os.path.join(*fname.split(b'/'))
     if isinstance(directory, str):
         directory = [directory]
     for d in directory:
@@ -200,14 +200,14 @@
         ct = pycompat.sysbytes(
             mimetypes.guess_type(pycompat.fsdecode(path))[0] or r"text/plain"
         )
-        with open(path, 'rb') as fh:
+        with open(path, b'rb') as fh:
             data = fh.read()
 
-        res.headers['Content-Type'] = ct
+        res.headers[b'Content-Type'] = ct
         res.setbodybytes(data)
         return res
     except TypeError:
-        raise ErrorResponse(HTTP_SERVER_ERROR, 'illegal filename')
+        raise ErrorResponse(HTTP_SERVER_ERROR, b'illegal filename')
     except OSError as err:
         if err.errno == errno.ENOENT:
             raise ErrorResponse(HTTP_NOT_FOUND)
@@ -241,10 +241,10 @@
     ui.username or $EMAIL as a fallback to display something useful.
     """
     return (
-        config("web", "contact")
-        or config("ui", "username")
-        or encoding.environ.get("EMAIL")
-        or ""
+        config(b"web", b"contact")
+        or config(b"ui", b"username")
+        or encoding.environ.get(b"EMAIL")
+        or b""
     )
 
 
@@ -275,11 +275,11 @@
 
     # Don't allow untrusted CSP setting since it could disable protections
     # from a trusted/global source.
-    csp = ui.config('web', 'csp', untrusted=False)
+    csp = ui.config(b'web', b'csp', untrusted=False)
     nonce = None
 
-    if csp and '%nonce%' in csp:
-        nonce = base64.urlsafe_b64encode(uuid.uuid4().bytes).rstrip('=')
-        csp = csp.replace('%nonce%', nonce)
+    if csp and b'%nonce%' in csp:
+        nonce = base64.urlsafe_b64encode(uuid.uuid4().bytes).rstrip(b'=')
+        csp = csp.replace(b'%nonce%', nonce)
 
     return csp, nonce
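
The nonce above is the urlsafe base64 of the 16 random bytes behind a uuid4,
with the '=' padding stripped, substituted for the %nonce% placeholder in the
configured policy. A standalone sketch with an example policy:

    import base64
    import uuid

    csp = b"default-src 'self'; script-src 'self' 'nonce-%nonce%'"
    nonce = base64.urlsafe_b64encode(uuid.uuid4().bytes).rstrip(b'=')
    print(csp.replace(b'%nonce%', nonce))
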
--- a/mercurial/hgweb/hgweb_mod.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/hgweb/hgweb_mod.py	Sun Oct 06 09:48:39 2019 -0400
@@ -48,35 +48,35 @@
 
 def getstyle(req, configfn, templatepath):
     styles = (
-        req.qsparams.get('style', None),
-        configfn('web', 'style'),
-        'paper',
+        req.qsparams.get(b'style', None),
+        configfn(b'web', b'style'),
+        b'paper',
     )
     return styles, templater.stylemap(styles, templatepath)
 
 
-def makebreadcrumb(url, prefix=''):
+def makebreadcrumb(url, prefix=b''):
     '''Return a 'URL breadcrumb' list
 
     A 'URL breadcrumb' is a list of URL-name pairs,
     corresponding to each of the path items on a URL.
     This can be used to create path navigation entries.
     '''
-    if url.endswith('/'):
+    if url.endswith(b'/'):
         url = url[:-1]
     if prefix:
-        url = '/' + prefix + url
+        url = b'/' + prefix + url
     relpath = url
-    if relpath.startswith('/'):
+    if relpath.startswith(b'/'):
         relpath = relpath[1:]
 
     breadcrumb = []
     urlel = url
-    pathitems = [''] + relpath.split('/')
+    pathitems = [b''] + relpath.split(b'/')
     for pathel in reversed(pathitems):
         if not pathel or not urlel:
             break
-        breadcrumb.append({'url': urlel, 'name': pathel})
+        breadcrumb.append({b'url': urlel, b'name': pathel})
         urlel = os.path.dirname(urlel)
     return templateutil.mappinglist(reversed(breadcrumb))
 
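makebreadcrumb() walks the URL right to left, emitting one {url, name} pair per
path element until it runs out of components. The same walk with plain dicts in
place of templateutil.mappinglist, and posixpath.dirname for URL semantics
(illustration only):

    import posixpath

    def breadcrumb(url):
        if url.endswith(b'/'):
            url = url[:-1]
        relpath = url[1:] if url.startswith(b'/') else url
        crumbs, urlel = [], url
        for pathel in reversed([b''] + relpath.split(b'/')):
            if not pathel or not urlel:
                break
            crumbs.append({b'url': urlel, b'name': pathel})
            urlel = posixpath.dirname(urlel)
        return list(reversed(crumbs))

    for crumb in breadcrumb(b'/hg/repo/folder'):
        print(crumb)
    # {b'url': b'/hg', b'name': b'hg'}
    # {b'url': b'/hg/repo', b'name': b'repo'}
    # {b'url': b'/hg/repo/folder', b'name': b'folder'}
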
@@ -95,16 +95,16 @@
         self.req = req
         self.res = res
 
-        self.maxchanges = self.configint('web', 'maxchanges')
-        self.stripecount = self.configint('web', 'stripes')
-        self.maxshortchanges = self.configint('web', 'maxshortchanges')
-        self.maxfiles = self.configint('web', 'maxfiles')
-        self.allowpull = self.configbool('web', 'allow-pull')
+        self.maxchanges = self.configint(b'web', b'maxchanges')
+        self.stripecount = self.configint(b'web', b'stripes')
+        self.maxshortchanges = self.configint(b'web', b'maxshortchanges')
+        self.maxfiles = self.configint(b'web', b'maxfiles')
+        self.allowpull = self.configbool(b'web', b'allow-pull')
 
         # we use untrusted=False to prevent a repo owner from using
         # web.templates in .hg/hgrc to get access to any file readable
         # by the user running the CGI script
-        self.templatepath = self.config('web', 'templates', untrusted=False)
+        self.templatepath = self.config(b'web', b'templates', untrusted=False)
 
         # This object is more expensive to build than simple config values.
         # It is shared across requests. The app will replace the object
@@ -140,27 +140,27 @@
     def templater(self, req):
         # determine scheme, port and server name
         # this is needed to create absolute urls
-        logourl = self.config('web', 'logourl')
-        logoimg = self.config('web', 'logoimg')
+        logourl = self.config(b'web', b'logourl')
+        logoimg = self.config(b'web', b'logoimg')
         staticurl = (
-            self.config('web', 'staticurl')
-            or req.apppath.rstrip('/') + '/static/'
+            self.config(b'web', b'staticurl')
+            or req.apppath.rstrip(b'/') + b'/static/'
         )
-        if not staticurl.endswith('/'):
-            staticurl += '/'
+        if not staticurl.endswith(b'/'):
+            staticurl += b'/'
 
         # figure out which style to use
 
         vars = {}
         styles, (style, mapfile) = getstyle(req, self.config, self.templatepath)
         if style == styles[0]:
-            vars['style'] = style
+            vars[b'style'] = style
 
-        sessionvars = webutil.sessionvars(vars, '?')
+        sessionvars = webutil.sessionvars(vars, b'?')
 
         if not self.reponame:
             self.reponame = (
-                self.config('web', 'name', '')
+                self.config(b'web', b'name', b'')
                 or req.reponame
                 or req.apppath
                 or self.repo.root
@@ -169,30 +169,30 @@
         filters = {}
         templatefilter = registrar.templatefilter(filters)
 
-        @templatefilter('websub', intype=bytes)
+        @templatefilter(b'websub', intype=bytes)
         def websubfilter(text):
             return templatefilters.websub(text, self.websubtable)
 
         # create the templater
         # TODO: export all keywords: defaults = templatekw.keywords.copy()
         defaults = {
-            'url': req.apppath + '/',
-            'logourl': logourl,
-            'logoimg': logoimg,
-            'staticurl': staticurl,
-            'urlbase': req.advertisedbaseurl,
-            'repo': self.reponame,
-            'encoding': encoding.encoding,
-            'sessionvars': sessionvars,
-            'pathdef': makebreadcrumb(req.apppath),
-            'style': style,
-            'nonce': self.nonce,
+            b'url': req.apppath + b'/',
+            b'logourl': logourl,
+            b'logoimg': logoimg,
+            b'staticurl': staticurl,
+            b'urlbase': req.advertisedbaseurl,
+            b'repo': self.reponame,
+            b'encoding': encoding.encoding,
+            b'sessionvars': sessionvars,
+            b'pathdef': makebreadcrumb(req.apppath),
+            b'style': style,
+            b'nonce': self.nonce,
         }
         templatekeyword = registrar.templatekeyword(defaults)
 
-        @templatekeyword('motd', requires=())
+        @templatekeyword(b'motd', requires=())
         def motd(context, mapping):
-            yield self.config('web', 'motd')
+            yield self.config(b'web', b'motd')
 
         tres = formatter.templateresources(self.repo.ui, self.repo)
         tmpl = templater.templater.frommapfile(
@@ -232,23 +232,23 @@
             # we trust caller to give us a private copy
             r = repo
 
-        r.ui.setconfig('ui', 'report_untrusted', 'off', 'hgweb')
-        r.baseui.setconfig('ui', 'report_untrusted', 'off', 'hgweb')
-        r.ui.setconfig('ui', 'nontty', 'true', 'hgweb')
-        r.baseui.setconfig('ui', 'nontty', 'true', 'hgweb')
+        r.ui.setconfig(b'ui', b'report_untrusted', b'off', b'hgweb')
+        r.baseui.setconfig(b'ui', b'report_untrusted', b'off', b'hgweb')
+        r.ui.setconfig(b'ui', b'nontty', b'true', b'hgweb')
+        r.baseui.setconfig(b'ui', b'nontty', b'true', b'hgweb')
         # resolve file patterns relative to repo root
-        r.ui.setconfig('ui', 'forcecwd', r.root, 'hgweb')
-        r.baseui.setconfig('ui', 'forcecwd', r.root, 'hgweb')
+        r.ui.setconfig(b'ui', b'forcecwd', r.root, b'hgweb')
+        r.baseui.setconfig(b'ui', b'forcecwd', r.root, b'hgweb')
         # it's unlikely that we can replace signal handlers in a WSGI server,
         # and mod_wsgi issues a big warning. a plain hgweb process (with no
         # threading) could replace signal handlers, but we don't bother
         # conditionally enabling it.
-        r.ui.setconfig('ui', 'signal-safe-lock', 'false', 'hgweb')
-        r.baseui.setconfig('ui', 'signal-safe-lock', 'false', 'hgweb')
+        r.ui.setconfig(b'ui', b'signal-safe-lock', b'false', b'hgweb')
+        r.baseui.setconfig(b'ui', b'signal-safe-lock', b'false', b'hgweb')
         # displaying a bundling progress bar while serving feels wrong and may
         # break some wsgi implementations.
-        r.ui.setconfig('progress', 'disable', 'true', 'hgweb')
-        r.baseui.setconfig('progress', 'disable', 'true', 'hgweb')
+        r.ui.setconfig(b'progress', b'disable', b'true', b'hgweb')
+        r.baseui.setconfig(b'progress', b'disable', b'true', b'hgweb')
         self._repos = [hg.cachedlocalrepo(self._webifyrepo(r))]
         self._lastrepo = self._repos[0]
         hook.redirect(True)
@@ -294,12 +294,12 @@
         Modern servers should be using WSGI and should avoid this
         method, if possible.
         """
-        if not encoding.environ.get('GATEWAY_INTERFACE', '').startswith(
-            "CGI/1."
+        if not encoding.environ.get(b'GATEWAY_INTERFACE', b'').startswith(
+            b"CGI/1."
         ):
             raise RuntimeError(
-                "This function is only intended to be "
-                "called while running as a CGI script."
+                b"This function is only intended to be "
+                b"called while running as a CGI script."
             )
         wsgicgi.launch(self)
 
@@ -320,7 +320,7 @@
         should be using instances of this class as the WSGI application.
         """
         with self._obtainrepo() as repo:
-            profile = repo.ui.configbool('profiling', 'enabled')
+            profile = repo.ui.configbool(b'profiling', b'enabled')
             with profiling.profile(repo.ui, enabled=profile):
                 for r in self._runwsgi(req, res, repo):
                     yield r
@@ -329,19 +329,19 @@
         rctx = requestcontext(self, repo, req, res)
 
         # This state is global across all threads.
-        encoding.encoding = rctx.config('web', 'encoding')
+        encoding.encoding = rctx.config(b'web', b'encoding')
         rctx.repo.ui.environ = req.rawenv
 
         if rctx.csp:
             # hgwebdir may have added CSP header. Since we generate our own,
             # replace it.
-            res.headers['Content-Security-Policy'] = rctx.csp
+            res.headers[b'Content-Security-Policy'] = rctx.csp
 
         # /api/* is reserved for various API implementations. Dispatch
         # accordingly. But URL paths can conflict with subrepos and virtual
         # repos in hgwebdir. So until we have a workaround for this, only
         # expose the URLs if the feature is enabled.
-        apienabled = rctx.repo.ui.configbool('experimental', 'web.apiserver')
+        apienabled = rctx.repo.ui.configbool(b'experimental', b'web.apiserver')
         if apienabled and req.dispatchparts and req.dispatchparts[0] == b'api':
             wireprotoserver.handlewsgiapirequest(
                 rctx, req, res, self.check_perm
@@ -361,70 +361,70 @@
         if req.dispatchpath is not None:
             query = req.dispatchpath
         else:
-            query = req.querystring.partition('&')[0].partition(';')[0]
+            query = req.querystring.partition(b'&')[0].partition(b';')[0]
 
         # translate user-visible url structure to internal structure
 
-        args = query.split('/', 2)
-        if 'cmd' not in req.qsparams and args and args[0]:
+        args = query.split(b'/', 2)
+        if b'cmd' not in req.qsparams and args and args[0]:
             cmd = args.pop(0)
-            style = cmd.rfind('-')
+            style = cmd.rfind(b'-')
             if style != -1:
-                req.qsparams['style'] = cmd[:style]
+                req.qsparams[b'style'] = cmd[:style]
                 cmd = cmd[style + 1 :]
 
             # avoid accepting e.g. style parameter as command
             if util.safehasattr(webcommands, cmd):
-                req.qsparams['cmd'] = cmd
+                req.qsparams[b'cmd'] = cmd
 
-            if cmd == 'static':
-                req.qsparams['file'] = '/'.join(args)
+            if cmd == b'static':
+                req.qsparams[b'file'] = b'/'.join(args)
             else:
                 if args and args[0]:
-                    node = args.pop(0).replace('%2F', '/')
-                    req.qsparams['node'] = node
+                    node = args.pop(0).replace(b'%2F', b'/')
+                    req.qsparams[b'node'] = node
                 if args:
-                    if 'file' in req.qsparams:
-                        del req.qsparams['file']
+                    if b'file' in req.qsparams:
+                        del req.qsparams[b'file']
                     for a in args:
-                        req.qsparams.add('file', a)
+                        req.qsparams.add(b'file', a)
 
-            ua = req.headers.get('User-Agent', '')
-            if cmd == 'rev' and 'mercurial' in ua:
-                req.qsparams['style'] = 'raw'
+            ua = req.headers.get(b'User-Agent', b'')
+            if cmd == b'rev' and b'mercurial' in ua:
+                req.qsparams[b'style'] = b'raw'
 
-            if cmd == 'archive':
-                fn = req.qsparams['node']
+            if cmd == b'archive':
+                fn = req.qsparams[b'node']
                 for type_, spec in webutil.archivespecs.iteritems():
                     ext = spec[2]
                     if fn.endswith(ext):
-                        req.qsparams['node'] = fn[: -len(ext)]
-                        req.qsparams['type'] = type_
+                        req.qsparams[b'node'] = fn[: -len(ext)]
+                        req.qsparams[b'type'] = type_
         else:
-            cmd = req.qsparams.get('cmd', '')
+            cmd = req.qsparams.get(b'cmd', b'')
 
         # process the web interface request
 
         try:
             rctx.tmpl = rctx.templater(req)
             ctype = rctx.tmpl.render(
-                'mimetype', {'encoding': encoding.encoding}
+                b'mimetype', {b'encoding': encoding.encoding}
             )
 
             # check read permissions for non-static content
-            if cmd != 'static':
+            if cmd != b'static':
                 self.check_perm(rctx, req, None)
 
-            if cmd == '':
-                req.qsparams['cmd'] = rctx.tmpl.render('default', {})
-                cmd = req.qsparams['cmd']
+            if cmd == b'':
+                req.qsparams[b'cmd'] = rctx.tmpl.render(b'default', {})
+                cmd = req.qsparams[b'cmd']
 
             # Don't enable caching if using a CSP nonce because then it wouldn't
             # be a nonce.
-            if rctx.configbool('web', 'cache') and not rctx.nonce:
-                tag = 'W/"%d"' % self.mtime
-                if req.headers.get('If-None-Match') == tag:
-                    res.status = '304 Not Modified'
+            if rctx.configbool(b'web', b'cache') and not rctx.nonce:
+                tag = b'W/"%d"' % self.mtime
+                if req.headers.get(b'If-None-Match') == tag:
+                    res.status = b'304 Not Modified'
                     # Content-Type may be defined globally. It isn't valid on a
                     # 304, so discard it.
                     try:
@@ -432,45 +432,45 @@
                     except KeyError:
                         pass
                     # Response body not allowed on 304.
-                    res.setbodybytes('')
+                    res.setbodybytes(b'')
                     return res.sendresponse()
 
-                res.headers['ETag'] = tag
+                res.headers[b'ETag'] = tag
 
             if cmd not in webcommands.__all__:
-                msg = 'no such method: %s' % cmd
+                msg = b'no such method: %s' % cmd
                 raise ErrorResponse(HTTP_BAD_REQUEST, msg)
             else:
                 # Set some globals appropriate for web handlers. Commands can
                 # override easily enough.
-                res.status = '200 Script output follows'
-                res.headers['Content-Type'] = ctype
+                res.status = b'200 Script output follows'
+                res.headers[b'Content-Type'] = ctype
                 return getattr(webcommands, cmd)(rctx)
 
         except (error.LookupError, error.RepoLookupError) as err:
             msg = pycompat.bytestr(err)
-            if util.safehasattr(err, 'name') and not isinstance(
+            if util.safehasattr(err, b'name') and not isinstance(
                 err, error.ManifestLookupError
             ):
-                msg = 'revision not found: %s' % err.name
+                msg = b'revision not found: %s' % err.name
 
-            res.status = '404 Not Found'
-            res.headers['Content-Type'] = ctype
-            return rctx.sendtemplate('error', error=msg)
+            res.status = b'404 Not Found'
+            res.headers[b'Content-Type'] = ctype
+            return rctx.sendtemplate(b'error', error=msg)
         except (error.RepoError, error.StorageError) as e:
-            res.status = '500 Internal Server Error'
-            res.headers['Content-Type'] = ctype
-            return rctx.sendtemplate('error', error=pycompat.bytestr(e))
+            res.status = b'500 Internal Server Error'
+            res.headers[b'Content-Type'] = ctype
+            return rctx.sendtemplate(b'error', error=pycompat.bytestr(e))
         except error.Abort as e:
-            res.status = '403 Forbidden'
-            res.headers['Content-Type'] = ctype
-            return rctx.sendtemplate('error', error=pycompat.bytestr(e))
+            res.status = b'403 Forbidden'
+            res.headers[b'Content-Type'] = ctype
+            return rctx.sendtemplate(b'error', error=pycompat.bytestr(e))
         except ErrorResponse as e:
             for k, v in e.headers:
                 res.headers[k] = v
             res.status = statusmessage(e.code, pycompat.bytestr(e))
-            res.headers['Content-Type'] = ctype
-            return rctx.sendtemplate('error', error=pycompat.bytestr(e))
+            res.headers[b'Content-Type'] = ctype
+            return rctx.sendtemplate(b'error', error=pycompat.bytestr(e))
 
     def check_perm(self, rctx, req, op):
         for permhook in permhooks:
@@ -489,10 +489,10 @@
     The option has been around undocumented since Mercurial 2.5, but no
     user ever asked about it. So we'd better keep it undocumented for now."""
     # experimental config: web.view
-    viewconfig = repo.ui.config('web', 'view', untrusted=True)
-    if viewconfig == 'all':
+    viewconfig = repo.ui.config(b'web', b'view', untrusted=True)
+    if viewconfig == b'all':
         return repo.unfiltered()
     elif viewconfig in repoview.filtertable:
         return repo.filtered(viewconfig)
     else:
-        return repo.filtered('served')
+        return repo.filtered(b'served')
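
The view-selection logic above reduces to a small lookup: b'all' means the
unfiltered repository, a known filter name is applied as-is, and anything
else falls back to the b'served' view. A minimal standalone sketch, assuming
illustrative filter names (the real set lives in repoview.filtertable):

    filtertable = {b'visible', b'served', b'immutable', b'base'}  # assumed

    def chooseview(viewconfig):
        # mirrors getwebview() above, returning a label instead of a repo
        if viewconfig == b'all':
            return b'unfiltered'
        elif viewconfig in filtertable:
            return viewconfig
        return b'served'

    assert chooseview(b'all') == b'unfiltered'
    assert chooseview(b'immutable') == b'immutable'
    assert chooseview(None) == b'served'
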
--- a/mercurial/hgweb/hgwebdir_mod.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/hgweb/hgwebdir_mod.py	Sun Oct 06 09:48:39 2019 -0400
@@ -52,7 +52,7 @@
 
 
 def cleannames(items):
-    return [(util.pconvert(name).strip('/'), path) for name, path in items]
+    return [(util.pconvert(name).strip(b'/'), path) for name, path in items]
 
 
 def findrepos(paths):
@@ -64,7 +64,7 @@
         # '*' will not search inside dirs with .hg (except .hg/patches),
         # '**' will search inside dirs with .hg (and thus also find subrepos).
         try:
-            recurse = {'*': False, '**': True}[roottail]
+            recurse = {b'*': False, b'**': True}[roottail]
         except KeyError:
             repos.append((prefix, root))
             continue
@@ -86,8 +86,8 @@
     for path in paths:
         path = os.path.normpath(path)
         yield (
-            prefix + '/' + util.pconvert(path[len(roothead) :]).lstrip('/')
-        ).strip('/'), path
+            prefix + b'/' + util.pconvert(path[len(roothead) :]).lstrip(b'/')
+        ).strip(b'/'), path
 
 
 def readallowed(ui, req):
@@ -101,11 +101,11 @@
 
     user = req.remoteuser
 
-    deny_read = ui.configlist('web', 'deny_read', untrusted=True)
+    deny_read = ui.configlist(b'web', b'deny_read', untrusted=True)
     if deny_read and (not user or ismember(ui, user, deny_read)):
         return False
 
-    allow_read = ui.configlist('web', 'allow_read', untrusted=True)
+    allow_read = ui.configlist(b'web', b'allow_read', untrusted=True)
     # by default, allow reading if no allow_read option has been set
     if not allow_read or ismember(ui, user, allow_read):
         return True
@@ -113,9 +113,9 @@
     return False
 
 
-def rawindexentries(ui, repos, req, subdir=''):
-    descend = ui.configbool('web', 'descend')
-    collapse = ui.configbool('web', 'collapse')
+def rawindexentries(ui, repos, req, subdir=b''):
+    descend = ui.configbool(b'web', b'descend')
+    collapse = ui.configbool(b'web', b'collapse')
     seenrepos = set()
     seendirs = set()
     for name, path in repos:
@@ -125,11 +125,11 @@
         name = name[len(subdir) :]
         directory = False
 
-        if '/' in name:
+        if b'/' in name:
             if not descend:
                 continue
 
-            nameparts = name.split('/')
+            nameparts = name.split(b'/')
             rootname = nameparts[0]
 
             if not collapse:
@@ -143,7 +143,7 @@
                 name = rootname
 
                 # redefine the path to refer to the directory
-                discarded = '/'.join(nameparts[1:])
+                discarded = b'/'.join(nameparts[1:])
 
                 # remove name parts plus accompanying slash
                 path = path[: -len(discarded) - 1]
@@ -155,11 +155,11 @@
                     pass
 
         parts = [
-            req.apppath.strip('/'),
-            subdir.strip('/'),
-            name.strip('/'),
+            req.apppath.strip(b'/'),
+            subdir.strip(b'/'),
+            name.strip(b'/'),
         ]
-        url = '/' + '/'.join(p for p in parts if p) + '/'
+        url = b'/' + b'/'.join(p for p in parts if p) + b'/'
 
         # show either a directory entry or a repository
         if directory:
@@ -172,18 +172,18 @@
             # add '/' to the name to make it obvious that
             # the entry is a directory, not a regular repository
             row = {
-                'contact': "",
-                'contact_sort': "",
-                'name': name + '/',
-                'name_sort': name,
-                'url': url,
-                'description': "",
-                'description_sort': "",
-                'lastchange': d,
-                'lastchange_sort': d[1] - d[0],
-                'archives': templateutil.mappinglist([]),
-                'isdirectory': True,
-                'labels': templateutil.hybridlist([], name='label'),
+                b'contact': b"",
+                b'contact_sort': b"",
+                b'name': name + b'/',
+                b'name_sort': name,
+                b'url': url,
+                b'description': b"",
+                b'description_sort': b"",
+                b'lastchange': d,
+                b'lastchange_sort': d[1] - d[0],
+                b'archives': templateutil.mappinglist([]),
+                b'isdirectory': True,
+                b'labels': templateutil.hybridlist([], name=b'label'),
             }
 
             seendirs.add(name)
@@ -192,15 +192,15 @@
 
         u = ui.copy()
         try:
-            u.readconfig(os.path.join(path, '.hg', 'hgrc'))
+            u.readconfig(os.path.join(path, b'.hg', b'hgrc'))
         except Exception as e:
-            u.warn(_('error reading %s/.hg/hgrc: %s\n') % (path, e))
+            u.warn(_(b'error reading %s/.hg/hgrc: %s\n') % (path, e))
             continue
 
         def get(section, name, default=uimod._unset):
             return u.config(section, name, default, untrusted=True)
 
-        if u.configbool("web", "hidden", untrusted=True):
+        if u.configbool(b"web", b"hidden", untrusted=True):
             continue
 
         if not readallowed(u, req):
@@ -210,10 +210,10 @@
         try:
             r = hg.repository(ui, path)
         except IOError:
-            u.warn(_('error accessing repository at %s\n') % path)
+            u.warn(_(b'error accessing repository at %s\n') % path)
             continue
         except error.RepoError:
-            u.warn(_('error accessing repository at %s\n') % path)
+            u.warn(_(b'error accessing repository at %s\n') % path)
             continue
         try:
             d = (get_mtime(r.spath), dateutil.makedate()[1])
@@ -221,23 +221,23 @@
             continue
 
         contact = get_contact(get)
-        description = get("web", "description")
+        description = get(b"web", b"description")
         seenrepos.add(name)
-        name = get("web", "name", name)
-        labels = u.configlist('web', 'labels', untrusted=True)
+        name = get(b"web", b"name", name)
+        labels = u.configlist(b'web', b'labels', untrusted=True)
         row = {
-            'contact': contact or "unknown",
-            'contact_sort': contact.upper() or "unknown",
-            'name': name,
-            'name_sort': name,
-            'url': url,
-            'description': description or "unknown",
-            'description_sort': description.upper() or "unknown",
-            'lastchange': d,
-            'lastchange_sort': d[1] - d[0],
-            'archives': webutil.archivelist(u, "tip", url),
-            'isdirectory': None,
-            'labels': templateutil.hybridlist(labels, name='label'),
+            b'contact': contact or b"unknown",
+            b'contact_sort': contact.upper() or b"unknown",
+            b'name': name,
+            b'name_sort': name,
+            b'url': url,
+            b'description': description or b"unknown",
+            b'description_sort': description.upper() or b"unknown",
+            b'lastchange': d,
+            b'lastchange_sort': d[1] - d[0],
+            b'archives': webutil.archivelist(u, b"tip", url),
+            b'isdirectory': None,
+            b'labels': templateutil.hybridlist(labels, name=b'label'),
         }
 
         yield row
@@ -251,16 +251,16 @@
     sortdefault = None, False
 
     if sortcolumn and sortdefault != (sortcolumn, descending):
-        sortkey = '%s_sort' % sortcolumn
+        sortkey = b'%s_sort' % sortcolumn
         rows = sorted(rows, key=lambda x: x[sortkey], reverse=descending)
 
     for row, parity in zip(rows, paritygen(stripecount)):
-        row['parity'] = parity
+        row[b'parity'] = parity
         yield row
 
 
 def indexentries(
-    ui, repos, req, stripecount, sortcolumn='', descending=False, subdir=''
+    ui, repos, req, stripecount, sortcolumn=b'', descending=False, subdir=b''
 ):
     args = (ui, repos, req, stripecount, sortcolumn, descending, subdir)
     return templateutil.mappinggenerator(_indexentriesgen, args=args)
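
The hunk above sorts index rows on the b'<column>_sort' key and zips them
with paritygen to stripe the rendered table. A self-contained sketch with a
hypothetical stand-in for common.paritygen (the real generator also takes an
offset):

    import itertools

    def paritygen_sketch(stripecount):
        # alternate blocks of 0s and 1s, 'stripecount' rows per block
        while True:
            yield from itertools.repeat(0, stripecount)
            yield from itertools.repeat(1, stripecount)

    rows = [{b'name_sort': n} for n in (b'beta', b'alpha', b'gamma')]
    rows = sorted(rows, key=lambda x: x[b'name_sort'])
    for row, parity in zip(rows, paritygen_sketch(1)):
        row[b'parity'] = parity
    # parity is 0, 1, 0 for alpha, beta, gamma
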
@@ -289,9 +289,9 @@
 
     def refresh(self):
         if self.ui:
-            refreshinterval = self.ui.configint('web', 'refreshinterval')
+            refreshinterval = self.ui.configint(b'web', b'refreshinterval')
         else:
-            item = configitems.coreitems['web']['refreshinterval']
+            item = configitems.coreitems[b'web'][b'refreshinterval']
             refreshinterval = item.default
 
         # refreshinterval <= 0 means to always refresh.
@@ -305,20 +305,20 @@
             u = self.baseui.copy()
         else:
             u = uimod.ui.load()
-            u.setconfig('ui', 'report_untrusted', 'off', 'hgwebdir')
-            u.setconfig('ui', 'nontty', 'true', 'hgwebdir')
+            u.setconfig(b'ui', b'report_untrusted', b'off', b'hgwebdir')
+            u.setconfig(b'ui', b'nontty', b'true', b'hgwebdir')
             # displaying a bundling progress bar while serving feels wrong and may
             # break some wsgi implementations.
-            u.setconfig('progress', 'disable', 'true', 'hgweb')
+            u.setconfig(b'progress', b'disable', b'true', b'hgweb')
 
         if not isinstance(self.conf, (dict, list, tuple)):
-            map = {'paths': 'hgweb-paths'}
+            map = {b'paths': b'hgweb-paths'}
             if not os.path.exists(self.conf):
-                raise error.Abort(_('config file %s not found!') % self.conf)
+                raise error.Abort(_(b'config file %s not found!') % self.conf)
             u.readconfig(self.conf, remap=map, trust=True)
             paths = []
-            for name, ignored in u.configitems('hgweb-paths'):
-                for path in u.configlist('hgweb-paths', name):
+            for name, ignored in u.configitems(b'hgweb-paths'):
+                for path in u.configlist(b'hgweb-paths', name):
                     paths.append((name, path))
         elif isinstance(self.conf, (list, tuple)):
             paths = self.conf
@@ -327,50 +327,52 @@
         extensions.populateui(u)
 
         repos = findrepos(paths)
-        for prefix, root in u.configitems('collections'):
+        for prefix, root in u.configitems(b'collections'):
             prefix = util.pconvert(prefix)
             for path in scmutil.walkrepos(root, followsym=True):
                 repo = os.path.normpath(path)
                 name = util.pconvert(repo)
                 if name.startswith(prefix):
                     name = name[len(prefix) :]
-                repos.append((name.lstrip('/'), repo))
+                repos.append((name.lstrip(b'/'), repo))
 
         self.repos = repos
         self.ui = u
-        encoding.encoding = self.ui.config('web', 'encoding')
-        self.style = self.ui.config('web', 'style')
-        self.templatepath = self.ui.config('web', 'templates', untrusted=False)
-        self.stripecount = self.ui.config('web', 'stripes')
+        encoding.encoding = self.ui.config(b'web', b'encoding')
+        self.style = self.ui.config(b'web', b'style')
+        self.templatepath = self.ui.config(
+            b'web', b'templates', untrusted=False
+        )
+        self.stripecount = self.ui.config(b'web', b'stripes')
         if self.stripecount:
             self.stripecount = int(self.stripecount)
-        prefix = self.ui.config('web', 'prefix')
-        if prefix.startswith('/'):
+        prefix = self.ui.config(b'web', b'prefix')
+        if prefix.startswith(b'/'):
             prefix = prefix[1:]
-        if prefix.endswith('/'):
+        if prefix.endswith(b'/'):
             prefix = prefix[:-1]
         self.prefix = prefix
         self.lastrefresh = time.time()
 
     def run(self):
-        if not encoding.environ.get('GATEWAY_INTERFACE', '').startswith(
-            "CGI/1."
+        if not encoding.environ.get(b'GATEWAY_INTERFACE', b'').startswith(
+            b"CGI/1."
         ):
             raise RuntimeError(
-                "This function is only intended to be "
-                "called while running as a CGI script."
+                b"This function is only intended to be "
+                b"called while running as a CGI script."
             )
         wsgicgi.launch(self)
 
     def __call__(self, env, respond):
-        baseurl = self.ui.config('web', 'baseurl')
+        baseurl = self.ui.config(b'web', b'baseurl')
         req = requestmod.parserequestfromenv(env, altbaseurl=baseurl)
         res = requestmod.wsgiresponse(req, respond)
 
         return self.run_wsgi(req, res)
 
     def run_wsgi(self, req, res):
-        profile = self.ui.configbool('profiling', 'enabled')
+        profile = self.ui.configbool(b'profiling', b'enabled')
         with profiling.profile(self.ui, enabled=profile):
             try:
                 for r in self._runwsgi(req, res):
@@ -391,28 +393,28 @@
 
             csp, nonce = cspvalues(self.ui)
             if csp:
-                res.headers['Content-Security-Policy'] = csp
+                res.headers[b'Content-Security-Policy'] = csp
 
-            virtual = req.dispatchpath.strip('/')
+            virtual = req.dispatchpath.strip(b'/')
             tmpl = self.templater(req, nonce)
-            ctype = tmpl.render('mimetype', {'encoding': encoding.encoding})
+            ctype = tmpl.render(b'mimetype', {b'encoding': encoding.encoding})
 
             # Global defaults. These can be overridden by any handler.
-            res.status = '200 Script output follows'
-            res.headers['Content-Type'] = ctype
+            res.status = b'200 Script output follows'
+            res.headers[b'Content-Type'] = ctype
 
             # a static file
-            if virtual.startswith('static/') or 'static' in req.qsparams:
-                if virtual.startswith('static/'):
+            if virtual.startswith(b'static/') or b'static' in req.qsparams:
+                if virtual.startswith(b'static/'):
                     fname = virtual[7:]
                 else:
-                    fname = req.qsparams['static']
-                static = self.ui.config("web", "static", untrusted=False)
+                    fname = req.qsparams[b'static']
+                static = self.ui.config(b"web", b"static", untrusted=False)
                 if not static:
                     tp = self.templatepath or templater.templatepaths()
                     if isinstance(tp, str):
                         tp = [tp]
-                    static = [os.path.join(p, 'static') for p in tp]
+                    static = [os.path.join(p, b'static') for p in tp]
 
                 staticfile(static, fname, res)
                 return res.sendresponse()
@@ -421,13 +423,13 @@
 
             repos = dict(self.repos)
 
-            if (not virtual or virtual == 'index') and virtual not in repos:
+            if (not virtual or virtual == b'index') and virtual not in repos:
                 return self.makeindex(req, res, tmpl)
 
             # nested indexes and hgwebs
 
-            if virtual.endswith('/index') and virtual not in repos:
-                subdir = virtual[: -len('index')]
+            if virtual.endswith(b'/index') and virtual not in repos:
+                subdir = virtual[: -len(b'index')]
                 if any(r.startswith(subdir) for r in repos):
                     return self.makeindex(req, res, tmpl, subdir)
 
@@ -450,7 +452,7 @@
                     req = requestmod.parserequestfromenv(
                         uenv,
                         reponame=virtualrepo,
-                        altbaseurl=self.ui.config('web', 'baseurl'),
+                        altbaseurl=self.ui.config(b'web', b'baseurl'),
                         # Reuse wrapped body file object otherwise state
                         # tracking can get confused.
                         bodyfh=req.bodyfh,
@@ -466,40 +468,42 @@
                         raise ErrorResponse(HTTP_SERVER_ERROR, bytes(inst))
 
             # browse subdirectories
-            subdir = virtual + '/'
+            subdir = virtual + b'/'
             if [r for r in repos if r.startswith(subdir)]:
                 return self.makeindex(req, res, tmpl, subdir)
 
             # prefixes not found
-            res.status = '404 Not Found'
-            res.setbodygen(tmpl.generate('notfound', {'repo': virtual}))
+            res.status = b'404 Not Found'
+            res.setbodygen(tmpl.generate(b'notfound', {b'repo': virtual}))
             return res.sendresponse()
 
         except ErrorResponse as e:
             res.status = statusmessage(e.code, pycompat.bytestr(e))
-            res.setbodygen(tmpl.generate('error', {'error': e.message or ''}))
+            res.setbodygen(
+                tmpl.generate(b'error', {b'error': e.message or b''})
+            )
             return res.sendresponse()
         finally:
             tmpl = None
 
-    def makeindex(self, req, res, tmpl, subdir=""):
+    def makeindex(self, req, res, tmpl, subdir=b""):
         self.refresh()
-        sortable = ["name", "description", "contact", "lastchange"]
+        sortable = [b"name", b"description", b"contact", b"lastchange"]
         sortcolumn, descending = None, False
-        if 'sort' in req.qsparams:
-            sortcolumn = req.qsparams['sort']
-            descending = sortcolumn.startswith('-')
+        if b'sort' in req.qsparams:
+            sortcolumn = req.qsparams[b'sort']
+            descending = sortcolumn.startswith(b'-')
             if descending:
                 sortcolumn = sortcolumn[1:]
             if sortcolumn not in sortable:
-                sortcolumn = ""
+                sortcolumn = b""
 
         sort = [
             (
-                "sort_%s" % column,
-                "%s%s"
+                b"sort_%s" % column,
+                b"%s%s"
                 % (
-                    (not descending and column == sortcolumn) and "-" or "",
+                    (not descending and column == sortcolumn) and b"-" or b"",
                     column,
                 ),
             )
@@ -519,14 +523,14 @@
         )
 
         mapping = {
-            'entries': entries,
-            'subdir': subdir,
-            'pathdef': hgweb_mod.makebreadcrumb('/' + subdir, self.prefix),
-            'sortcolumn': sortcolumn,
-            'descending': descending,
+            b'entries': entries,
+            b'subdir': subdir,
+            b'pathdef': hgweb_mod.makebreadcrumb(b'/' + subdir, self.prefix),
+            b'sortcolumn': sortcolumn,
+            b'descending': descending,
         }
         mapping.update(sort)
-        res.setbodygen(tmpl.generate('index', mapping))
+        res.setbodygen(tmpl.generate(b'index', mapping))
         return res.sendresponse()
 
     def templater(self, req, nonce):
@@ -538,35 +542,36 @@
             req, config, self.templatepath
         )
         if style == styles[0]:
-            vars['style'] = style
+            vars[b'style'] = style
 
-        sessionvars = webutil.sessionvars(vars, '?')
-        logourl = config('web', 'logourl')
-        logoimg = config('web', 'logoimg')
+        sessionvars = webutil.sessionvars(vars, b'?')
+        logourl = config(b'web', b'logourl')
+        logoimg = config(b'web', b'logoimg')
         staticurl = (
-            config('web', 'staticurl') or req.apppath.rstrip('/') + '/static/'
+            config(b'web', b'staticurl')
+            or req.apppath.rstrip(b'/') + b'/static/'
         )
-        if not staticurl.endswith('/'):
-            staticurl += '/'
+        if not staticurl.endswith(b'/'):
+            staticurl += b'/'
 
         defaults = {
-            "encoding": encoding.encoding,
-            "url": req.apppath + '/',
-            "logourl": logourl,
-            "logoimg": logoimg,
-            "staticurl": staticurl,
-            "sessionvars": sessionvars,
-            "style": style,
-            "nonce": nonce,
+            b"encoding": encoding.encoding,
+            b"url": req.apppath + b'/',
+            b"logourl": logourl,
+            b"logoimg": logoimg,
+            b"staticurl": staticurl,
+            b"sessionvars": sessionvars,
+            b"style": style,
+            b"nonce": nonce,
         }
         templatekeyword = registrar.templatekeyword(defaults)
 
-        @templatekeyword('motd', requires=())
+        @templatekeyword(b'motd', requires=())
         def motd(context, mapping):
             if self.motd is not None:
                 yield self.motd
             else:
-                yield config('web', 'motd')
+                yield config(b'web', b'motd')
 
         tmpl = templater.templater.frommapfile(mapfile, defaults=defaults)
         return tmpl
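
The makeindex hunk above builds one b'sort_<column>' template variable per
sortable column, prefixing b'-' when a click on that column should flip the
current ascending sort to descending. The same expression, runnable in
isolation with example inputs:

    sortable = [b"name", b"description", b"contact", b"lastchange"]
    sortcolumn, descending = b"name", False

    sort = [
        (
            b"sort_%s" % column,
            b"%s%s"
            % (
                (not descending and column == sortcolumn) and b"-" or b"",
                column,
            ),
        )
        for column in sortable
    ]
    # sort_name maps to b'-name'; every other column maps to itself
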
--- a/mercurial/hgweb/request.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/hgweb/request.py	Sun Oct 06 09:48:39 2019 -0400
@@ -69,7 +69,7 @@
         vals = self._items[key]
 
         if len(vals) > 1:
-            raise KeyError('multiple values for %r' % key)
+            raise KeyError(b'multiple values for %r' % key)
 
         return vals[0]
 
@@ -172,7 +172,7 @@
     # to an hgweb instance using this environment variable.  This was always
     # checked prior to d7fd203e36cc; keep doing so to avoid breaking them.
     if not reponame:
-        reponame = env.get('REPO_NAME')
+        reponame = env.get(b'REPO_NAME')
 
     if altbaseurl:
         altbaseurl = util.url(altbaseurl)
@@ -181,114 +181,114 @@
     # the environment variables.
     # https://www.python.org/dev/peps/pep-0333/#url-reconstruction defines
     # how URLs are reconstructed.
-    fullurl = env['wsgi.url_scheme'] + '://'
+    fullurl = env[b'wsgi.url_scheme'] + b'://'
 
     if altbaseurl and altbaseurl.scheme:
-        advertisedfullurl = altbaseurl.scheme + '://'
+        advertisedfullurl = altbaseurl.scheme + b'://'
     else:
         advertisedfullurl = fullurl
 
     def addport(s, port):
-        if s.startswith('https://'):
-            if port != '443':
-                s += ':' + port
+        if s.startswith(b'https://'):
+            if port != b'443':
+                s += b':' + port
         else:
-            if port != '80':
-                s += ':' + port
+            if port != b'80':
+                s += b':' + port
 
         return s
 
-    if env.get('HTTP_HOST'):
-        fullurl += env['HTTP_HOST']
+    if env.get(b'HTTP_HOST'):
+        fullurl += env[b'HTTP_HOST']
     else:
-        fullurl += env['SERVER_NAME']
-        fullurl = addport(fullurl, env['SERVER_PORT'])
+        fullurl += env[b'SERVER_NAME']
+        fullurl = addport(fullurl, env[b'SERVER_PORT'])
 
     if altbaseurl and altbaseurl.host:
         advertisedfullurl += altbaseurl.host
 
         if altbaseurl.port:
             port = altbaseurl.port
-        elif altbaseurl.scheme == 'http' and not altbaseurl.port:
-            port = '80'
-        elif altbaseurl.scheme == 'https' and not altbaseurl.port:
-            port = '443'
+        elif altbaseurl.scheme == b'http' and not altbaseurl.port:
+            port = b'80'
+        elif altbaseurl.scheme == b'https' and not altbaseurl.port:
+            port = b'443'
         else:
-            port = env['SERVER_PORT']
+            port = env[b'SERVER_PORT']
 
         advertisedfullurl = addport(advertisedfullurl, port)
     else:
-        advertisedfullurl += env['SERVER_NAME']
-        advertisedfullurl = addport(advertisedfullurl, env['SERVER_PORT'])
+        advertisedfullurl += env[b'SERVER_NAME']
+        advertisedfullurl = addport(advertisedfullurl, env[b'SERVER_PORT'])
 
     baseurl = fullurl
     advertisedbaseurl = advertisedfullurl
 
-    fullurl += util.urlreq.quote(env.get('SCRIPT_NAME', ''))
-    fullurl += util.urlreq.quote(env.get('PATH_INFO', ''))
+    fullurl += util.urlreq.quote(env.get(b'SCRIPT_NAME', b''))
+    fullurl += util.urlreq.quote(env.get(b'PATH_INFO', b''))
 
     if altbaseurl:
-        path = altbaseurl.path or ''
-        if path and not path.startswith('/'):
-            path = '/' + path
+        path = altbaseurl.path or b''
+        if path and not path.startswith(b'/'):
+            path = b'/' + path
         advertisedfullurl += util.urlreq.quote(path)
     else:
-        advertisedfullurl += util.urlreq.quote(env.get('SCRIPT_NAME', ''))
+        advertisedfullurl += util.urlreq.quote(env.get(b'SCRIPT_NAME', b''))
 
-    advertisedfullurl += util.urlreq.quote(env.get('PATH_INFO', ''))
+    advertisedfullurl += util.urlreq.quote(env.get(b'PATH_INFO', b''))
 
-    if env.get('QUERY_STRING'):
-        fullurl += '?' + env['QUERY_STRING']
-        advertisedfullurl += '?' + env['QUERY_STRING']
+    if env.get(b'QUERY_STRING'):
+        fullurl += b'?' + env[b'QUERY_STRING']
+        advertisedfullurl += b'?' + env[b'QUERY_STRING']
 
     # If ``reponame`` is defined, that must be a prefix on PATH_INFO
     # that represents the repository being dispatched to. When computing
     # the dispatch info, we ignore these leading path components.
 
     if altbaseurl:
-        apppath = altbaseurl.path or ''
-        if apppath and not apppath.startswith('/'):
-            apppath = '/' + apppath
+        apppath = altbaseurl.path or b''
+        if apppath and not apppath.startswith(b'/'):
+            apppath = b'/' + apppath
     else:
-        apppath = env.get('SCRIPT_NAME', '')
+        apppath = env.get(b'SCRIPT_NAME', b'')
 
     if reponame:
-        repoprefix = '/' + reponame.strip('/')
+        repoprefix = b'/' + reponame.strip(b'/')
 
-        if not env.get('PATH_INFO'):
-            raise error.ProgrammingError('reponame requires PATH_INFO')
+        if not env.get(b'PATH_INFO'):
+            raise error.ProgrammingError(b'reponame requires PATH_INFO')
 
-        if not env['PATH_INFO'].startswith(repoprefix):
+        if not env[b'PATH_INFO'].startswith(repoprefix):
             raise error.ProgrammingError(
-                'PATH_INFO does not begin with repo '
-                'name: %s (%s)' % (env['PATH_INFO'], reponame)
+                b'PATH_INFO does not begin with repo '
+                b'name: %s (%s)' % (env[b'PATH_INFO'], reponame)
             )
 
-        dispatchpath = env['PATH_INFO'][len(repoprefix) :]
+        dispatchpath = env[b'PATH_INFO'][len(repoprefix) :]
 
-        if dispatchpath and not dispatchpath.startswith('/'):
+        if dispatchpath and not dispatchpath.startswith(b'/'):
             raise error.ProgrammingError(
-                'reponame prefix of PATH_INFO does '
-                'not end at path delimiter: %s (%s)'
-                % (env['PATH_INFO'], reponame)
+                b'reponame prefix of PATH_INFO does '
+                b'not end at path delimiter: %s (%s)'
+                % (env[b'PATH_INFO'], reponame)
             )
 
-        apppath = apppath.rstrip('/') + repoprefix
-        dispatchparts = dispatchpath.strip('/').split('/')
-        dispatchpath = '/'.join(dispatchparts)
+        apppath = apppath.rstrip(b'/') + repoprefix
+        dispatchparts = dispatchpath.strip(b'/').split(b'/')
+        dispatchpath = b'/'.join(dispatchparts)
 
-    elif 'PATH_INFO' in env:
-        if env['PATH_INFO'].strip('/'):
-            dispatchparts = env['PATH_INFO'].strip('/').split('/')
-            dispatchpath = '/'.join(dispatchparts)
+    elif b'PATH_INFO' in env:
+        if env[b'PATH_INFO'].strip(b'/'):
+            dispatchparts = env[b'PATH_INFO'].strip(b'/').split(b'/')
+            dispatchpath = b'/'.join(dispatchparts)
         else:
             dispatchparts = []
-            dispatchpath = ''
+            dispatchpath = b''
     else:
         dispatchparts = []
         dispatchpath = None
 
-    querystring = env.get('QUERY_STRING', '')
+    querystring = env.get(b'QUERY_STRING', b'')
 
     # We store as a list so we have ordering information. We also store as
     # a dict to facilitate fast lookup.
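
The addport() helper above encodes the PEP 3333 URL-reconstruction rule that
default ports are omitted: 443 for https, 80 for everything else. Extracted
as-is, with example inputs:

    def addport(s, port):
        if s.startswith(b'https://'):
            if port != b'443':
                s += b':' + port
        else:
            if port != b'80':
                s += b':' + port
        return s

    assert addport(b'https://example.com', b'443') == b'https://example.com'
    assert addport(b'http://example.com', b'8080') == b'http://example.com:8080'
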
@@ -301,8 +301,8 @@
     # so keys match what likely went over the wire.
     headers = []
     for k, v in env.iteritems():
-        if k.startswith('HTTP_'):
-            headers.append((k[len('HTTP_') :].replace('_', '-'), v))
+        if k.startswith(b'HTTP_'):
+            headers.append((k[len(b'HTTP_') :].replace(b'_', b'-'), v))
 
     from . import wsgiheaders  # avoid cycle
 
@@ -312,28 +312,28 @@
     # sent. But for all intents and purposes it should be OK to lie about
     # this, since a consumer will use either value to determine how many
     # bytes are available to read.
-    if 'CONTENT_LENGTH' in env and 'HTTP_CONTENT_LENGTH' not in env:
-        headers['Content-Length'] = env['CONTENT_LENGTH']
+    if b'CONTENT_LENGTH' in env and b'HTTP_CONTENT_LENGTH' not in env:
+        headers[b'Content-Length'] = env[b'CONTENT_LENGTH']
 
-    if 'CONTENT_TYPE' in env and 'HTTP_CONTENT_TYPE' not in env:
-        headers['Content-Type'] = env['CONTENT_TYPE']
+    if b'CONTENT_TYPE' in env and b'HTTP_CONTENT_TYPE' not in env:
+        headers[b'Content-Type'] = env[b'CONTENT_TYPE']
 
     if bodyfh is None:
-        bodyfh = env['wsgi.input']
-        if 'Content-Length' in headers:
+        bodyfh = env[b'wsgi.input']
+        if b'Content-Length' in headers:
             bodyfh = util.cappedreader(
-                bodyfh, int(headers['Content-Length'] or '0')
+                bodyfh, int(headers[b'Content-Length'] or b'0')
             )
 
     return parsedrequest(
-        method=env['REQUEST_METHOD'],
+        method=env[b'REQUEST_METHOD'],
         url=fullurl,
         baseurl=baseurl,
         advertisedurl=advertisedfullurl,
         advertisedbaseurl=advertisedbaseurl,
-        urlscheme=env['wsgi.url_scheme'],
-        remoteuser=env.get('REMOTE_USER'),
-        remotehost=env.get('REMOTE_HOST'),
+        urlscheme=env[b'wsgi.url_scheme'],
+        remoteuser=env.get(b'REMOTE_USER'),
+        remotehost=env.get(b'REMOTE_HOST'),
         apppath=apppath,
         dispatchparts=dispatchparts,
         dispatchpath=dispatchpath,
@@ -421,7 +421,7 @@
             or self._bodygen is not None
             or self._bodywillwrite
         ):
-            raise error.ProgrammingError('cannot define body multiple times')
+            raise error.ProgrammingError(b'cannot define body multiple times')
 
     def setbodybytes(self, b):
         """Define the response body as static bytes.
@@ -430,7 +430,7 @@
         """
         self._verifybody()
         self._bodybytes = b
-        self.headers['Content-Length'] = '%d' % len(b)
+        self.headers[b'Content-Length'] = b'%d' % len(b)
 
     def setbodygen(self, gen):
         """Define the response body as a generator of bytes."""
@@ -463,19 +463,21 @@
         Calling this method multiple times is not allowed.
         """
         if self._started:
-            raise error.ProgrammingError('sendresponse() called multiple times')
+            raise error.ProgrammingError(
+                b'sendresponse() called multiple times'
+            )
 
         self._started = True
 
         if not self.status:
-            raise error.ProgrammingError('status line not defined')
+            raise error.ProgrammingError(b'status line not defined')
 
         if (
             self._bodybytes is None
             and self._bodygen is None
             and not self._bodywillwrite
         ):
-            raise error.ProgrammingError('response body not defined')
+            raise error.ProgrammingError(b'response body not defined')
 
         # RFC 7232 Section 4.1 states that a 304 MUST generate one of
         # {Cache-Control, Content-Location, Date, ETag, Expires, Vary}
@@ -484,11 +486,11 @@
         # states that no response body can be issued. Content-Length can
         # be sent. But if it is present, it should be the size of the response
         # that wasn't transferred.
-        if self.status.startswith('304 '):
+        if self.status.startswith(b'304 '):
             # setbodybytes('') will set C-L to 0. This doesn't conform with the
             # spec. So remove it.
-            if self.headers.get('Content-Length') == '0':
-                del self.headers['Content-Length']
+            if self.headers.get(b'Content-Length') == b'0':
+                del self.headers[b'Content-Length']
 
             # Strictly speaking, this is too strict. But until it causes
             # problems, let's be strict.
@@ -497,24 +499,24 @@
                 for k in self.headers.keys()
                 if k.lower()
                 not in (
-                    'date',
-                    'etag',
-                    'expires',
-                    'cache-control',
-                    'content-location',
-                    'content-security-policy',
-                    'vary',
+                    b'date',
+                    b'etag',
+                    b'expires',
+                    b'cache-control',
+                    b'content-location',
+                    b'content-security-policy',
+                    b'vary',
                 )
             }
             if badheaders:
                 raise error.ProgrammingError(
-                    'illegal header on 304 response: %s'
-                    % ', '.join(sorted(badheaders))
+                    b'illegal header on 304 response: %s'
+                    % b', '.join(sorted(badheaders))
                 )
 
             if self._bodygen is not None or self._bodywillwrite:
                 raise error.ProgrammingError(
-                    "must use setbodybytes('') with " "304 responses"
+                    b"must use setbodybytes('') with " b"304 responses"
                 )
 
         # Various HTTP clients (notably httplib) won't read the HTTP response
@@ -530,12 +532,12 @@
         # If the client sent Expect: 100-continue, we assume it is smart enough
         # to deal with the server sending a response before reading the request.
         # (httplib doesn't do this.)
-        if self._req.headers.get('Expect', '').lower() == '100-continue':
+        if self._req.headers.get(b'Expect', b'').lower() == b'100-continue':
             pass
         # Only tend to request methods that have bodies. Strictly speaking,
         # we should sniff for a body. But this is fine for our existing
         # WSGI applications.
-        elif self._req.method not in ('POST', 'PUT'):
+        elif self._req.method not in (b'POST', b'PUT'):
             pass
         else:
             # If we don't know how much data to read, there's no guarantee
@@ -555,7 +557,7 @@
                 drain = True
 
         if close:
-            self.headers['Connection'] = 'Close'
+            self.headers[b'Connection'] = b'Close'
 
         if drain:
             assert isinstance(self._req.bodyfh, util.cappedreader)
@@ -584,7 +586,7 @@
         elif self._bodywillwrite:
             self._bodywritefn = write
         else:
-            error.ProgrammingError('do not know how to send body')
+            raise error.ProgrammingError(b'do not know how to send body')
 
     def getbodyfile(self):
         """Obtain a file object like object representing the response body.
@@ -597,13 +599,13 @@
         ``[]``.
         """
         if not self._bodywillwrite:
-            raise error.ProgrammingError('must call setbodywillwrite() first')
+            raise error.ProgrammingError(b'must call setbodywillwrite() first')
 
         if not self._started:
             raise error.ProgrammingError(
-                'must call sendresponse() first; did '
-                'you remember to consume it since it '
-                'is a generator?'
+                b'must call sendresponse() first; did '
+                b'you remember to consume it since it '
+                b'is a generator?'
             )
 
         assert self._bodywritefn
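
The keep-alive handling in sendresponse() above boils down to a three-way
decision: clients that sent Expect: 100-continue are trusted to cope, only
POST and PUT are assumed to carry bodies, a Content-Length-capped body can
be drained, and anything else forces the connection closed. A condensed
sketch (the helper name is illustrative):

    def keepaliveplan(method, expect, body_is_capped):
        drain = close = False
        if expect.lower() == b'100-continue':
            pass
        elif method not in (b'POST', b'PUT'):
            pass
        elif body_is_capped:
            drain = True
        else:
            close = True
        return drain, close

    assert keepaliveplan(b'GET', b'', False) == (False, False)
    assert keepaliveplan(b'POST', b'', True) == (True, False)
    assert keepaliveplan(b'POST', b'', False) == (False, True)
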
--- a/mercurial/hgweb/server.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/hgweb/server.py	Sun Oct 06 09:48:39 2019 -0400
@@ -54,7 +54,7 @@
         pass
 
     def write(self, str):
-        self.writelines(str.split('\n'))
+        self.writelines(str.split(b'\n'))
 
     def writelines(self, seq):
         for msg in seq:
@@ -63,7 +63,7 @@
 
 class _httprequesthandler(httpservermod.basehttprequesthandler):
 
-    url_scheme = 'http'
+    url_scheme = b'http'
 
     @staticmethod
     def preparehttpserver(httpserver, ui):
@@ -83,7 +83,7 @@
                     format % args,
                 )
             )
-            + '\n'
+            + b'\n'
         )
         fp.flush()
 
@@ -95,7 +95,7 @@
 
     def log_request(self, code=r'-', size=r'-'):
         xheaders = []
-        if util.safehasattr(self, 'headers'):
+        if util.safehasattr(self, b'headers'):
             xheaders = [
                 h for h in self.headers.items() if h[0].startswith(r'x-')
             ]
@@ -156,7 +156,7 @@
             self.server.prefix + b'/'
         ):
             self._start_response(pycompat.strurl(common.statusmessage(404)), [])
-            if self.command == 'POST':
+            if self.command == b'POST':
                 # Paranoia: tell the client we're going to close the
                 # socket so they don't try and reuse a socket that
                 # might have a POST body waiting to confuse us. We do
@@ -206,7 +206,7 @@
         env[r'SERVER_PROTOCOL'] = self.request_version
         env[r'wsgi.version'] = (1, 0)
         env[r'wsgi.url_scheme'] = pycompat.sysstr(self.url_scheme)
-        if env.get(r'HTTP_EXPECT', '').lower() == '100-continue':
+        if env.get(r'HTTP_EXPECT', b'').lower() == b'100-continue':
             self.rfile = common.continuereader(self.rfile, self.wfile.write)
 
         env[r'wsgi.input'] = self.rfile
@@ -214,7 +214,7 @@
         env[r'wsgi.multithread'] = isinstance(
             self.server, socketserver.ThreadingMixIn
         )
-        if util.safehasattr(socketserver, 'ForkingMixIn'):
+        if util.safehasattr(socketserver, b'ForkingMixIn'):
             env[r'wsgi.multiprocess'] = isinstance(
                 self.server, socketserver.ForkingMixIn
             )
@@ -238,7 +238,7 @@
     def send_headers(self):
         if not self.saved_status:
             raise AssertionError(
-                "Sending headers before " "start_response() called"
+                b"Sending headers before " b"start_response() called"
             )
         saved_status = self.saved_status.split(None, 1)
         saved_status[0] = int(saved_status[0])
@@ -274,24 +274,24 @@
 
     def _write(self, data):
         if not self.saved_status:
-            raise AssertionError("data written before start_response() called")
+            raise AssertionError(b"data written before start_response() called")
         elif not self.sent_headers:
             self.send_headers()
         if self.length is not None:
             if len(data) > self.length:
                 raise AssertionError(
-                    "Content-length header sent, but more "
-                    "bytes than specified are being written."
+                    b"Content-length header sent, but more "
+                    b"bytes than specified are being written."
                 )
             self.length = self.length - len(data)
         elif self._chunked and data:
-            data = '%x\r\n%s\r\n' % (len(data), data)
+            data = b'%x\r\n%s\r\n' % (len(data), data)
         self.wfile.write(data)
         self.wfile.flush()
 
     def _done(self):
         if self._chunked:
-            self.wfile.write('0\r\n\r\n')
+            self.wfile.write(b'0\r\n\r\n')
             self.wfile.flush()
 
     def version_string(self):
@@ -303,7 +303,7 @@
 class _httprequesthandlerssl(_httprequesthandler):
     """HTTPS handler based on Python's ssl module"""
 
-    url_scheme = 'https'
+    url_scheme = b'https'
 
     @staticmethod
     def preparehttpserver(httpserver, ui):
@@ -312,14 +312,14 @@
 
             sslutil.modernssl
         except ImportError:
-            raise error.Abort(_("SSL support is unavailable"))
+            raise error.Abort(_(b"SSL support is unavailable"))
 
-        certfile = ui.config('web', 'certificate')
+        certfile = ui.config(b'web', b'certificate')
 
         # These config options are currently only meant for testing. Use
         # at your own risk.
-        cafile = ui.config('devel', 'servercafile')
-        reqcert = ui.configbool('devel', 'serverrequirecert')
+        cafile = ui.config(b'devel', b'servercafile')
+        reqcert = ui.configbool(b'devel', b'serverrequirecert')
 
         httpserver.socket = sslutil.wrapserversocket(
             httpserver.socket,
@@ -341,7 +341,7 @@
     threading.activeCount()  # silence pyflakes and bypass demandimport
     _mixin = socketserver.ThreadingMixIn
 except ImportError:
-    if util.safehasattr(os, "fork"):
+    if util.safehasattr(os, b"fork"):
         _mixin = socketserver.ForkingMixIn
     else:
 
@@ -350,8 +350,8 @@
 
 
 def openlog(opt, default):
-    if opt and opt != '-':
-        return open(opt, 'ab')
+    if opt and opt != b'-':
+        return open(opt, b'ab')
     return default
 
 
@@ -368,20 +368,20 @@
 
         handler.preparehttpserver(self, ui)
 
-        prefix = ui.config('web', 'prefix')
+        prefix = ui.config(b'web', b'prefix')
         if prefix:
-            prefix = '/' + prefix.strip('/')
+            prefix = b'/' + prefix.strip(b'/')
         self.prefix = prefix
 
-        alog = openlog(ui.config('web', 'accesslog'), ui.fout)
-        elog = openlog(ui.config('web', 'errorlog'), ui.ferr)
+        alog = openlog(ui.config(b'web', b'accesslog'), ui.fout)
+        elog = openlog(ui.config(b'web', b'errorlog'), ui.ferr)
         self.accesslog = alog
         self.errorlog = elog
 
         self.addr, self.port = self.socket.getsockname()[0:2]
         self.fqaddr = socket.getfqdn(addr[0])
 
-        self.serverheader = ui.config('web', 'server-header')
+        self.serverheader = ui.config(b'web', b'server-header')
 
 
 class IPv6HTTPServer(MercurialHTTPServer):
@@ -389,18 +389,18 @@
 
     def __init__(self, *args, **kwargs):
         if self.address_family is None:
-            raise error.RepoError(_('IPv6 is not available on this system'))
+            raise error.RepoError(_(b'IPv6 is not available on this system'))
         super(IPv6HTTPServer, self).__init__(*args, **kwargs)
 
 
 def create_server(ui, app):
 
-    if ui.config('web', 'certificate'):
+    if ui.config(b'web', b'certificate'):
         handler = _httprequesthandlerssl
     else:
         handler = _httprequesthandler
 
-    if ui.configbool('web', 'ipv6'):
+    if ui.configbool(b'web', b'ipv6'):
         cls = IPv6HTTPServer
     else:
         cls = MercurialHTTPServer
@@ -423,16 +423,16 @@
         except AttributeError:
             reload(sys)
         oldenc = sys.getdefaultencoding()
-        sys.setdefaultencoding("latin1")  # or any full 8-bit encoding
+        sys.setdefaultencoding(b"latin1")  # or any full 8-bit encoding
         mimetypes.init()
         sys.setdefaultencoding(oldenc)
 
-    address = ui.config('web', 'address')
-    port = util.getport(ui.config('web', 'port'))
+    address = ui.config(b'web', b'address')
+    port = util.getport(ui.config(b'web', b'port'))
     try:
         return cls(ui, app, (address, port), handler)
     except socket.error as inst:
         raise error.Abort(
-            _("cannot start server at '%s:%d': %s")
+            _(b"cannot start server at '%s:%d': %s")
             % (address, port, encoding.strtolocal(inst.args[1]))
         )
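
_write() and _done() above emit RFC 7230 chunked framing: each chunk is its
length in hex, CRLF, the payload, CRLF, and a zero-length chunk terminates
the body. The framing in isolation:

    def framechunk(data):
        # b'5\r\nhello\r\n' for b'hello'
        return b'%x\r\n%s\r\n' % (len(data), data)

    assert framechunk(b'hello') == b'5\r\nhello\r\n'
    terminator = b'0\r\n\r\n'
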
--- a/mercurial/hgweb/webcommands.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/hgweb/webcommands.py	Sun Oct 06 09:48:39 2019 -0400
@@ -79,7 +79,7 @@
         return func
 
 
-@webcommand('log')
+@webcommand(b'log')
 def log(web):
     """
     /log[/{revision}[/{path}]]
@@ -96,17 +96,17 @@
     file will be shown. This form is equivalent to the ``filelog`` handler.
     """
 
-    if web.req.qsparams.get('file'):
+    if web.req.qsparams.get(b'file'):
         return filelog(web)
     else:
         return changelog(web)
 
 
-@webcommand('rawfile')
+@webcommand(b'rawfile')
 def rawfile(web):
-    guessmime = web.configbool('web', 'guessmime')
+    guessmime = web.configbool(b'web', b'guessmime')
 
-    path = webutil.cleanpath(web.repo, web.req.qsparams.get('file', ''))
+    path = webutil.cleanpath(web.repo, web.req.qsparams.get(b'file', b''))
     if not path:
         return manifest(web)
 
@@ -120,25 +120,27 @@
 
     path = fctx.path()
     text = fctx.data()
-    mt = 'application/binary'
+    mt = b'application/binary'
     if guessmime:
         mt = mimetypes.guess_type(pycompat.fsdecode(path))[0]
         if mt is None:
             if stringutil.binary(text):
-                mt = 'application/binary'
+                mt = b'application/binary'
             else:
-                mt = 'text/plain'
+                mt = b'text/plain'
         else:
             mt = pycompat.sysbytes(mt)
 
-    if mt.startswith('text/'):
-        mt += '; charset="%s"' % encoding.encoding
+    if mt.startswith(b'text/'):
+        mt += b'; charset="%s"' % encoding.encoding
 
-    web.res.headers['Content-Type'] = mt
+    web.res.headers[b'Content-Type'] = mt
     filename = (
-        path.rpartition('/')[-1].replace('\\', '\\\\').replace('"', '\\"')
+        path.rpartition(b'/')[-1].replace(b'\\', b'\\\\').replace(b'"', b'\\"')
     )
-    web.res.headers['Content-Disposition'] = 'inline; filename="%s"' % filename
+    web.res.headers[b'Content-Disposition'] = (
+        b'inline; filename="%s"' % filename
+    )
     web.res.setbodybytes(text)
     return web.res.sendresponse()
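
rawfile above picks a Content-Type in three steps: guess from the filename,
fall back to a binary sniff when the guess fails, and append the server
charset for text/* types. A reduced sketch in which a NUL-byte test stands
in for stringutil.binary and the function name is illustrative:

    import mimetypes

    def rawfilemimetype(path, text, charset=b'ascii', guessmime=True):
        mt = b'application/binary'
        if guessmime:
            guessed = mimetypes.guess_type(path.decode('latin1'))[0]
            if guessed is None:
                mt = b'application/binary' if b'\0' in text else b'text/plain'
            else:
                mt = guessed.encode('ascii')
        if mt.startswith(b'text/'):
            mt += b'; charset="%s"' % charset
        return mt

    assert rawfilemimetype(b'a.txt', b'hi') == b'text/plain; charset="ascii"'
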
 
@@ -154,19 +156,19 @@
             mimetypes.guess_type(pycompat.fsdecode(f))[0]
             or r'application/octet-stream'
         )
-        text = '(binary:%s)' % mt
+        text = b'(binary:%s)' % mt
 
     def lines(context):
         for lineno, t in enumerate(text.splitlines(True)):
             yield {
-                "line": t,
-                "lineid": "l%d" % (lineno + 1),
-                "linenumber": "% 6d" % (lineno + 1),
-                "parity": next(parity),
+                b"line": t,
+                b"lineid": b"l%d" % (lineno + 1),
+                b"linenumber": b"% 6d" % (lineno + 1),
+                b"parity": next(parity),
             }
 
     return web.sendtemplate(
-        'filerevision',
+        b'filerevision',
         file=f,
         path=webutil.up(f),
         text=templateutil.mappinggenerator(lines),
@@ -178,7 +180,7 @@
     )
 
 
-@webcommand('file')
+@webcommand(b'file')
 def file(web):
     """
     /file/{revision}[/{path}]
@@ -198,10 +200,10 @@
     If ``path`` is not defined, information about the root directory will
     be rendered.
     """
-    if web.req.qsparams.get('style') == 'raw':
+    if web.req.qsparams.get(b'style') == b'raw':
         return rawfile(web)
 
-    path = webutil.cleanpath(web.repo, web.req.qsparams.get('file', ''))
+    path = webutil.cleanpath(web.repo, web.req.qsparams.get(b'file', b''))
     if not path:
         return manifest(web)
     try:
@@ -214,9 +216,9 @@
 
 
 def _search(web):
-    MODE_REVISION = 'rev'
-    MODE_KEYWORD = 'keyword'
-    MODE_REVSET = 'revset'
+    MODE_REVISION = b'rev'
+    MODE_KEYWORD = b'keyword'
+    MODE_REVSET = b'revset'
 
     def revsearch(ctx):
         yield ctx
@@ -242,7 +244,7 @@
                 if not (
                     q in lower(ctx.user())
                     or q in lower(ctx.description())
-                    or q in lower(" ".join(ctx.files()))
+                    or q in lower(b" ".join(ctx.files()))
                 ):
                     miss = 1
                     break
@@ -256,9 +258,9 @@
             yield web.repo[r]
 
     searchfuncs = {
-        MODE_REVISION: (revsearch, 'exact revision search'),
-        MODE_KEYWORD: (keywordsearch, 'literal keyword search'),
-        MODE_REVSET: (revsetsearch, 'revset expression search'),
+        MODE_REVISION: (revsearch, b'exact revision search'),
+        MODE_KEYWORD: (keywordsearch, b'literal keyword search'),
+        MODE_REVSET: (revsetsearch, b'revset expression search'),
     }
 
     def getsearchmode(query):
@@ -271,7 +273,7 @@
         else:
             return MODE_REVISION, ctx
 
-        revdef = 'reverse(%s)' % query
+        revdef = b'reverse(%s)' % query
         try:
             tree = revsetlang.parse(revdef)
         except error.ParseError:
@@ -283,7 +285,7 @@
             return MODE_KEYWORD, query
 
         if any(
-            (token, (value or '')[:3]) == ('string', 're:')
+            (token, (value or b'')[:3]) == (b'string', b're:')
             for token, value, pos in revsetlang.tokenize(revdef)
         ):
             return MODE_KEYWORD, query
@@ -316,15 +318,15 @@
         for ctx in searchfunc[0](funcarg):
             count += 1
             n = scmutil.binnode(ctx)
-            showtags = webutil.showtag(web.repo, 'changelogtag', n)
+            showtags = webutil.showtag(web.repo, b'changelogtag', n)
             files = webutil.listfilediffs(ctx.files(), n, web.maxfiles)
 
             lm = webutil.commonentry(web.repo, ctx)
             lm.update(
                 {
-                    'parity': next(parity),
-                    'changelogtag': showtags,
-                    'files': files,
+                    b'parity': next(parity),
+                    b'changelogtag': showtags,
+                    b'files': files,
                 }
             )
             yield lm
@@ -332,27 +334,27 @@
             if count >= revcount:
                 break
 
-    query = web.req.qsparams['rev']
+    query = web.req.qsparams[b'rev']
     revcount = web.maxchanges
-    if 'revcount' in web.req.qsparams:
+    if b'revcount' in web.req.qsparams:
         try:
-            revcount = int(web.req.qsparams.get('revcount', revcount))
+            revcount = int(web.req.qsparams.get(b'revcount', revcount))
             revcount = max(revcount, 1)
-            web.tmpl.defaults['sessionvars']['revcount'] = revcount
+            web.tmpl.defaults[b'sessionvars'][b'revcount'] = revcount
         except ValueError:
             pass
 
-    lessvars = copy.copy(web.tmpl.defaults['sessionvars'])
-    lessvars['revcount'] = max(revcount // 2, 1)
-    lessvars['rev'] = query
-    morevars = copy.copy(web.tmpl.defaults['sessionvars'])
-    morevars['revcount'] = revcount * 2
-    morevars['rev'] = query
+    lessvars = copy.copy(web.tmpl.defaults[b'sessionvars'])
+    lessvars[b'revcount'] = max(revcount // 2, 1)
+    lessvars[b'rev'] = query
+    morevars = copy.copy(web.tmpl.defaults[b'sessionvars'])
+    morevars[b'revcount'] = revcount * 2
+    morevars[b'rev'] = query
 
     mode, funcarg = getsearchmode(query)
 
-    if 'forcekw' in web.req.qsparams:
-        showforcekw = ''
+    if b'forcekw' in web.req.qsparams:
+        showforcekw = b''
         showunforcekw = searchfuncs[mode][1]
         mode = MODE_KEYWORD
         funcarg = query
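
The revcount handling above (clamp to at least 1, silently ignore junk,
halve for the "less" link, double for "more") recurs in several handlers
below. A hypothetical standalone helper capturing the same rules
(parse_revcount is illustrative, not an hgweb API):

    def parse_revcount(qsparams, default):
        # clamp a user-supplied revcount to >= 1; fall back to the
        # default when the value is missing or not an integer
        try:
            return max(int(qsparams.get(b'revcount', default)), 1)
        except ValueError:
            return default

    revcount = parse_revcount({b'revcount': b'0'}, 60)
    assert (revcount, max(revcount // 2, 1), revcount * 2) == (1, 1, 2)
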
@@ -360,21 +362,21 @@
         if mode != MODE_KEYWORD:
             showforcekw = searchfuncs[MODE_KEYWORD][1]
         else:
-            showforcekw = ''
-        showunforcekw = ''
+            showforcekw = b''
+        showunforcekw = b''
 
     searchfunc = searchfuncs[mode]
 
-    tip = web.repo['tip']
+    tip = web.repo[b'tip']
     parity = paritygen(web.stripecount)
 
     return web.sendtemplate(
-        'search',
+        b'search',
         query=query,
         node=tip.hex(),
-        symrev='tip',
-        entries=templateutil.mappinggenerator(changelist, name='searchentry'),
-        archives=web.archivelist('tip'),
+        symrev=b'tip',
+        entries=templateutil.mappinggenerator(changelist, name=b'searchentry'),
+        archives=web.archivelist(b'tip'),
         morevars=morevars,
         lessvars=lessvars,
         modedesc=searchfunc[1],
@@ -383,7 +385,7 @@
     )
 
 
-@webcommand('changelog')
+@webcommand(b'changelog')
 def changelog(web, shortlog=False):
     """
     /changelog[/{revision}]
@@ -409,15 +411,15 @@
     For non-searches, the ``changelog`` template will be rendered.
     """
 
-    query = ''
-    if 'node' in web.req.qsparams:
+    query = b''
+    if b'node' in web.req.qsparams:
         ctx = webutil.changectx(web.repo, web.req)
         symrev = webutil.symrevorshortnode(web.req, ctx)
-    elif 'rev' in web.req.qsparams:
+    elif b'rev' in web.req.qsparams:
         return _search(web)
     else:
-        ctx = web.repo['tip']
-        symrev = 'tip'
+        ctx = web.repo[b'tip']
+        symrev = b'tip'
 
     def changelist(maxcount):
         revs = []
@@ -432,18 +434,18 @@
     else:
         revcount = web.maxchanges
 
-    if 'revcount' in web.req.qsparams:
+    if b'revcount' in web.req.qsparams:
         try:
-            revcount = int(web.req.qsparams.get('revcount', revcount))
+            revcount = int(web.req.qsparams.get(b'revcount', revcount))
             revcount = max(revcount, 1)
-            web.tmpl.defaults['sessionvars']['revcount'] = revcount
+            web.tmpl.defaults[b'sessionvars'][b'revcount'] = revcount
         except ValueError:
             pass
 
-    lessvars = copy.copy(web.tmpl.defaults['sessionvars'])
-    lessvars['revcount'] = max(revcount // 2, 1)
-    morevars = copy.copy(web.tmpl.defaults['sessionvars'])
-    morevars['revcount'] = revcount * 2
+    lessvars = copy.copy(web.tmpl.defaults[b'sessionvars'])
+    lessvars[b'revcount'] = max(revcount // 2, 1)
+    morevars = copy.copy(web.tmpl.defaults[b'sessionvars'])
+    morevars[b'revcount'] = revcount * 2
 
     count = len(web.repo)
     pos = ctx.rev()
@@ -460,7 +462,7 @@
         nextentry = []
 
     return web.sendtemplate(
-        'shortlog' if shortlog else 'changelog',
+        b'shortlog' if shortlog else b'changelog',
         changenav=changenav,
         node=ctx.hex(),
         rev=pos,
@@ -469,7 +471,7 @@
         entries=templateutil.mappinglist(entries),
         latestentry=templateutil.mappinglist(latestentry),
         nextentry=templateutil.mappinglist(nextentry),
-        archives=web.archivelist('tip'),
+        archives=web.archivelist(b'tip'),
         revcount=revcount,
         morevars=morevars,
         lessvars=lessvars,
@@ -477,7 +479,7 @@
     )
 
 
-@webcommand('shortlog')
+@webcommand(b'shortlog')
 def shortlog(web):
     """
     /shortlog
@@ -492,7 +494,7 @@
     return changelog(web, shortlog=True)
 
 
-@webcommand('changeset')
+@webcommand(b'changeset')
 def changeset(web):
     """
     /changeset[/{revision}]
@@ -510,10 +512,10 @@
     """
     ctx = webutil.changectx(web.repo, web.req)
 
-    return web.sendtemplate('changeset', **webutil.changesetentry(web, ctx))
+    return web.sendtemplate(b'changeset', **webutil.changesetentry(web, ctx))
 
 
-rev = webcommand('rev')(changeset)
+rev = webcommand(b'rev')(changeset)
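
rev = webcommand(b'rev')(changeset) (and diff = webcommand(b'diff')(filediff)
further down) register an existing handler under a second URL name by
applying the decorator manually. A toy registry showing the pattern; the
names here are illustrative, not hgweb internals:

    commands = {}

    def register(name):
        def decorator(func):
            commands[name] = func
            return func
        return decorator

    @register(b'changeset')
    def changeset_handler(web):
        return b'...'

    rev_handler = register(b'rev')(changeset_handler)  # manual alias
    assert commands[b'rev'] is commands[b'changeset']
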
 
 
 def decodepath(path):
@@ -525,7 +527,7 @@
     return path
 
 
-@webcommand('manifest')
+@webcommand(b'manifest')
 def manifest(web):
     """
     /manifest[/{revision}[/{path}]]
@@ -542,13 +544,13 @@
 
     The ``manifest`` template will be rendered for this handler.
     """
-    if 'node' in web.req.qsparams:
+    if b'node' in web.req.qsparams:
         ctx = webutil.changectx(web.repo, web.req)
         symrev = webutil.symrevorshortnode(web.req, ctx)
     else:
-        ctx = web.repo['tip']
-        symrev = 'tip'
-    path = webutil.cleanpath(web.repo, web.req.qsparams.get('file', ''))
+        ctx = web.repo[b'tip']
+        symrev = b'tip'
+    path = webutil.cleanpath(web.repo, web.req.qsparams.get(b'file', b''))
     mf = ctx.manifest()
     node = scmutil.binnode(ctx)
 
@@ -556,10 +558,10 @@
     dirs = {}
     parity = paritygen(web.stripecount)
 
-    if path and path[-1:] != "/":
-        path += "/"
+    if path and path[-1:] != b"/":
+        path += b"/"
     l = len(path)
-    abspath = "/" + path
+    abspath = b"/" + path
 
     for full, n in mf.iteritems():
         # the virtual path (working copy path) used for the full
@@ -569,7 +571,7 @@
         if f[:l] != path:
             continue
         remain = f[l:]
-        elements = remain.split('/')
+        elements = remain.split(b'/')
         if len(elements) == 1:
             files[remain] = full
         else:
@@ -583,7 +585,7 @@
             h[None] = None  # denotes files present
 
     if mf and not files and not dirs:
-        raise ErrorResponse(HTTP_NOT_FOUND, 'path not found: ' + path)
+        raise ErrorResponse(HTTP_NOT_FOUND, b'path not found: ' + path)
 
     def filelist(context):
         for f in sorted(files):
@@ -591,12 +593,12 @@
 
             fctx = ctx.filectx(full)
             yield {
-                "file": full,
-                "parity": next(parity),
-                "basename": f,
-                "date": fctx.date(),
-                "size": fctx.size(),
-                "permissions": mf.flags(full),
+                b"file": full,
+                b"parity": next(parity),
+                b"basename": f,
+                b"date": fctx.date(),
+                b"size": fctx.size(),
+                b"permissions": mf.flags(full),
             }
 
     def dirlist(context):
@@ -610,16 +612,16 @@
                     emptydirs.append(k)
                 h = v
 
-            path = "%s%s" % (abspath, d)
+            path = b"%s%s" % (abspath, d)
             yield {
-                "parity": next(parity),
-                "path": path,
-                "emptydirs": "/".join(emptydirs),
-                "basename": d,
+                b"parity": next(parity),
+                b"path": path,
+                b"emptydirs": b"/".join(emptydirs),
+                b"basename": d,
             }
 
     return web.sendtemplate(
-        'manifest',
+        b'manifest',
         symrev=symrev,
         path=abspath,
         up=webutil.up(abspath),
@@ -631,7 +633,7 @@
     )
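
The manifest loop above buckets every path under the requested prefix into
direct files and subdirectories by splitting the remainder on b'/'. A
simplified standalone model (the real code also tracks nested dicts so the
template can collapse chains of otherwise-empty directories):

    def bucket(paths, prefix):
        # split manifest paths under `prefix` into direct files and
        # first-level subdirectory names
        files, subdirs = {}, set()
        for p in paths:
            if not p.startswith(prefix):
                continue
            head, sep, _tail = p[len(prefix):].partition(b'/')
            if sep:
                subdirs.add(head)
            else:
                files[head] = p
        return files, sorted(subdirs)

    files, subdirs = bucket([b'a.txt', b'd/e.txt', b'd/f/g.txt'], b'')
    assert files == {b'a.txt': b'a.txt'} and subdirs == [b'd']
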
 
 
-@webcommand('tags')
+@webcommand(b'tags')
 def tags(web):
     """
     /tags
@@ -649,19 +651,19 @@
     def entries(context, notip, latestonly):
         t = i
         if notip:
-            t = [(k, n) for k, n in i if k != "tip"]
+            t = [(k, n) for k, n in i if k != b"tip"]
         if latestonly:
             t = t[:1]
         for k, n in t:
             yield {
-                "parity": next(parity),
-                "tag": k,
-                "date": web.repo[n].date(),
-                "node": hex(n),
+                b"parity": next(parity),
+                b"tag": k,
+                b"date": web.repo[n].date(),
+                b"node": hex(n),
             }
 
     return web.sendtemplate(
-        'tags',
+        b'tags',
         node=hex(web.repo.changelog.tip()),
         entries=templateutil.mappinggenerator(entries, args=(False, False)),
         entriesnotip=templateutil.mappinggenerator(entries, args=(True, False)),
@@ -669,7 +671,7 @@
     )
 
 
-@webcommand('bookmarks')
+@webcommand(b'bookmarks')
 def bookmarks(web):
     """
     /bookmarks
@@ -692,10 +694,10 @@
             t = i[:1]
         for k, n in t:
             yield {
-                "parity": next(parity),
-                "bookmark": k,
-                "date": web.repo[n].date(),
-                "node": hex(n),
+                b"parity": next(parity),
+                b"bookmark": k,
+                b"date": web.repo[n].date(),
+                b"node": hex(n),
             }
 
     if i:
@@ -705,15 +707,15 @@
     lastdate = web.repo[latestrev].date()
 
     return web.sendtemplate(
-        'bookmarks',
+        b'bookmarks',
         node=hex(web.repo.changelog.tip()),
-        lastchange=templateutil.mappinglist([{'date': lastdate}]),
+        lastchange=templateutil.mappinglist([{b'date': lastdate}]),
         entries=templateutil.mappinggenerator(entries, args=(False,)),
         latestentry=templateutil.mappinggenerator(entries, args=(True,)),
     )
 
 
-@webcommand('branches')
+@webcommand(b'branches')
 def branches(web):
     """
     /branches
@@ -731,14 +733,14 @@
     latestentry = webutil.branchentries(web.repo, web.stripecount, 1)
 
     return web.sendtemplate(
-        'branches',
+        b'branches',
         node=hex(web.repo.changelog.tip()),
         entries=entries,
         latestentry=latestentry,
     )
 
 
-@webcommand('summary')
+@webcommand(b'summary')
 def summary(web):
     """
     /summary
@@ -757,7 +759,7 @@
         parity = paritygen(web.stripecount)
         count = 0
         for k, n in i:
-            if k == "tip":  # skip tip
+            if k == b"tip":  # skip tip
                 continue
 
             count += 1
@@ -765,10 +767,10 @@
                 break
 
             yield {
-                'parity': next(parity),
-                'tag': k,
-                'node': hex(n),
-                'date': web.repo[n].date(),
+                b'parity': next(parity),
+                b'tag': k,
+                b'node': hex(n),
+                b'date': web.repo[n].date(),
             }
 
     def bookmarks(context):
@@ -778,10 +780,10 @@
         marks = sorted(marks, key=sortkey, reverse=True)
         for k, n in marks[:10]:  # limit to 10 bookmarks
             yield {
-                'parity': next(parity),
-                'bookmark': k,
-                'date': web.repo[n].date(),
-                'node': hex(n),
+                b'parity': next(parity),
+                b'bookmark': k,
+                b'date': web.repo[n].date(),
+                b'node': hex(n),
             }
 
     def changelist(context):
@@ -793,41 +795,41 @@
         for i in revs:
             ctx = web.repo[i]
             lm = webutil.commonentry(web.repo, ctx)
-            lm['parity'] = next(parity)
+            lm[b'parity'] = next(parity)
             l.append(lm)
 
         for entry in reversed(l):
             yield entry
 
-    tip = web.repo['tip']
+    tip = web.repo[b'tip']
     count = len(web.repo)
     start = max(0, count - web.maxchanges)
     end = min(count, start + web.maxchanges)
 
-    desc = web.config("web", "description")
+    desc = web.config(b"web", b"description")
     if not desc:
-        desc = 'unknown'
-    labels = web.configlist('web', 'labels')
+        desc = b'unknown'
+    labels = web.configlist(b'web', b'labels')
 
     return web.sendtemplate(
-        'summary',
+        b'summary',
         desc=desc,
-        owner=get_contact(web.config) or 'unknown',
+        owner=get_contact(web.config) or b'unknown',
         lastchange=tip.date(),
-        tags=templateutil.mappinggenerator(tagentries, name='tagentry'),
+        tags=templateutil.mappinggenerator(tagentries, name=b'tagentry'),
         bookmarks=templateutil.mappinggenerator(bookmarks),
         branches=webutil.branchentries(web.repo, web.stripecount, 10),
         shortlog=templateutil.mappinggenerator(
-            changelist, name='shortlogentry'
+            changelist, name=b'shortlogentry'
         ),
         node=tip.hex(),
-        symrev='tip',
-        archives=web.archivelist('tip'),
-        labels=templateutil.hybridlist(labels, name='label'),
+        symrev=b'tip',
+        archives=web.archivelist(b'tip'),
+        labels=templateutil.hybridlist(labels, name=b'label'),
     )
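
Nearly every generator in this file draws row classes from
paritygen(web.stripecount). Its behaviour, as a simplified standalone
generator (flip between 0 and 1 after every stripecount rows; the real
helper additionally accepts an offset):

    def stripe_parity(stripecount):
        parity = count = 0
        while True:
            yield parity
            count += 1
            if stripecount and count >= stripecount:
                parity, count = 1 - parity, 0

    gen = stripe_parity(2)
    assert [next(gen) for _ in range(6)] == [0, 0, 1, 1, 0, 0]
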
 
 
-@webcommand('filediff')
+@webcommand(b'filediff')
 def filediff(web):
     """
     /diff/{revision}/{path}
@@ -845,7 +847,7 @@
         fctx = webutil.filectx(web.repo, web.req)
     except LookupError:
         ctx = webutil.changectx(web.repo, web.req)
-        path = webutil.cleanpath(web.repo, web.req.qsparams['file'])
+        path = webutil.cleanpath(web.repo, web.req.qsparams[b'file'])
         if path not in ctx.files():
             raise
 
@@ -854,9 +856,9 @@
         ctx = fctx.changectx()
     basectx = ctx.p1()
 
-    style = web.config('web', 'style')
-    if 'style' in web.req.qsparams:
-        style = web.req.qsparams['style']
+    style = web.config(b'web', b'style')
+    if b'style' in web.req.qsparams:
+        style = web.req.qsparams[b'style']
 
     diffs = webutil.diffs(web, ctx, basectx, [path], style)
     if fctx is not None:
@@ -867,7 +869,7 @@
         ctx = ctx
 
     return web.sendtemplate(
-        'filediff',
+        b'filediff',
         file=path,
         symrev=webutil.symrevorshortnode(web.req, ctx),
         rename=rename,
@@ -876,10 +878,10 @@
     )
 
 
-diff = webcommand('diff')(filediff)
+diff = webcommand(b'diff')(filediff)
 
 
-@webcommand('comparison')
+@webcommand(b'comparison')
 def comparison(web):
     """
     /comparison/{revision}/{path}
@@ -897,15 +899,15 @@
     The ``filecomparison`` template is rendered.
     """
     ctx = webutil.changectx(web.repo, web.req)
-    if 'file' not in web.req.qsparams:
-        raise ErrorResponse(HTTP_NOT_FOUND, 'file not given')
-    path = webutil.cleanpath(web.repo, web.req.qsparams['file'])
+    if b'file' not in web.req.qsparams:
+        raise ErrorResponse(HTTP_NOT_FOUND, b'file not given')
+    path = webutil.cleanpath(web.repo, web.req.qsparams[b'file'])
 
-    parsecontext = lambda v: v == 'full' and -1 or int(v)
-    if 'context' in web.req.qsparams:
-        context = parsecontext(web.req.qsparams['context'])
+    parsecontext = lambda v: v == b'full' and -1 or int(v)
+    if b'context' in web.req.qsparams:
+        context = parsecontext(web.req.qsparams[b'context'])
     else:
-        context = parsecontext(web.config('web', 'comparisoncontext'))
+        context = parsecontext(web.config(b'web', b'comparisoncontext'))
 
     def filelines(f):
         if f.isbinary():
@@ -913,7 +915,7 @@
                 mimetypes.guess_type(pycompat.fsdecode(f.path()))[0]
                 or r'application/octet-stream'
             )
-            return [_('(binary file %s, hash: %s)') % (mt, hex(f.filenode()))]
+            return [_(b'(binary file %s, hash: %s)') % (mt, hex(f.filenode()))]
         return f.data().splitlines()
 
     fctx = None
@@ -944,7 +946,7 @@
         ctx = ctx
 
     return web.sendtemplate(
-        'filecomparison',
+        b'filecomparison',
         file=path,
         symrev=webutil.symrevorshortnode(web.req, ctx),
         rename=rename,
@@ -957,7 +959,7 @@
     )
 
 
-@webcommand('annotate')
+@webcommand(b'annotate')
 def annotate(web):
     """
     /annotate/{revision}/{path}
@@ -991,8 +993,8 @@
             parentscache[rev] = []
             for p in f.parents():
                 entry = {
-                    'node': p.hex(),
-                    'rev': p.rev(),
+                    b'node': p.hex(),
+                    b'rev': p.rev(),
                 }
                 parentscache[rev].append(entry)
 
@@ -1009,7 +1011,7 @@
                 dagop.annotateline(
                     fctx=fctx.filectx(fctx.filerev()),
                     lineno=1,
-                    text='(binary:%s)' % mt,
+                    text=b'(binary:%s)' % mt,
                 )
             ]
         else:
@@ -1027,29 +1029,29 @@
                 blockhead = None
             previousrev = rev
             yield {
-                "parity": next(parity),
-                "node": f.hex(),
-                "rev": rev,
-                "author": f.user(),
-                "parents": templateutil.mappinggenerator(parents, args=(f,)),
-                "desc": f.description(),
-                "extra": f.extra(),
-                "file": f.path(),
-                "blockhead": blockhead,
-                "blockparity": blockparity,
-                "targetline": aline.lineno,
-                "line": aline.text,
-                "lineno": lineno + 1,
-                "lineid": "l%d" % (lineno + 1),
-                "linenumber": "% 6d" % (lineno + 1),
-                "revdate": f.date(),
+                b"parity": next(parity),
+                b"node": f.hex(),
+                b"rev": rev,
+                b"author": f.user(),
+                b"parents": templateutil.mappinggenerator(parents, args=(f,)),
+                b"desc": f.description(),
+                b"extra": f.extra(),
+                b"file": f.path(),
+                b"blockhead": blockhead,
+                b"blockparity": blockparity,
+                b"targetline": aline.lineno,
+                b"line": aline.text,
+                b"lineno": lineno + 1,
+                b"lineid": b"l%d" % (lineno + 1),
+                b"linenumber": b"% 6d" % (lineno + 1),
+                b"revdate": f.date(),
             }
 
-    diffopts = webutil.difffeatureopts(web.req, web.repo.ui, 'annotate')
+    diffopts = webutil.difffeatureopts(web.req, web.repo.ui, b'annotate')
     diffopts = {k: getattr(diffopts, k) for k in diffopts.defaults}
 
     return web.sendtemplate(
-        'fileannotate',
+        b'fileannotate',
         file=f,
         annotate=templateutil.mappinggenerator(annotate),
         path=webutil.up(f),
@@ -1062,7 +1064,7 @@
     )
 
 
-@webcommand('filelog')
+@webcommand(b'filelog')
 def filelog(web):
     """
     /filelog/{revision}/{path}
@@ -1081,7 +1083,7 @@
         f = fctx.path()
         fl = fctx.filelog()
     except error.LookupError:
-        f = webutil.cleanpath(web.repo, web.req.qsparams['file'])
+        f = webutil.cleanpath(web.repo, web.req.qsparams[b'file'])
         fl = web.repo.file(f)
         numrevs = len(fl)
         if not numrevs:  # file doesn't exist at all
@@ -1096,27 +1098,29 @@
         fctx = web.repo.filectx(f, fl.linkrev(frev))
 
     revcount = web.maxshortchanges
-    if 'revcount' in web.req.qsparams:
+    if b'revcount' in web.req.qsparams:
         try:
-            revcount = int(web.req.qsparams.get('revcount', revcount))
+            revcount = int(web.req.qsparams.get(b'revcount', revcount))
             revcount = max(revcount, 1)
-            web.tmpl.defaults['sessionvars']['revcount'] = revcount
+            web.tmpl.defaults[b'sessionvars'][b'revcount'] = revcount
         except ValueError:
             pass
 
     lrange = webutil.linerange(web.req)
 
-    lessvars = copy.copy(web.tmpl.defaults['sessionvars'])
-    lessvars['revcount'] = max(revcount // 2, 1)
-    morevars = copy.copy(web.tmpl.defaults['sessionvars'])
-    morevars['revcount'] = revcount * 2
+    lessvars = copy.copy(web.tmpl.defaults[b'sessionvars'])
+    lessvars[b'revcount'] = max(revcount // 2, 1)
+    morevars = copy.copy(web.tmpl.defaults[b'sessionvars'])
+    morevars[b'revcount'] = revcount * 2
 
-    patch = 'patch' in web.req.qsparams
+    patch = b'patch' in web.req.qsparams
     if patch:
-        lessvars['patch'] = morevars['patch'] = web.req.qsparams['patch']
-    descend = 'descend' in web.req.qsparams
+        lessvars[b'patch'] = morevars[b'patch'] = web.req.qsparams[b'patch']
+    descend = b'descend' in web.req.qsparams
     if descend:
-        lessvars['descend'] = morevars['descend'] = web.req.qsparams['descend']
+        lessvars[b'descend'] = morevars[b'descend'] = web.req.qsparams[
+            b'descend'
+        ]
 
     count = fctx.filerev() + 1
     start = max(0, count - revcount)  # first rev on this page
@@ -1132,9 +1136,9 @@
     ]
     entries = []
 
-    diffstyle = web.config('web', 'style')
-    if 'style' in web.req.qsparams:
-        diffstyle = web.req.qsparams['style']
+    diffstyle = web.config(b'web', b'style')
+    if b'style' in web.req.qsparams:
+        diffstyle = web.req.qsparams[b'style']
 
     def diff(fctx, linerange=None):
         ctx = fctx.changectx()
@@ -1147,7 +1151,7 @@
             [path],
             diffstyle,
             linerange=linerange,
-            lineidprefix='%s-' % ctx.hex()[:12],
+            lineidprefix=b'%s-' % ctx.hex()[:12],
         )
 
     linerange = None
@@ -1169,19 +1173,19 @@
             lm = webutil.commonentry(repo, c)
             lm.update(
                 {
-                    'parity': next(parity),
-                    'filerev': c.rev(),
-                    'file': path,
-                    'diff': diffs,
-                    'linerange': webutil.formatlinerange(*lr),
-                    'rename': templateutil.mappinglist([]),
+                    b'parity': next(parity),
+                    b'filerev': c.rev(),
+                    b'file': path,
+                    b'diff': diffs,
+                    b'linerange': webutil.formatlinerange(*lr),
+                    b'rename': templateutil.mappinglist([]),
                 }
             )
             entries.append(lm)
             if i == revcount:
                 break
-        lessvars['linerange'] = webutil.formatlinerange(*lrange)
-        morevars['linerange'] = lessvars['linerange']
+        lessvars[b'linerange'] = webutil.formatlinerange(*lrange)
+        morevars[b'linerange'] = lessvars[b'linerange']
     else:
         for i in revs:
             iterfctx = fctx.filectx(i)
@@ -1191,11 +1195,11 @@
             lm = webutil.commonentry(repo, iterfctx)
             lm.update(
                 {
-                    'parity': next(parity),
-                    'filerev': i,
-                    'file': f,
-                    'diff': diffs,
-                    'rename': webutil.renamelink(iterfctx),
+                    b'parity': next(parity),
+                    b'filerev': i,
+                    b'file': f,
+                    b'diff': diffs,
+                    b'rename': webutil.renamelink(iterfctx),
                 }
             )
             entries.append(lm)
@@ -1206,7 +1210,7 @@
     latestentry = entries[:1]
 
     return web.sendtemplate(
-        'filelog',
+        b'filelog',
         file=f,
         nav=nav,
         symrev=webutil.symrevorshortnode(web.req, fctx),
@@ -1222,7 +1226,7 @@
     )
 
 
-@webcommand('archive')
+@webcommand(b'archive')
 def archive(web):
     """
     /archive/{revision}.{format}[/{path}]
@@ -1243,54 +1247,54 @@
     No template is used for this handler. Raw, binary content is generated.
     """
 
-    type_ = web.req.qsparams.get('type')
-    allowed = web.configlist("web", "allow-archive")
-    key = web.req.qsparams['node']
+    type_ = web.req.qsparams.get(b'type')
+    allowed = web.configlist(b"web", b"allow-archive")
+    key = web.req.qsparams[b'node']
 
     if type_ not in webutil.archivespecs:
-        msg = 'Unsupported archive type: %s' % stringutil.pprint(type_)
+        msg = b'Unsupported archive type: %s' % stringutil.pprint(type_)
         raise ErrorResponse(HTTP_NOT_FOUND, msg)
 
-    if not ((type_ in allowed or web.configbool("web", "allow" + type_))):
-        msg = 'Archive type not allowed: %s' % type_
+    if not ((type_ in allowed or web.configbool(b"web", b"allow" + type_))):
+        msg = b'Archive type not allowed: %s' % type_
         raise ErrorResponse(HTTP_FORBIDDEN, msg)
 
-    reponame = re.sub(br"\W+", "-", os.path.basename(web.reponame))
+    reponame = re.sub(br"\W+", b"-", os.path.basename(web.reponame))
     cnode = web.repo.lookup(key)
     arch_version = key
-    if cnode == key or key == 'tip':
+    if cnode == key or key == b'tip':
         arch_version = short(cnode)
-    name = "%s-%s" % (reponame, arch_version)
+    name = b"%s-%s" % (reponame, arch_version)
 
     ctx = webutil.changectx(web.repo, web.req)
     pats = []
     match = scmutil.match(ctx, [])
-    file = web.req.qsparams.get('file')
+    file = web.req.qsparams.get(b'file')
     if file:
-        pats = ['path:' + file]
-        match = scmutil.match(ctx, pats, default='path')
+        pats = [b'path:' + file]
+        match = scmutil.match(ctx, pats, default=b'path')
         if pats:
             files = [f for f in ctx.manifest().keys() if match(f)]
             if not files:
                 raise ErrorResponse(
-                    HTTP_NOT_FOUND, 'file(s) not found: %s' % file
+                    HTTP_NOT_FOUND, b'file(s) not found: %s' % file
                 )
 
     mimetype, artype, extension, encoding = webutil.archivespecs[type_]
 
-    web.res.headers['Content-Type'] = mimetype
-    web.res.headers['Content-Disposition'] = 'attachment; filename=%s%s' % (
+    web.res.headers[b'Content-Type'] = mimetype
+    web.res.headers[b'Content-Disposition'] = b'attachment; filename=%s%s' % (
         name,
         extension,
     )
 
     if encoding:
-        web.res.headers['Content-Encoding'] = encoding
+        web.res.headers[b'Content-Encoding'] = encoding
 
     web.res.setbodywillwrite()
     if list(web.res.sendresponse()):
         raise error.ProgrammingError(
-            'sendresponse() should not emit data ' 'if writing later'
+            b'sendresponse() should not emit data ' b'if writing later'
         )
 
     bodyfh = web.res.getbodyfile()
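
Note the reponame sanitization above: the pattern was already bytes
(br"\W+") but the replacement was a str, a combination Python 3's re module
rejects, so the replacement becomes b"-" as well:

    import re

    assert re.sub(br"\W+", b"-", b"my repo (mirror)") == b"my-repo-mirror-"
    try:
        re.sub(br"\W+", "-", b"my repo")  # str replacement, bytes pattern
    except TypeError:
        pass
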
@@ -1302,29 +1306,29 @@
         artype,
         prefix=name,
         match=match,
-        subrepos=web.configbool("web", "archivesubrepos"),
+        subrepos=web.configbool(b"web", b"archivesubrepos"),
     )
 
     return []
 
 
-@webcommand('static')
+@webcommand(b'static')
 def static(web):
-    fname = web.req.qsparams['file']
+    fname = web.req.qsparams[b'file']
     # a repo owner may set web.static in .hg/hgrc to get any file
     # readable by the user running the CGI script
-    static = web.config("web", "static", untrusted=False)
+    static = web.config(b"web", b"static", untrusted=False)
     if not static:
         tp = web.templatepath or templater.templatepaths()
         if isinstance(tp, str):
             tp = [tp]
-        static = [os.path.join(p, 'static') for p in tp]
+        static = [os.path.join(p, b'static') for p in tp]
 
     staticfile(static, fname, web.res)
     return web.res.sendresponse()
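
The static-file path construction is bytes-only for the same reason:
os.path.join refuses to mix str and bytes components on Python 3, so once
the template paths are bytes the b'static' suffix must be too. Shown with
posixpath for a platform-independent separator:

    import posixpath

    assert posixpath.join(b'/templates', b'static') == b'/templates/static'
    try:
        posixpath.join(b'/templates', 'static')  # mixed types are rejected
    except TypeError:
        pass
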
 
 
-@webcommand('graph')
+@webcommand(b'graph')
 def graph(web):
     """
     /graph[/{revision}]
@@ -1348,32 +1352,32 @@
     This handler will render the ``graph`` template.
     """
 
-    if 'node' in web.req.qsparams:
+    if b'node' in web.req.qsparams:
         ctx = webutil.changectx(web.repo, web.req)
         symrev = webutil.symrevorshortnode(web.req, ctx)
     else:
-        ctx = web.repo['tip']
-        symrev = 'tip'
+        ctx = web.repo[b'tip']
+        symrev = b'tip'
     rev = ctx.rev()
 
     bg_height = 39
     revcount = web.maxshortchanges
-    if 'revcount' in web.req.qsparams:
+    if b'revcount' in web.req.qsparams:
         try:
-            revcount = int(web.req.qsparams.get('revcount', revcount))
+            revcount = int(web.req.qsparams.get(b'revcount', revcount))
             revcount = max(revcount, 1)
-            web.tmpl.defaults['sessionvars']['revcount'] = revcount
+            web.tmpl.defaults[b'sessionvars'][b'revcount'] = revcount
         except ValueError:
             pass
 
-    lessvars = copy.copy(web.tmpl.defaults['sessionvars'])
-    lessvars['revcount'] = max(revcount // 2, 1)
-    morevars = copy.copy(web.tmpl.defaults['sessionvars'])
-    morevars['revcount'] = revcount * 2
+    lessvars = copy.copy(web.tmpl.defaults[b'sessionvars'])
+    lessvars[b'revcount'] = max(revcount // 2, 1)
+    morevars = copy.copy(web.tmpl.defaults[b'sessionvars'])
+    morevars[b'revcount'] = revcount * 2
 
-    graphtop = web.req.qsparams.get('graphtop', ctx.hex())
-    graphvars = copy.copy(web.tmpl.defaults['sessionvars'])
-    graphvars['graphtop'] = graphtop
+    graphtop = web.req.qsparams.get(b'graphtop', ctx.hex())
+    graphvars = copy.copy(web.tmpl.defaults[b'sessionvars'])
+    graphvars[b'graphtop'] = graphtop
 
     count = len(web.repo)
     pos = rev
@@ -1426,10 +1430,10 @@
     def jsdata(context):
         for (id, type, ctx, vtx, edges) in fulltree():
             yield {
-                'node': pycompat.bytestr(ctx),
-                'graphnode': webutil.getgraphnode(web.repo, ctx),
-                'vertex': vtx,
-                'edges': edges,
+                b'node': pycompat.bytestr(ctx),
+                b'graphnode': webutil.getgraphnode(web.repo, ctx),
+                b'vertex': vtx,
+                b'edges': edges,
             }
 
     def nodes(context):
@@ -1438,23 +1442,23 @@
             entry = webutil.commonentry(web.repo, ctx)
             edgedata = [
                 {
-                    'col': edge[0],
-                    'nextcol': edge[1],
-                    'color': (edge[2] - 1) % 6 + 1,
-                    'width': edge[3],
-                    'bcolor': edge[4],
+                    b'col': edge[0],
+                    b'nextcol': edge[1],
+                    b'color': (edge[2] - 1) % 6 + 1,
+                    b'width': edge[3],
+                    b'bcolor': edge[4],
                 }
                 for edge in edges
             ]
 
             entry.update(
                 {
-                    'col': vtx[0],
-                    'color': (vtx[1] - 1) % 6 + 1,
-                    'parity': next(parity),
-                    'edges': templateutil.mappinglist(edgedata),
-                    'row': row,
-                    'nextrow': row + 1,
+                    b'col': vtx[0],
+                    b'color': (vtx[1] - 1) % 6 + 1,
+                    b'parity': next(parity),
+                    b'edges': templateutil.mappinglist(edgedata),
+                    b'row': row,
+                    b'nextrow': row + 1,
                 }
             )
 
@@ -1463,7 +1467,7 @@
     rows = len(tree)
 
     return web.sendtemplate(
-        'graph',
+        b'graph',
         rev=rev,
         symrev=symrev,
         revcount=revcount,
@@ -1479,7 +1483,7 @@
         jsdata=templateutil.mappinggenerator(jsdata),
         nodes=templateutil.mappinggenerator(nodes),
         node=ctx.hex(),
-        archives=web.archivelist('tip'),
+        archives=web.archivelist(b'tip'),
         changenav=changenav,
     )
 
@@ -1487,13 +1491,13 @@
 def _getdoc(e):
     doc = e[0].__doc__
     if doc:
-        doc = _(doc).partition('\n')[0]
+        doc = _(doc).partition(b'\n')[0]
     else:
-        doc = _('(no help text available)')
+        doc = _(b'(no help text available)')
     return doc
 
 
-@webcommand('help')
+@webcommand(b'help')
 def help(web):
     """
     /help[/{topic}]
@@ -1510,19 +1514,19 @@
     """
     from .. import commands, help as helpmod  # avoid cycle
 
-    topicname = web.req.qsparams.get('node')
+    topicname = web.req.qsparams.get(b'node')
     if not topicname:
 
         def topics(context):
             for h in helpmod.helptable:
                 entries, summary, _doc = h[0:3]
-                yield {'topic': entries[0], 'summary': summary}
+                yield {b'topic': entries[0], b'summary': summary}
 
         early, other = [], []
-        primary = lambda s: s.partition('|')[0]
+        primary = lambda s: s.partition(b'|')[0]
         for c, e in commands.table.iteritems():
             doc = _getdoc(e)
-            if 'DEPRECATED' in doc or c.startswith('debug'):
+            if b'DEPRECATED' in doc or c.startswith(b'debug'):
                 continue
             cmd = primary(c)
             if getattr(e[0], 'helpbasic', False):
@@ -1535,18 +1539,18 @@
 
         def earlycommands(context):
             for c, doc in early:
-                yield {'topic': c, 'summary': doc}
+                yield {b'topic': c, b'summary': doc}
 
         def othercommands(context):
             for c, doc in other:
-                yield {'topic': c, 'summary': doc}
+                yield {b'topic': c, b'summary': doc}
 
         return web.sendtemplate(
-            'helptopics',
+            b'helptopics',
             topics=templateutil.mappinggenerator(topics),
             earlycommands=templateutil.mappinggenerator(earlycommands),
             othercommands=templateutil.mappinggenerator(othercommands),
-            title='Index',
+            title=b'Index',
         )
 
     # Render an index of sub-topics.
@@ -1555,14 +1559,14 @@
         for entries, summary, _doc in helpmod.subtopics[topicname]:
             topics.append(
                 {
-                    'topic': '%s.%s' % (topicname, entries[0]),
-                    'basename': entries[0],
-                    'summary': summary,
+                    b'topic': b'%s.%s' % (topicname, entries[0]),
+                    b'basename': entries[0],
+                    b'summary': summary,
                 }
             )
 
         return web.sendtemplate(
-            'helptopics',
+            b'helptopics',
             topics=templateutil.mappinglist(topics),
             title=topicname,
             subindex=True,
@@ -1572,10 +1576,10 @@
     u.verbose = True
 
     # Render a page from a sub-topic.
-    if '.' in topicname:
+    if b'.' in topicname:
         # TODO implement support for rendering sections, like
         # `hg help` works.
-        topic, subtopic = topicname.split('.', 1)
+        topic, subtopic = topicname.split(b'.', 1)
         if topic not in helpmod.subtopics:
             raise ErrorResponse(HTTP_NOT_FOUND)
     else:
@@ -1587,7 +1591,7 @@
     except error.Abort:
         raise ErrorResponse(HTTP_NOT_FOUND)
 
-    return web.sendtemplate('help', topic=topicname, doc=doc)
+    return web.sendtemplate(b'help', topic=topicname, doc=doc)
 
 
 # tell hggettext to extract docstrings from these functions:
--- a/mercurial/hgweb/webutil.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/hgweb/webutil.py	Sun Oct 06 09:48:39 2019 -0400
@@ -45,37 +45,42 @@
 
 archivespecs = util.sortdict(
     (
-        ('zip', ('application/zip', 'zip', '.zip', None)),
-        ('gz', ('application/x-gzip', 'tgz', '.tar.gz', None)),
-        ('bz2', ('application/x-bzip2', 'tbz2', '.tar.bz2', None)),
+        (b'zip', (b'application/zip', b'zip', b'.zip', None)),
+        (b'gz', (b'application/x-gzip', b'tgz', b'.tar.gz', None)),
+        (b'bz2', (b'application/x-bzip2', b'tbz2', b'.tar.bz2', None)),
     )
 )
 
 
 def archivelist(ui, nodeid, url=None):
-    allowed = ui.configlist('web', 'allow-archive', untrusted=True)
+    allowed = ui.configlist(b'web', b'allow-archive', untrusted=True)
     archives = []
 
     for typ, spec in archivespecs.iteritems():
         if typ in allowed or ui.configbool(
-            'web', 'allow' + typ, untrusted=True
+            b'web', b'allow' + typ, untrusted=True
         ):
             archives.append(
-                {'type': typ, 'extension': spec[2], 'node': nodeid, 'url': url,}
+                {
+                    b'type': typ,
+                    b'extension': spec[2],
+                    b'node': nodeid,
+                    b'url': url,
+                }
             )
 
     return templateutil.mappinglist(archives)
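
archivelist offers an archive type when it appears in web.allow-archive or
when the legacy per-type web.allow<type> boolean is set. A compact model of
that gating, with a configbool callable standing in for ui.configbool:

    ARCHIVE_TYPES = (b'zip', b'gz', b'bz2')

    def allowed_archives(allowed, configbool):
        return [
            t for t in ARCHIVE_TYPES
            if t in allowed or configbool(b'web', b'allow' + t)
        ]

    assert allowed_archives([b'zip'], lambda s, k: False) == [b'zip']
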
 
 
 def up(p):
-    if p[0:1] != "/":
-        p = "/" + p
-    if p[-1:] == "/":
+    if p[0:1] != b"/":
+        p = b"/" + p
+    if p[-1:] == b"/":
         p = p[:-1]
     up = os.path.dirname(p)
-    if up == "/":
-        return "/"
-    return up + "/"
+    if up == b"/":
+        return b"/"
+    return up + b"/"
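
up() computes the parent URL of a repository path, slash-delimited on both
ends. A behavioural copy with spot checks (os.path.dirname accepts bytes on
Python 3):

    import os.path

    def parent_url(p):  # mirrors webutil.up() above
        if p[0:1] != b"/":
            p = b"/" + p
        if p[-1:] == b"/":
            p = p[:-1]
        parent = os.path.dirname(p)
        return b"/" if parent == b"/" else parent + b"/"

    assert parent_url(b"dir/sub/") == b"/dir/"
    assert parent_url(b"/file") == b"/"
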
 
 
 def _navseq(step, firststep=None):
@@ -136,8 +141,8 @@
             return templateutil.mappinglist(
                 [
                     {
-                        'before': templateutil.mappinglist([]),
-                        'after': templateutil.mappinglist([]),
+                        b'before': templateutil.mappinglist([]),
+                        b'after': templateutil.mappinglist([]),
                     },
                 ]
             )
@@ -151,28 +156,28 @@
         targets.sort()
 
         first = self._first()
-        navbefore = [{'label': '(%i)' % first, 'node': self.hex(first)}]
+        navbefore = [{b'label': b'(%i)' % first, b'node': self.hex(first)}]
         navafter = []
         for rev in targets:
             if rev not in self._revlog:
                 continue
             if pos < rev < limit:
                 navafter.append(
-                    {'label': '+%d' % abs(rev - pos), 'node': self.hex(rev)}
+                    {b'label': b'+%d' % abs(rev - pos), b'node': self.hex(rev)}
                 )
             if 0 < rev < pos:
                 navbefore.append(
-                    {'label': '-%d' % abs(rev - pos), 'node': self.hex(rev)}
+                    {b'label': b'-%d' % abs(rev - pos), b'node': self.hex(rev)}
                 )
 
-        navafter.append({'label': 'tip', 'node': 'tip'})
+        navafter.append({b'label': b'tip', b'node': b'tip'})
 
         # TODO: maybe this can be a scalar object supporting tomap()
         return templateutil.mappinglist(
             [
                 {
-                    'before': templateutil.mappinglist(navbefore),
-                    'after': templateutil.mappinglist(navafter),
+                    b'before': templateutil.mappinglist(navbefore),
+                    b'after': templateutil.mappinglist(navafter),
                 },
             ]
         )
@@ -199,15 +204,15 @@
 def _ctxsgen(context, ctxs):
     for s in ctxs:
         d = {
-            'node': s.hex(),
-            'rev': s.rev(),
-            'user': s.user(),
-            'date': s.date(),
-            'description': s.description(),
-            'branch': s.branch(),
+            b'node': s.hex(),
+            b'rev': s.rev(),
+            b'user': s.user(),
+            b'date': s.date(),
+            b'description': s.description(),
+            b'branch': s.branch(),
         }
-        if util.safehasattr(s, 'path'):
-            d['file'] = s.path()
+        if util.safehasattr(s, b'path'):
+            d[b'file'] = s.path()
         yield d
 
 
@@ -225,7 +230,12 @@
         ui, untrusted=True, section=section, whitespace=True
     )
 
-    for k in ('ignorews', 'ignorewsamount', 'ignorewseol', 'ignoreblanklines'):
+    for k in (
+        b'ignorews',
+        b'ignorewsamount',
+        b'ignorewseol',
+        b'ignoreblanklines',
+    ):
         v = req.qsparams.get(k)
         if v is not None:
             v = stringutil.parsebool(v)
@@ -235,7 +245,7 @@
 
 
 def annotate(req, fctx, ui):
-    diffopts = difffeatureopts(req, ui, 'annotate')
+    diffopts = difffeatureopts(req, ui, b'annotate')
     return fctx.annotate(follow=True, diffopts=diffopts)
 
 
@@ -254,16 +264,16 @@
 def renamelink(fctx):
     r = fctx.renamed()
     if r:
-        return templateutil.mappinglist([{'file': r[0], 'node': hex(r[1])}])
+        return templateutil.mappinglist([{b'file': r[0], b'node': hex(r[1])}])
     return templateutil.mappinglist([])
 
 
 def nodetagsdict(repo, node):
-    return templateutil.hybridlist(repo.nodetags(node), name='name')
+    return templateutil.hybridlist(repo.nodetags(node), name=b'name')
 
 
 def nodebookmarksdict(repo, node):
-    return templateutil.hybridlist(repo.nodebookmarks(node), name='name')
+    return templateutil.hybridlist(repo.nodebookmarks(node), name=b'name')
 
 
 def nodebranchdict(repo, ctx):
@@ -277,7 +287,7 @@
         branchnode = None
     if branchnode == ctx.node():
         branches.append(branch)
-    return templateutil.hybridlist(branches, name='name')
+    return templateutil.hybridlist(branches, name=b'name')
 
 
 def nodeinbranch(repo, ctx):
@@ -287,17 +297,17 @@
         branchnode = repo.branchtip(branch)
     except error.RepoLookupError:
         branchnode = None
-    if branch != 'default' and branchnode != ctx.node():
+    if branch != b'default' and branchnode != ctx.node():
         branches.append(branch)
-    return templateutil.hybridlist(branches, name='name')
+    return templateutil.hybridlist(branches, name=b'name')
 
 
 def nodebranchnodefault(ctx):
     branches = []
     branch = ctx.branch()
-    if branch != 'default':
+    if branch != b'default':
         branches.append(branch)
-    return templateutil.hybridlist(branches, name='name')
+    return templateutil.hybridlist(branches, name=b'name')
 
 
 def _nodenamesgen(context, f, node, name):
@@ -306,12 +316,12 @@
 
 
 def showtag(repo, t1, node=nullid):
-    args = (repo.nodetags, node, 'tag')
+    args = (repo.nodetags, node, b'tag')
     return templateutil.mappinggenerator(_nodenamesgen, args=args, name=t1)
 
 
 def showbookmark(repo, t1, node=nullid):
-    args = (repo.nodebookmarks, node, 'bookmark')
+    args = (repo.nodebookmarks, node, b'bookmark')
     return templateutil.mappinggenerator(_nodenamesgen, args=args, name=t1)
 
 
@@ -331,33 +341,33 @@
                 return
             count += 1
             if closed:
-                status = 'closed'
+                status = b'closed'
             elif ctx.node() not in heads:
-                status = 'inactive'
+                status = b'inactive'
             else:
-                status = 'open'
+                status = b'open'
             yield {
-                'parity': next(parity),
-                'branch': ctx.branch(),
-                'status': status,
-                'node': ctx.hex(),
-                'date': ctx.date(),
+                b'parity': next(parity),
+                b'branch': ctx.branch(),
+                b'status': status,
+                b'node': ctx.hex(),
+                b'date': ctx.date(),
             }
 
     return templateutil.mappinggenerator(entries)
 
 
 def cleanpath(repo, path):
-    path = path.lstrip('/')
+    path = path.lstrip(b'/')
     auditor = pathutil.pathauditor(repo.root, realfs=False)
-    return pathutil.canonpath(repo.root, '', path, auditor=auditor)
+    return pathutil.canonpath(repo.root, b'', path, auditor=auditor)
 
 
 def changectx(repo, req):
-    changeid = "tip"
-    if 'node' in req.qsparams:
-        changeid = req.qsparams['node']
-        ipos = changeid.find(':')
+    changeid = b"tip"
+    if b'node' in req.qsparams:
+        changeid = req.qsparams[b'node']
+        ipos = changeid.find(b':')
         if ipos != -1:
             changeid = changeid[(ipos + 1) :]
 
@@ -365,9 +375,9 @@
 
 
 def basechangectx(repo, req):
-    if 'node' in req.qsparams:
-        changeid = req.qsparams['node']
-        ipos = changeid.find(':')
+    if b'node' in req.qsparams:
+        changeid = req.qsparams[b'node']
+        ipos = changeid.find(b':')
         if ipos != -1:
             changeid = changeid[:ipos]
             return scmutil.revsymbol(repo, changeid)
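
changectx and basechangectx are two halves of one convention: a node query
parameter of the form b'base:head' names a comparison, with the text after
the colon resolving the head and the text before it the base. The split in
isolation:

    def split_node_param(node):
        # b'base:head' -> (base, head); a bare node has no base part
        ipos = node.find(b':')
        if ipos == -1:
            return None, node
        return node[:ipos], node[ipos + 1:]

    assert split_node_param(b'a1b2:c3d4') == (b'a1b2', b'c3d4')
    assert split_node_param(b'tip') == (None, b'tip')
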
@@ -376,15 +386,15 @@
 
 
 def filectx(repo, req):
-    if 'file' not in req.qsparams:
-        raise ErrorResponse(HTTP_NOT_FOUND, 'file not given')
-    path = cleanpath(repo, req.qsparams['file'])
-    if 'node' in req.qsparams:
-        changeid = req.qsparams['node']
-    elif 'filenode' in req.qsparams:
-        changeid = req.qsparams['filenode']
+    if b'file' not in req.qsparams:
+        raise ErrorResponse(HTTP_NOT_FOUND, b'file not given')
+    path = cleanpath(repo, req.qsparams[b'file'])
+    if b'node' in req.qsparams:
+        changeid = req.qsparams[b'node']
+    elif b'filenode' in req.qsparams:
+        changeid = req.qsparams[b'filenode']
     else:
-        raise ErrorResponse(HTTP_NOT_FOUND, 'node or filenode not given')
+        raise ErrorResponse(HTTP_NOT_FOUND, b'node or filenode not given')
     try:
         fctx = scmutil.revsymbol(repo, changeid)[path]
     except error.RepoError:
@@ -394,15 +404,15 @@
 
 
 def linerange(req):
-    linerange = req.qsparams.getall('linerange')
+    linerange = req.qsparams.getall(b'linerange')
     if not linerange:
         return None
     if len(linerange) > 1:
-        raise ErrorResponse(HTTP_BAD_REQUEST, 'redundant linerange parameter')
+        raise ErrorResponse(HTTP_BAD_REQUEST, b'redundant linerange parameter')
     try:
-        fromline, toline = map(int, linerange[0].split(':', 1))
+        fromline, toline = map(int, linerange[0].split(b':', 1))
     except ValueError:
-        raise ErrorResponse(HTTP_BAD_REQUEST, 'invalid linerange parameter')
+        raise ErrorResponse(HTTP_BAD_REQUEST, b'invalid linerange parameter')
     try:
         return util.processlinerange(fromline, toline)
     except error.ParseError as exc:
@@ -410,15 +420,15 @@
 
 
 def formatlinerange(fromline, toline):
-    return '%d:%d' % (fromline + 1, toline)
+    return b'%d:%d' % (fromline + 1, toline)
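
formatlinerange is the inverse of the linerange parsing above: query
strings are 1-based and inclusive, while the fromline + 1 on the way out
suggests the internal range is 0-based and half-open. A round-trip sketch;
the real validation lives in util.processlinerange, so the bounds check
here is only an assumption:

    def parse_linerange(value):
        # b'5:10' (1-based, inclusive) -> (4, 10) (0-based, half-open)
        fromline, toline = map(int, value.split(b':', 1))
        if fromline < 1 or fromline > toline:
            raise ValueError('invalid linerange')
        return fromline - 1, toline

    def format_linerange(fromline, toline):
        return b'%d:%d' % (fromline + 1, toline)

    assert format_linerange(*parse_linerange(b'5:10')) == b'5:10'
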
 
 
 def _succsandmarkersgen(context, mapping):
-    repo = context.resource(mapping, 'repo')
+    repo = context.resource(mapping, b'repo')
     itemmappings = templatekw.showsuccsandmarkers(context, mapping)
     for item in itemmappings.tovalue(context, mapping):
-        item['successors'] = _siblings(
-            repo[successor] for successor in item['successors']
+        item[b'successors'] = _siblings(
+            repo[successor] for successor in item[b'successors']
         )
         yield item
 
@@ -428,17 +438,17 @@
 
 
 # teach templater succsandmarkers is switched to (context, mapping) API
-succsandmarkers._requires = {'repo', 'ctx'}
+succsandmarkers._requires = {b'repo', b'ctx'}
 
 
 def _whyunstablegen(context, mapping):
-    repo = context.resource(mapping, 'repo')
-    ctx = context.resource(mapping, 'ctx')
+    repo = context.resource(mapping, b'repo')
+    ctx = context.resource(mapping, b'ctx')
 
     entries = obsutil.whyunstable(repo, ctx)
     for entry in entries:
-        if entry.get('divergentnodes'):
-            entry['divergentnodes'] = _siblings(entry['divergentnodes'])
+        if entry.get(b'divergentnodes'):
+            entry[b'divergentnodes'] = _siblings(entry[b'divergentnodes'])
         yield entry
 
 
@@ -446,7 +456,7 @@
     return templateutil.mappinggenerator(_whyunstablegen, args=(mapping,))
 
 
-whyunstable._requires = {'repo', 'ctx'}
+whyunstable._requires = {b'repo', b'ctx'}
 
 
 def commonentry(repo, ctx):
@@ -455,27 +465,27 @@
         # TODO: perhaps ctx.changectx() should be assigned if ctx is a
         # filectx, but I'm not pretty sure if that would always work because
         # fctx.parents() != fctx.changectx.parents() for example.
-        'ctx': ctx,
-        'rev': ctx.rev(),
-        'node': hex(node),
-        'author': ctx.user(),
-        'desc': ctx.description(),
-        'date': ctx.date(),
-        'extra': ctx.extra(),
-        'phase': ctx.phasestr(),
-        'obsolete': ctx.obsolete(),
-        'succsandmarkers': succsandmarkers,
-        'instabilities': templateutil.hybridlist(
-            ctx.instabilities(), name='instability'
+        b'ctx': ctx,
+        b'rev': ctx.rev(),
+        b'node': hex(node),
+        b'author': ctx.user(),
+        b'desc': ctx.description(),
+        b'date': ctx.date(),
+        b'extra': ctx.extra(),
+        b'phase': ctx.phasestr(),
+        b'obsolete': ctx.obsolete(),
+        b'succsandmarkers': succsandmarkers,
+        b'instabilities': templateutil.hybridlist(
+            ctx.instabilities(), name=b'instability'
         ),
-        'whyunstable': whyunstable,
-        'branch': nodebranchnodefault(ctx),
-        'inbranch': nodeinbranch(repo, ctx),
-        'branches': nodebranchdict(repo, ctx),
-        'tags': nodetagsdict(repo, node),
-        'bookmarks': nodebookmarksdict(repo, node),
-        'parent': lambda context, mapping: parents(ctx),
-        'child': lambda context, mapping: children(ctx),
+        b'whyunstable': whyunstable,
+        b'branch': nodebranchnodefault(ctx),
+        b'inbranch': nodeinbranch(repo, ctx),
+        b'branches': nodebranchdict(repo, ctx),
+        b'tags': nodetagsdict(repo, node),
+        b'bookmarks': nodebookmarksdict(repo, node),
+        b'parent': lambda context, mapping: parents(ctx),
+        b'child': lambda context, mapping: children(ctx),
     }
 
 
@@ -488,17 +498,17 @@
     repo = web.repo
     rev = ctx.rev()
     n = scmutil.binnode(ctx)
-    showtags = showtag(repo, 'changelogtag', n)
+    showtags = showtag(repo, b'changelogtag', n)
     files = listfilediffs(ctx.files(), n, web.maxfiles)
 
     entry = commonentry(repo, ctx)
     entry.update(
         {
-            'allparents': lambda context, mapping: parents(ctx),
-            'parent': lambda context, mapping: parents(ctx, rev - 1),
-            'child': lambda context, mapping: children(ctx, rev + 1),
-            'changelogtag': showtags,
-            'files': files,
+            b'allparents': lambda context, mapping: parents(ctx),
+            b'parent': lambda context, mapping: parents(ctx, rev - 1),
+            b'child': lambda context, mapping: children(ctx, rev + 1),
+            b'changelogtag': showtags,
+            b'files': files,
         }
     )
     return entry
@@ -516,14 +526,14 @@
         count += 1
 
         entry = changelistentry(web, repo[rev])
-        entry['parity'] = next(parityfn)
+        entry[b'parity'] = next(parityfn)
 
         yield entry
 
 
 def symrevorshortnode(req, ctx):
-    if 'node' in req.qsparams:
-        return templatefilters.revescape(req.qsparams['node'])
+    if b'node' in req.qsparams:
+        return templatefilters.revescape(req.qsparams[b'node'])
     else:
         return short(scmutil.binnode(ctx))
 
@@ -531,14 +541,14 @@
 def _listfilesgen(context, ctx, stripecount):
     parity = paritygen(stripecount)
     for blockno, f in enumerate(ctx.files()):
-        template = 'filenodelink' if f in ctx else 'filenolink'
+        template = b'filenodelink' if f in ctx else b'filenolink'
         yield context.process(
             template,
             {
-                'node': ctx.hex(),
-                'file': f,
-                'blockno': blockno + 1,
-                'parity': next(parity),
+                b'node': ctx.hex(),
+                b'file': f,
+                b'blockno': blockno + 1,
+                b'parity': next(parity),
             },
         )
 
@@ -546,9 +556,9 @@
 def changesetentry(web, ctx):
     '''Obtain a dictionary to be used to render the "changeset" template.'''
 
-    showtags = showtag(web.repo, 'changesettag', scmutil.binnode(ctx))
+    showtags = showtag(web.repo, b'changesettag', scmutil.binnode(ctx))
     showbookmarks = showbookmark(
-        web.repo, 'changesetbookmark', scmutil.binnode(ctx)
+        web.repo, b'changesetbookmark', scmutil.binnode(ctx)
     )
     showbranch = nodebranchnodefault(ctx)
 
@@ -556,9 +566,9 @@
     if basectx is None:
         basectx = ctx.p1()
 
-    style = web.config('web', 'style')
-    if 'style' in web.req.qsparams:
-        style = web.req.qsparams['style']
+    style = web.config(b'web', b'style')
+    if b'style' in web.req.qsparams:
+        style = web.req.qsparams[b'style']
 
     diff = diffs(web, ctx, basectx, None, style)
 
@@ -585,9 +595,9 @@
 
 def _listfilediffsgen(context, files, node, max):
     for f in files[:max]:
-        yield context.process('filedifflink', {'node': hex(node), 'file': f})
+        yield context.process(b'filedifflink', {b'node': hex(node), b'file': f})
     if len(files) > max:
-        yield context.process('fileellipses', {})
+        yield context.process(b'fileellipses', {})
 
 
 def listfilediffs(files, node, max):
@@ -598,22 +608,22 @@
 
 def _prettyprintdifflines(context, lines, blockno, lineidprefix):
     for lineno, l in enumerate(lines, 1):
-        difflineno = "%d.%d" % (blockno, lineno)
-        if l.startswith('+'):
-            ltype = "difflineplus"
-        elif l.startswith('-'):
-            ltype = "difflineminus"
-        elif l.startswith('@'):
-            ltype = "difflineat"
+        difflineno = b"%d.%d" % (blockno, lineno)
+        if l.startswith(b'+'):
+            ltype = b"difflineplus"
+        elif l.startswith(b'-'):
+            ltype = b"difflineminus"
+        elif l.startswith(b'@'):
+            ltype = b"difflineat"
         else:
-            ltype = "diffline"
+            ltype = b"diffline"
         yield context.process(
             ltype,
             {
-                'line': l,
-                'lineno': lineno,
-                'lineid': lineidprefix + "l%s" % difflineno,
-                'linenumber': "% 8s" % difflineno,
+                b'line': l,
+                b'lineno': lineno,
+                b'lineid': lineidprefix + b"l%s" % difflineno,
+                b'linenumber': b"% 8s" % difflineno,
             },
         )
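
_prettyprintdifflines picks a template per diff marker and builds element
ids like b'l3.1' from the block and line numbers. The classification,
reduced to a standalone function:

    def diffline_template(line):
        if line.startswith(b'+'):
            return b'difflineplus'
        if line.startswith(b'-'):
            return b'difflineminus'
        if line.startswith(b'@'):
            return b'difflineat'
        return b'diffline'

    assert diffline_template(b'+added') == b'difflineplus'
    assert diffline_template(b' context') == b'diffline'
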
 
@@ -639,9 +649,9 @@
 
     diffhunks = patch.diffhunks(repo, basectx, ctx, m, opts=diffopts)
     for blockno, (fctx1, fctx2, header, hunks) in enumerate(diffhunks, 1):
-        if style != 'raw':
+        if style != b'raw':
             header = header[1:]
-        lines = [h + '\n' for h in header]
+        lines = [h + b'\n' for h in header]
         for hunkrange, hunklines in hunks:
             if linerange is not None and hunkrange is not None:
                 s1, l1, s2, l2 = hunkrange
@@ -653,13 +663,13 @@
                 _prettyprintdifflines, args=(lines, blockno, lineidprefix)
             )
             yield {
-                'parity': next(parity),
-                'blockno': blockno,
-                'lines': l,
+                b'parity': next(parity),
+                b'blockno': blockno,
+                b'lines': l,
             }
 
 
-def diffs(web, ctx, basectx, files, style, linerange=None, lineidprefix=''):
+def diffs(web, ctx, basectx, files, style, linerange=None, lineidprefix=b''):
     args = (
         web.repo,
         ctx,
@@ -670,23 +680,25 @@
         linerange,
         lineidprefix,
     )
-    return templateutil.mappinggenerator(_diffsgen, args=args, name='diffblock')
+    return templateutil.mappinggenerator(
+        _diffsgen, args=args, name=b'diffblock'
+    )
 
 
 def _compline(type, leftlineno, leftline, rightlineno, rightline):
-    lineid = leftlineno and ("l%d" % leftlineno) or ''
-    lineid += rightlineno and ("r%d" % rightlineno) or ''
-    llno = '%d' % leftlineno if leftlineno else ''
-    rlno = '%d' % rightlineno if rightlineno else ''
+    lineid = leftlineno and (b"l%d" % leftlineno) or b''
+    lineid += rightlineno and (b"r%d" % rightlineno) or b''
+    llno = b'%d' % leftlineno if leftlineno else b''
+    rlno = b'%d' % rightlineno if rightlineno else b''
     return {
-        'type': type,
-        'lineid': lineid,
-        'leftlineno': leftlineno,
-        'leftlinenumber': "% 6s" % llno,
-        'leftline': leftline or '',
-        'rightlineno': rightlineno,
-        'rightlinenumber': "% 6s" % rlno,
-        'rightline': rightline or '',
+        b'type': type,
+        b'lineid': lineid,
+        b'leftlineno': leftlineno,
+        b'leftlinenumber': b"% 6s" % llno,
+        b'leftline': leftline or b'',
+        b'rightlineno': rightlineno,
+        b'rightlinenumber': b"% 6s" % rlno,
+        b'rightline': rightline or b'',
     }
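
_compline encodes which side(s) a comparison row belongs to directly in its
element id: b'l3' for left-only, b'r4' for right-only, b'l3r4' for both,
with the and/or idiom turning an absent line number into an empty id part.
The id and padding scheme in isolation:

    leftlineno, rightlineno = 3, 4
    lineid = (leftlineno and (b"l%d" % leftlineno) or b'') \
        + (rightlineno and (b"r%d" % rightlineno) or b'')
    assert lineid == b'l3r4'
    assert (0 and b"l0" or b'') == b''    # absent side: empty id part
    assert b"% 6s" % b"3" == b'     3'    # width-6 right alignment
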
 
 
@@ -727,7 +739,7 @@
 def _getcompblock(leftlines, rightlines, opcodes):
     args = (leftlines, rightlines, opcodes)
     return templateutil.mappinggenerator(
-        _getcompblockgen, args=args, name='comparisonline'
+        _getcompblockgen, args=args, name=b'comparisonline'
     )
 
 
@@ -736,24 +748,24 @@
     s = difflib.SequenceMatcher(None, leftlines, rightlines)
     if contextnum < 0:
         l = _getcompblock(leftlines, rightlines, s.get_opcodes())
-        yield {'lines': l}
+        yield {b'lines': l}
     else:
         for oc in s.get_grouped_opcodes(n=contextnum):
             l = _getcompblock(leftlines, rightlines, oc)
-            yield {'lines': l}
+            yield {b'lines': l}
 
 
 def compare(contextnum, leftlines, rightlines):
     args = (contextnum, leftlines, rightlines)
     return templateutil.mappinggenerator(
-        _comparegen, args=args, name='comparisonblock'
+        _comparegen, args=args, name=b'comparisonblock'
     )
 
 
 def diffstatgen(ui, ctx, basectx):
     '''Generator function that provides the diffstat data.'''
 
-    diffopts = patch.diffopts(ui, {'noprefix': False})
+    diffopts = patch.diffopts(ui, {b'noprefix': False})
     stats = patch.diffstatdata(util.iterlines(ctx.diff(basectx, opts=diffopts)))
     maxname, maxtotal, addtotal, removetotal, binary = patch.diffstatsum(stats)
     while True:
@@ -764,7 +776,7 @@
     '''Return a short summary of the diff.'''
 
     stats, maxname, maxtotal, addtotal, removetotal, binary = next(statgen)
-    return _(' %d files changed, %d insertions(+), %d deletions(-)\n') % (
+    return _(b' %d files changed, %d insertions(+), %d deletions(-)\n') % (
         len(stats),
         addtotal,
         removetotal,
@@ -782,19 +794,19 @@
 
     fileno = 0
     for filename, adds, removes, isbinary in stats:
-        template = 'diffstatlink' if filename in files else 'diffstatnolink'
+        template = b'diffstatlink' if filename in files else b'diffstatnolink'
         total = adds + removes
         fileno += 1
         yield context.process(
             template,
             {
-                'node': ctx.hex(),
-                'file': filename,
-                'fileno': fileno,
-                'total': total,
-                'addpct': pct(adds),
-                'removepct': pct(removes),
-                'parity': next(parity),
+                b'node': ctx.hex(),
+                b'file': filename,
+                b'fileno': fileno,
+                b'total': total,
+                b'addpct': pct(adds),
+                b'removepct': pct(removes),
+                b'parity': next(parity),
             },
         )
 
@@ -806,7 +818,7 @@
 
 
 class sessionvars(templateutil.wrapped):
-    def __init__(self, vars, start='?'):
+    def __init__(self, vars, start=b'?'):
         self._start = start
         self._vars = vars
 
@@ -828,31 +840,31 @@
         return self._vars.get(key)
 
     def getmin(self, context, mapping):
-        raise error.ParseError(_('not comparable'))
+        raise error.ParseError(_(b'not comparable'))
 
     def getmax(self, context, mapping):
-        raise error.ParseError(_('not comparable'))
+        raise error.ParseError(_(b'not comparable'))
 
     def filter(self, context, mapping, select):
         # implement if necessary
-        raise error.ParseError(_('not filterable'))
+        raise error.ParseError(_(b'not filterable'))
 
     def itermaps(self, context):
         separator = self._start
         for key, value in sorted(self._vars.iteritems()):
             yield {
-                'name': key,
-                'value': pycompat.bytestr(value),
-                'separator': separator,
+                b'name': key,
+                b'value': pycompat.bytestr(value),
+                b'separator': separator,
             }
-            separator = '&'
+            separator = b'&'
 
     def join(self, context, mapping, sep):
         # could be '{separator}{name}={value|urlescape}'
-        raise error.ParseError(_('not displayable without template'))
+        raise error.ParseError(_(b'not displayable without template'))
 
     def show(self, context, mapping):
-        return self.join(context, '')
+        return self.join(context, mapping, b'')
 
     def tobool(self, context, mapping):
         return bool(self._vars)
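
Note (annotation, not part of the patch): sessionvars.itermaps() above emits
the b'?' separator only for the first variable and b'&' afterwards. A
self-contained restatement of that logic, with assumed helper names:

    def itermaps(vars, start=b'?'):
        separator = start
        for key, value in sorted(vars.items()):
            yield {b'name': key, b'value': value, b'separator': separator}
            separator = b'&'

    parts = [m[b'separator'] + m[b'name'] + b'=' + m[b'value']
             for m in itermaps({b'style': b'gitweb', b'rev': b'tip'})]
    assert b''.join(parts) == b'?rev=tip&style=gitweb'
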
@@ -869,9 +881,9 @@
 
 def getwebsubs(repo):
     websubtable = []
-    websubdefs = repo.ui.configitems('websub')
+    websubdefs = repo.ui.configitems(b'websub')
     # we must maintain interhg backwards compatibility
-    websubdefs += repo.ui.configitems('interhg')
+    websubdefs += repo.ui.configitems(b'interhg')
     for key, pattern in websubdefs:
         # grab the delimiter from the character after the "s"
         unesc = pattern[1:2]
@@ -887,7 +899,7 @@
         )
         if not match:
             repo.ui.warn(
-                _("websub: invalid pattern for %s: %s\n") % (key, pattern)
+                _(b"websub: invalid pattern for %s: %s\n") % (key, pattern)
             )
             continue
 
@@ -908,7 +920,7 @@
             websubtable.append((regexp, format))
         except re.error:
             repo.ui.warn(
-                _("websub: invalid regexp for %s: %s\n") % (key, regexp)
+                _(b"websub: invalid regexp for %s: %s\n") % (key, regexp)
             )
     return websubtable
 
--- a/mercurial/hgweb/wsgicgi.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/hgweb/wsgicgi.py	Sun Oct 06 09:48:39 2019 -0400
@@ -24,7 +24,7 @@
     procutil.setbinary(procutil.stdout)
 
     environ = dict(os.environ.iteritems())  # re-exports
-    environ.setdefault(r'PATH_INFO', '')
+    environ.setdefault(r'PATH_INFO', r'')
     if environ.get(r'SERVER_SOFTWARE', r'').startswith(r'Microsoft-IIS'):
         # IIS includes script_name in PATH_INFO
         scriptname = environ[r'SCRIPT_NAME']
@@ -53,18 +53,18 @@
 
     def write(data):
         if not headers_set:
-            raise AssertionError("write() before start_response()")
+            raise AssertionError(b"write() before start_response()")
 
         elif not headers_sent:
             # Before the first output, send the stored headers
             status, response_headers = headers_sent[:] = headers_set
-            out.write('Status: %s\r\n' % pycompat.bytesurl(status))
+            out.write(b'Status: %s\r\n' % pycompat.bytesurl(status))
             for hk, hv in response_headers:
                 out.write(
-                    '%s: %s\r\n'
+                    b'%s: %s\r\n'
                     % (pycompat.bytesurl(hk), pycompat.bytesurl(hv))
                 )
-            out.write('\r\n')
+            out.write(b'\r\n')
 
         out.write(data)
         out.flush()
@@ -78,7 +78,7 @@
             finally:
                 exc_info = None  # avoid dangling circular ref
         elif headers_set:
-            raise AssertionError("Headers already set!")
+            raise AssertionError(b"Headers already set!")
 
         headers_set[:] = [status, response_headers]
         return write
@@ -88,6 +88,6 @@
         for chunk in content:
             write(chunk)
         if not headers_sent:
-            write('')  # send headers now if body was empty
+            write(b'')  # send headers now if body was empty
     finally:
         getattr(content, 'close', lambda: None)()
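
Note (annotation, not part of the patch): the trailing write(b'') above is
what forces the CGI headers out when the WSGI body is empty, because headers
are only flushed on the first write. A simplified model with assumed names:

    headers_sent = []
    out = []

    def write(data):
        if not headers_sent:
            headers_sent.append(True)
            out.append(b'Status: 200 OK\r\n\r\n')
        out.append(data)

    for chunk in []:  # an empty WSGI body
        write(chunk)
    if not headers_sent:
        write(b'')  # still emits the header block
    assert out == [b'Status: 200 OK\r\n\r\n', b'']
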
--- a/mercurial/hgweb/wsgiheaders.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/hgweb/wsgiheaders.py	Sun Oct 06 09:48:39 2019 -0400
@@ -22,10 +22,10 @@
     """
     if value is not None and len(value) > 0:
         if quote or tspecials.search(value):
-            value = value.replace('\\', '\\\\').replace('"', r'\"')
-            return '%s="%s"' % (param, value)
+            value = value.replace(b'\\', b'\\\\').replace(b'"', br'\"')
+            return b'%s="%s"' % (param, value)
         else:
-            return '%s=%s' % (param, value)
+            return b'%s=%s' % (param, value)
     else:
         return param
 
@@ -36,7 +36,7 @@
     def __init__(self, headers=None):
         headers = headers if headers is not None else []
         if type(headers) is not list:
-            raise TypeError("Headers must be a list of name/value tuples")
+            raise TypeError(b"Headers must be a list of name/value tuples")
         self._headers = headers
         if __debug__:
             for k, v in headers:
@@ -134,7 +134,9 @@
     def __str__(self):
         """str() returns the formatted headers, complete with end line,
         suitable for direct HTTP transmission."""
-        return '\r\n'.join(["%s: %s" % kv for kv in self._headers] + ['', ''])
+        return b'\r\n'.join(
+            [b"%s: %s" % kv for kv in self._headers] + [b'', b'']
+        )
 
     def __bytes__(self):
         return str(self).encode('iso-8859-1')
@@ -174,10 +176,10 @@
         for k, v in _params.items():
             k = self._convert_string_type(k)
             if v is None:
-                parts.append(k.replace('_', '-'))
+                parts.append(k.replace(b'_', b'-'))
             else:
                 v = self._convert_string_type(v)
-                parts.append(_formatparam(k.replace('_', '-'), v))
+                parts.append(_formatparam(k.replace(b'_', b'-'), v))
         self._headers.append(
-            (self._convert_string_type(_name), "; ".join(parts))
+            (self._convert_string_type(_name), b"; ".join(parts))
         )
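
Note (annotation, not part of the patch): _formatparam above only quotes a
value when it contains tspecials, escaping backslashes and double quotes. A
rough standalone sketch (the tspecials test is simplified here):

    def formatparam(param, value):
        if b' ' in value or b'"' in value:  # stand-in for tspecials.search()
            value = value.replace(b'\\', b'\\\\').replace(b'"', br'\"')
            return b'%s="%s"' % (param, value)
        return b'%s=%s' % (param, value)

    assert formatparam(b'filename', b'plain.txt') == b'filename=plain.txt'
    assert formatparam(b'filename', b'two words') == b'filename="two words"'
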
--- a/mercurial/hook.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/hook.py	Sun Oct 06 09:48:39 2019 -0400
@@ -39,10 +39,10 @@
         obj = funcname
         funcname = pycompat.sysbytes(obj.__module__ + r"." + obj.__name__)
     else:
-        d = funcname.rfind('.')
+        d = funcname.rfind(b'.')
         if d == -1:
             raise error.HookLoadError(
-                _('%s hook is invalid: "%s" not in a module')
+                _(b'%s hook is invalid: "%s" not in a module')
                 % (hname, funcname)
             )
         modname = funcname[:d]
@@ -66,77 +66,78 @@
                     if ui.tracebackflag:
                         ui.warn(
                             _(
-                                'exception from first failed import '
-                                'attempt:\n'
+                                b'exception from first failed import '
+                                b'attempt:\n'
                             )
                         )
                     ui.traceback(e1)
                     if ui.tracebackflag:
                         ui.warn(
                             _(
-                                'exception from second failed import '
-                                'attempt:\n'
+                                b'exception from second failed import '
+                                b'attempt:\n'
                             )
                         )
                     ui.traceback(e2)
 
                     if not ui.tracebackflag:
                         tracebackhint = _(
-                            'run with --traceback for stack trace'
+                            b'run with --traceback for stack trace'
                         )
                     else:
                         tracebackhint = None
                     raise error.HookLoadError(
-                        _('%s hook is invalid: import of "%s" failed')
+                        _(b'%s hook is invalid: import of "%s" failed')
                         % (hname, modname),
                         hint=tracebackhint,
                     )
         sys.path = oldpaths
         try:
-            for p in funcname.split('.')[1:]:
+            for p in funcname.split(b'.')[1:]:
                 obj = getattr(obj, p)
         except AttributeError:
             raise error.HookLoadError(
-                _('%s hook is invalid: "%s" is not defined') % (hname, funcname)
+                _(b'%s hook is invalid: "%s" is not defined')
+                % (hname, funcname)
             )
         if not callable(obj):
             raise error.HookLoadError(
-                _('%s hook is invalid: "%s" is not callable')
+                _(b'%s hook is invalid: "%s" is not callable')
                 % (hname, funcname)
             )
 
-    ui.note(_("calling hook %s: %s\n") % (hname, funcname))
+    ui.note(_(b"calling hook %s: %s\n") % (hname, funcname))
     starttime = util.timer()
 
     try:
         r = obj(ui=ui, repo=repo, hooktype=htype, **pycompat.strkwargs(args))
     except Exception as exc:
         if isinstance(exc, error.Abort):
-            ui.warn(_('error: %s hook failed: %s\n') % (hname, exc.args[0]))
+            ui.warn(_(b'error: %s hook failed: %s\n') % (hname, exc.args[0]))
         else:
             ui.warn(
-                _('error: %s hook raised an exception: ' '%s\n')
+                _(b'error: %s hook raised an exception: ' b'%s\n')
                 % (hname, stringutil.forcebytestr(exc))
             )
         if throw:
             raise
         if not ui.tracebackflag:
-            ui.warn(_('(run with --traceback for stack trace)\n'))
+            ui.warn(_(b'(run with --traceback for stack trace)\n'))
         ui.traceback()
         return True, True
     finally:
         duration = util.timer() - starttime
         ui.log(
-            'pythonhook',
-            'pythonhook-%s: %s finished in %0.2f seconds\n',
+            b'pythonhook',
+            b'pythonhook-%s: %s finished in %0.2f seconds\n',
             htype,
             funcname,
             duration,
         )
     if r:
         if throw:
-            raise error.HookAbort(_('%s hook failed') % hname)
-        ui.warn(_('warning: %s hook failed\n') % hname)
+            raise error.HookAbort(_(b'%s hook failed') % hname)
+        ui.warn(_(b'warning: %s hook failed\n') % hname)
     return r, False
 
 
@@ -149,35 +150,35 @@
         tr = repo.currenttransaction()
         repo.dirstate.write(tr)
         if tr and tr.writepending():
-            env['HG_PENDING'] = repo.root
-    env['HG_HOOKTYPE'] = htype
-    env['HG_HOOKNAME'] = name
+            env[b'HG_PENDING'] = repo.root
+    env[b'HG_HOOKTYPE'] = htype
+    env[b'HG_HOOKNAME'] = name
 
     for k, v in args.iteritems():
         if callable(v):
             v = v()
         if isinstance(v, (dict, list)):
             v = stringutil.pprint(v)
-        env['HG_' + k.upper()] = v
+        env[b'HG_' + k.upper()] = v
 
-    if ui.configbool('hooks', 'tonative.%s' % name, False):
+    if ui.configbool(b'hooks', b'tonative.%s' % name, False):
         oldcmd = cmd
         cmd = procutil.shelltonative(cmd, env)
         if cmd != oldcmd:
-            ui.note(_('converting hook "%s" to native\n') % name)
+            ui.note(_(b'converting hook "%s" to native\n') % name)
 
-    ui.note(_("running hook %s: %s\n") % (name, cmd))
+    ui.note(_(b"running hook %s: %s\n") % (name, cmd))
 
     if repo:
         cwd = repo.root
     else:
         cwd = encoding.getcwd()
-    r = ui.system(cmd, environ=env, cwd=cwd, blockedtag='exthook-%s' % (name,))
+    r = ui.system(cmd, environ=env, cwd=cwd, blockedtag=b'exthook-%s' % (name,))
 
     duration = util.timer() - starttime
     ui.log(
-        'exthook',
-        'exthook-%s: %s finished in %0.2f seconds\n',
+        b'exthook',
+        b'exthook-%s: %s finished in %0.2f seconds\n',
         name,
         cmd,
         duration,
@@ -185,8 +186,8 @@
     if r:
         desc = procutil.explainexit(r)
         if throw:
-            raise error.HookAbort(_('%s hook %s') % (name, desc))
-        ui.warn(_('warning: %s hook %s\n') % (name, desc))
+            raise error.HookAbort(_(b'%s hook %s') % (name, desc))
+        ui.warn(_(b'warning: %s hook %s\n') % (name, desc))
     return r
 
 
@@ -213,11 +214,11 @@
 def _hookitems(ui, _untrusted=False):
     """return all hooks items ready to be sorted"""
     hooks = {}
-    for name, cmd in ui.configitems('hooks', untrusted=_untrusted):
-        if name.startswith('priority.') or name.startswith('tonative.'):
+    for name, cmd in ui.configitems(b'hooks', untrusted=_untrusted):
+        if name.startswith(b'priority.') or name.startswith(b'tonative.'):
             continue
 
-        priority = ui.configint('hooks', 'priority.%s' % name, 0)
+        priority = ui.configint(b'hooks', b'priority.%s' % name, 0)
         hooks[name] = (-priority, len(hooks), name, cmd)
     return hooks
 
@@ -235,7 +236,7 @@
     if not ui.callhooks:
         return False
     for hname, cmd in _allhooks(ui):
-        if hname.split('.')[0] == htype and cmd:
+        if hname.split(b'.')[0] == htype and cmd:
             return True
     return False
 
@@ -246,7 +247,7 @@
 
     hooks = []
     for hname, cmd in _allhooks(ui):
-        if hname.split('.')[0] == htype and cmd:
+        if hname.split(b'.')[0] == htype and cmd:
             hooks.append((hname, cmd))
 
     res = runhooks(ui, repo, htype, hooks, throw=throw, **args)
@@ -279,24 +280,24 @@
             if cmd is _fromuntrusted:
                 if throw:
                     raise error.HookAbort(
-                        _('untrusted hook %s not executed') % hname,
-                        hint=_("see 'hg help config.trusted'"),
+                        _(b'untrusted hook %s not executed') % hname,
+                        hint=_(b"see 'hg help config.trusted'"),
                     )
-                ui.warn(_('warning: untrusted hook %s not executed\n') % hname)
+                ui.warn(_(b'warning: untrusted hook %s not executed\n') % hname)
                 r = 1
                 raised = False
             elif callable(cmd):
                 r, raised = pythonhook(ui, repo, htype, hname, cmd, args, throw)
-            elif cmd.startswith('python:'):
-                if cmd.count(':') >= 2:
-                    path, cmd = cmd[7:].rsplit(':', 1)
+            elif cmd.startswith(b'python:'):
+                if cmd.count(b':') >= 2:
+                    path, cmd = cmd[7:].rsplit(b':', 1)
                     path = util.expandpath(path)
                     if repo:
                         path = os.path.join(repo.root, path)
                     try:
-                        mod = extensions.loadpath(path, 'hghook.%s' % hname)
+                        mod = extensions.loadpath(path, b'hghook.%s' % hname)
                     except Exception:
-                        ui.write(_("loading %s hook failed:\n") % hname)
+                        ui.write(_(b"loading %s hook failed:\n") % hname)
                         raise
                     hookfn = getattr(mod, cmd)
                 else:
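
Note (annotation, not part of the patch): the dispatch above distinguishes
shell hooks from in-process b'python:' hooks. An illustrative hgrc and hook
function (paths and names are made up):

    # [hooks]
    # pretxncommit.check = ./check.sh                       # run via ui.system()
    # pretxncommit.lint = python:/path/to/lint.py:lintfunc  # loaded in-process

    def lintfunc(ui, repo, hooktype, **kwargs):
        ui.note(b'lint hook ran for %s\n' % hooktype)
        return False  # falsy means success; truthy aborts when throw is set
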
--- a/mercurial/httpconnection.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/httpconnection.py	Sun Oct 06 09:48:39 2019 -0400
@@ -44,7 +44,7 @@
         # once whether authentication will be required, just lie to
         # the user and maybe the push succeeds suddenly at 50%.
         self._progress = ui.makeprogress(
-            _('sending'), unit=_('kb'), total=(self.length // 1024 * 2)
+            _(b'sending'), unit=_(b'kb'), total=(self.length // 1024 * 2)
         )
 
     def read(self, *args, **kwargs):
@@ -68,30 +68,30 @@
     uri = pycompat.bytesurl(uri)
     # Read configuration
     groups = {}
-    for key, val in ui.configitems('auth'):
-        if key in ('cookiefile',):
+    for key, val in ui.configitems(b'auth'):
+        if key in (b'cookiefile',):
             continue
 
-        if '.' not in key:
-            ui.warn(_("ignoring invalid [auth] key '%s'\n") % key)
+        if b'.' not in key:
+            ui.warn(_(b"ignoring invalid [auth] key '%s'\n") % key)
             continue
-        group, setting = key.rsplit('.', 1)
+        group, setting = key.rsplit(b'.', 1)
         gdict = groups.setdefault(group, {})
-        if setting in ('username', 'cert', 'key'):
+        if setting in (b'username', b'cert', b'key'):
             val = util.expandpath(val)
         gdict[setting] = val
 
     # Find the best match
-    scheme, hostpath = uri.split('://', 1)
+    scheme, hostpath = uri.split(b'://', 1)
     bestuser = None
     bestlen = 0
     bestauth = None
     for group, auth in groups.iteritems():
-        if user and user != auth.get('username', user):
+        if user and user != auth.get(b'username', user):
             # If a username was set in the URI, the entry username
             # must either match it or be unset
             continue
-        prefix = auth.get('prefix')
+        prefix = auth.get(b'prefix')
         if not prefix:
             continue
 
@@ -106,26 +106,26 @@
         prefixurl.user = None
         prefix = bytes(prefixurl)
 
-        p = prefix.split('://', 1)
+        p = prefix.split(b'://', 1)
         if len(p) > 1:
             schemes, prefix = [p[0]], p[1]
         else:
-            schemes = (auth.get('schemes') or 'https').split()
+            schemes = (auth.get(b'schemes') or b'https').split()
         if (
-            (prefix == '*' or hostpath.startswith(prefix))
+            (prefix == b'*' or hostpath.startswith(prefix))
             and (
                 len(prefix) > bestlen
                 or (
                     len(prefix) == bestlen
                     and not bestuser
-                    and 'username' in auth
+                    and b'username' in auth
                 )
             )
             and scheme in schemes
         ):
             bestlen = len(prefix)
             bestauth = group, auth
-            bestuser = auth.get('username')
+            bestuser = auth.get(b'username')
             if user and not bestuser:
-                auth['username'] = user
+                auth[b'username'] = user
     return bestauth
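
Note (annotation, not part of the patch): the loop above reads [auth] keys of
the form group.setting, then picks the group whose prefix is the longest
match for the request URI; a username embedded in the URI must agree with the
entry's username, if any. An illustrative configuration (values made up):

    [auth]
    example.prefix = hg.example.com/repo
    example.username = alice
    example.schemes = https
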
--- a/mercurial/httppeer.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/httppeer.py	Sun Oct 06 09:48:39 2019 -0400
@@ -62,7 +62,7 @@
     # and using an r-string to make it portable between Python 2 and 3
     # doesn't work because then the \r is a literal backslash-r
     # instead of a carriage return.
-    valuelen = limit - len(fmt % r'000') - len(': \r\n')
+    valuelen = limit - len(fmt % r'000') - len(b': \r\n')
     result = []
 
     n = 0
@@ -76,10 +76,10 @@
 class _multifile(object):
     def __init__(self, *fileobjs):
         for f in fileobjs:
-            if not util.safehasattr(f, 'length'):
+            if not util.safehasattr(f, b'length'):
                 raise ValueError(
-                    '_multifile only supports file objects that '
-                    'have a length but this one does not:',
+                    b'_multifile only supports file objects that '
+                    b'have a length but this one does not:',
                     type(f),
                     f,
                 )
@@ -92,7 +92,7 @@
 
     def read(self, amt=None):
         if amt <= 0:
-            return ''.join(f.read() for f in self._fileobjs)
+            return b''.join(f.read() for f in self._fileobjs)
         parts = []
         while amt and self._index < len(self._fileobjs):
             parts.append(self._fileobjs[self._index].read(amt))
@@ -100,18 +100,18 @@
             if got < amt:
                 self._index += 1
             amt -= got
-        return ''.join(parts)
+        return b''.join(parts)
 
     def seek(self, offset, whence=os.SEEK_SET):
         if whence != os.SEEK_SET:
             raise NotImplementedError(
-                '_multifile does not support anything other'
-                ' than os.SEEK_SET for whence on seek()'
+                b'_multifile does not support anything other'
+                b' than os.SEEK_SET for whence on seek()'
             )
         if offset != 0:
             raise NotImplementedError(
-                '_multifile only supports seeking to start, but that '
-                'could be fixed if you need it'
+                b'_multifile only supports seeking to start, but that '
+                b'could be fixed if you need it'
             )
         for f in self._fileobjs:
             f.seek(0)
@@ -131,18 +131,18 @@
     ``cmd``, ``args``, and ``data`` define the command, its arguments, and
     raw data to pass to it.
     """
-    if cmd == 'pushkey':
-        args['data'] = ''
-    data = args.pop('data', None)
-    headers = args.pop('headers', {})
+    if cmd == b'pushkey':
+        args[b'data'] = b''
+    data = args.pop(b'data', None)
+    headers = args.pop(b'headers', {})
 
-    ui.debug("sending %s command\n" % cmd)
-    q = [('cmd', cmd)]
+    ui.debug(b"sending %s command\n" % cmd)
+    q = [(b'cmd', cmd)]
     headersize = 0
     # Important: don't use self.capable() here or else you end up
     # with infinite recursion when trying to look up capabilities
     # for the first time.
-    postargsok = caps is not None and 'httppostargs' in caps
+    postargsok = caps is not None and b'httppostargs' in caps
 
     # Send arguments via POST.
     if postargsok and args:
@@ -162,27 +162,27 @@
         # Calling self.capable() can infinite loop if we are calling
         # "capabilities". But that command should never accept wire
         # protocol arguments. So this should never happen.
-        assert cmd != 'capabilities'
-        httpheader = capablefn('httpheader')
+        assert cmd != b'capabilities'
+        httpheader = capablefn(b'httpheader')
         if httpheader:
-            headersize = int(httpheader.split(',', 1)[0])
+            headersize = int(httpheader.split(b',', 1)[0])
 
         # Send arguments via HTTP headers.
         if headersize > 0:
             # The headers can typically carry more data than the URL.
             encargs = urlreq.urlencode(sorted(args.items()))
             for header, value in encodevalueinheaders(
-                encargs, 'X-HgArg', headersize
+                encargs, b'X-HgArg', headersize
             ):
                 headers[header] = value
         # Send arguments via query string (Mercurial <1.9).
         else:
             q += sorted(args.items())
 
-    qs = '?%s' % urlreq.urlencode(q)
-    cu = "%s%s" % (repobaseurl, qs)
+    qs = b'?%s' % urlreq.urlencode(q)
+    cu = b"%s%s" % (repobaseurl, qs)
     size = 0
-    if util.safehasattr(data, 'length'):
+    if util.safehasattr(data, b'length'):
         size = data.length
     elif data is not None:
         size = len(data)
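
Note (annotation, not part of the patch): when the server advertises a header
size, long argument strings are spread across numbered X-HgArg-N headers
instead of the query string. A rough sketch of that splitting, with assumed
helper names and a %d-based format for brevity:

    def encodeinheaders(value, header, limit):
        fmt = header + b'-%d'
        chunk = max(1, limit - len(fmt % 99) - len(b': \r\n'))
        return [(fmt % i, value[o:o + chunk])
                for i, o in enumerate(range(0, len(value), chunk), 1)]

    hdrs = encodeinheaders(b'cmds=heads+known', b'X-HgArg', 20)
    assert all(k.startswith(b'X-HgArg-') for k, _v in hdrs)
    assert b''.join(v for _k, v in hdrs) == b'cmds=heads+known'
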
@@ -198,17 +198,17 @@
 
     mediatypes = set()
     if caps is not None:
-        mt = capablefn('httpmediatype')
+        mt = capablefn(b'httpmediatype')
         if mt:
-            protoparams.add('0.1')
-            mediatypes = set(mt.split(','))
+            protoparams.add(b'0.1')
+            mediatypes = set(mt.split(b','))
 
-        protoparams.add('partial-pull')
+        protoparams.add(b'partial-pull')
 
-    if '0.2tx' in mediatypes:
-        protoparams.add('0.2')
+    if b'0.2tx' in mediatypes:
+        protoparams.add(b'0.2')
 
-    if '0.2tx' in mediatypes and capablefn('compression'):
+    if b'0.2tx' in mediatypes and capablefn(b'compression'):
         # We /could/ compare supported compression formats and prune
         # non-mutually supported or error if nothing is mutually supported.
         # For now, send the full list to the server and have it error.
@@ -216,11 +216,11 @@
             e.wireprotosupport().name
             for e in util.compengines.supportedwireengines(util.CLIENTROLE)
         ]
-        protoparams.add('comp=%s' % ','.join(comps))
+        protoparams.add(b'comp=%s' % b','.join(comps))
 
     if protoparams:
         protoheaders = encodevalueinheaders(
-            ' '.join(sorted(protoparams)), 'X-HgProto', headersize or 1024
+            b' '.join(sorted(protoparams)), b'X-HgProto', headersize or 1024
         )
         for header, value in protoheaders:
             headers[header] = value
@@ -236,7 +236,7 @@
     req = requestbuilder(pycompat.strurl(cu), data, headers)
 
     if data is not None:
-        ui.debug("sending %d bytes\n" % size)
+        ui.debug(b"sending %d bytes\n" % size)
         req.add_unredirected_header(r'Content-Length', r'%d' % size)
 
     return req, cu, qs
@@ -257,11 +257,11 @@
     Returns the response object.
     """
     dbg = ui.debug
-    if ui.debugflag and ui.configbool('devel', 'debug.peer-request'):
-        line = 'devel-peer-request: %s\n'
+    if ui.debugflag and ui.configbool(b'devel', b'debug.peer-request'):
+        line = b'devel-peer-request: %s\n'
         dbg(
             line
-            % '%s %s'
+            % b'%s %s'
             % (
                 pycompat.bytesurl(req.get_method()),
                 pycompat.bytesurl(req.get_full_url()),
@@ -272,17 +272,17 @@
         for header, value in sorted(req.header_items()):
             header = pycompat.bytesurl(header)
             value = pycompat.bytesurl(value)
-            if header.startswith('X-hgarg-'):
+            if header.startswith(b'X-hgarg-'):
                 if hgargssize is None:
                     hgargssize = 0
                 hgargssize += len(value)
             else:
-                dbg(line % '  %s %s' % (header, value))
+                dbg(line % b'  %s %s' % (header, value))
 
         if hgargssize is not None:
             dbg(
                 line
-                % '  %d bytes of commands arguments in headers'
+                % b'  %d bytes of commands arguments in headers'
                 % hgargssize
             )
         data = _reqdata(req)
@@ -290,7 +290,7 @@
             length = getattr(data, 'length', None)
             if length is None:
                 length = len(data)
-            dbg(line % '  %d bytes of data' % length)
+            dbg(line % b'  %d bytes of data' % length)
 
         start = util.timer()
 
@@ -299,20 +299,21 @@
         res = opener.open(req)
     except urlerr.httperror as inst:
         if inst.code == 401:
-            raise error.Abort(_('authorization failed'))
+            raise error.Abort(_(b'authorization failed'))
         raise
     except httplib.HTTPException as inst:
         ui.debug(
-            'http error requesting %s\n' % util.hidepassword(req.get_full_url())
+            b'http error requesting %s\n'
+            % util.hidepassword(req.get_full_url())
         )
         ui.traceback()
         raise IOError(None, inst)
     finally:
-        if ui.debugflag and ui.configbool('devel', 'debug.peer-request'):
+        if ui.debugflag and ui.configbool(b'devel', b'debug.peer-request'):
             code = res.code if res else -1
             dbg(
                 line
-                % '  finished in %.4f seconds (%d)'
+                % b'  finished in %.4f seconds (%d)'
                 % (util.timer() - start, code)
             )
 
@@ -340,10 +341,10 @@
     else:
         qsdropped = True
 
-    if baseurl.rstrip('/') != respurl.rstrip('/'):
+    if baseurl.rstrip(b'/') != respurl.rstrip(b'/'):
         redirected = True
         if not ui.quiet:
-            ui.warn(_('real URL is %s\n') % respurl)
+            ui.warn(_(b'real URL is %s\n') % respurl)
 
     try:
         proto = pycompat.bytesurl(resp.getheader(r'content-type', r''))
@@ -351,17 +352,17 @@
         proto = pycompat.bytesurl(resp.headers.get(r'content-type', r''))
 
     safeurl = util.hidepassword(baseurl)
-    if proto.startswith('application/hg-error'):
+    if proto.startswith(b'application/hg-error'):
         raise error.OutOfBandError(resp.read())
 
     # Pre 1.0 versions of Mercurial used text/plain and
     # application/hg-changegroup. We don't support such old servers.
-    if not proto.startswith('application/mercurial-'):
-        ui.debug("requested URL: '%s'\n" % util.hidepassword(requrl))
+    if not proto.startswith(b'application/mercurial-'):
+        ui.debug(b"requested URL: '%s'\n" % util.hidepassword(requrl))
         msg = _(
-            "'%s' does not appear to be an hg repository:\n"
-            "---%%<--- (%s)\n%s\n---%%<---\n"
-        ) % (safeurl, proto or 'no content-type', resp.read(1024))
+            b"'%s' does not appear to be an hg repository:\n"
+            b"---%%<--- (%s)\n%s\n---%%<---\n"
+        ) % (safeurl, proto or b'no content-type', resp.read(1024))
 
         # Some servers may strip the query string from the redirect. We
         # raise a special error type so callers can react to this specially.
@@ -371,23 +372,23 @@
             raise error.RepoError(msg)
 
     try:
-        subtype = proto.split('-', 1)[1]
+        subtype = proto.split(b'-', 1)[1]
 
         # Unless we end up supporting CBOR in the legacy wire protocol,
         # this should ONLY be encountered for the initial capabilities
         # request during handshake.
-        if subtype == 'cbor':
+        if subtype == b'cbor':
             if allowcbor:
                 return respurl, proto, resp
             else:
                 raise error.RepoError(
-                    _('unexpected CBOR response from ' 'server')
+                    _(b'unexpected CBOR response from ' b'server')
                 )
 
-        version_info = tuple([int(n) for n in subtype.split('.')])
+        version_info = tuple([int(n) for n in subtype.split(b'.')])
     except ValueError:
         raise error.RepoError(
-            _("'%s' sent a broken Content-Type " "header (%s)")
+            _(b"'%s' sent a broken Content-Type " b"header (%s)")
             % (safeurl, proto)
         )
 
@@ -395,19 +396,19 @@
     # generators.
     if version_info == (0, 1):
         if compressible:
-            resp = util.compengines['zlib'].decompressorreader(resp)
+            resp = util.compengines[b'zlib'].decompressorreader(resp)
 
     elif version_info == (0, 2):
         # application/mercurial-0.2 always identifies the compression
         # engine in the payload header.
-        elen = struct.unpack('B', util.readexactly(resp, 1))[0]
+        elen = struct.unpack(b'B', util.readexactly(resp, 1))[0]
         ename = util.readexactly(resp, elen)
         engine = util.compengines.forwiretype(ename)
 
         resp = engine.decompressorreader(resp)
     else:
         raise error.RepoError(
-            _("'%s' uses newer protocol %s") % (safeurl, subtype)
+            _(b"'%s' uses newer protocol %s") % (safeurl, subtype)
         )
 
     return respurl, proto, resp
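
Note (annotation, not part of the patch): the wire version parsed above is
simply whatever follows the first b'-' in the application/mercurial-X.Y
media type:

    proto = b'application/mercurial-0.2'
    subtype = proto.split(b'-', 1)[1]
    version_info = tuple(int(n) for n in subtype.split(b'.'))
    assert version_info == (0, 2)
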
@@ -419,7 +420,7 @@
         self._path = path
         self._url = url
         self._caps = caps
-        self.limitedarguments = caps is not None and 'httppostargs' not in caps
+        self.limitedarguments = caps is not None and b'httppostargs' not in caps
         self._urlopener = opener
         self._requestbuilder = requestbuilder
 
@@ -453,8 +454,8 @@
             return
         self.ui.note(
             _(
-                '(sent %d HTTP requests and %d bytes; '
-                'received %d bytes in responses)\n'
+                b'(sent %d HTTP requests and %d bytes; '
+                b'received %d bytes in responses)\n'
             )
             % (reqs, sent, recv)
         )
@@ -501,28 +502,28 @@
         # have to stream bundle to a temp file because we do not have
         # http 1.1 chunked transfer.
 
-        types = self.capable('unbundle')
+        types = self.capable(b'unbundle')
         try:
-            types = types.split(',')
+            types = types.split(b',')
         except AttributeError:
             # servers older than d1b16a746db6 will send 'unbundle' as a
             # boolean capability. They only support headerless/uncompressed
             # bundles.
-            types = [""]
+            types = [b""]
         for x in types:
             if x in bundle2.bundletypes:
                 type = x
                 break
 
         tempname = bundle2.writebundle(self.ui, cg, None, type)
-        fp = httpconnection.httpsendfile(self.ui, tempname, "rb")
+        fp = httpconnection.httpsendfile(self.ui, tempname, b"rb")
         headers = {r'Content-Type': r'application/mercurial-0.1'}
 
         try:
             r = self._call(cmd, data=fp, headers=headers, **args)
-            vals = r.split('\n', 1)
+            vals = r.split(b'\n', 1)
             if len(vals) < 2:
-                raise error.ResponseError(_("unexpected response:"), r)
+                raise error.ResponseError(_(b"unexpected response:"), r)
             return vals
         except urlerr.httperror:
             # Catch and re-raise these so we don't try and treat them
@@ -531,7 +532,7 @@
             raise
         except socket.error as err:
             if err.args[0] in (errno.ECONNRESET, errno.EPIPE):
-                raise error.Abort(_('push failed: %s') % err.args[1])
+                raise error.Abort(_(b'push failed: %s') % err.args[1])
             raise error.Abort(err.args[1])
         finally:
             fp.close()
@@ -541,14 +542,14 @@
         filename = None
         try:
             # dump bundle to disk
-            fd, filename = pycompat.mkstemp(prefix="hg-bundle-", suffix=".hg")
+            fd, filename = pycompat.mkstemp(prefix=b"hg-bundle-", suffix=b".hg")
             with os.fdopen(fd, r"wb") as fh:
                 d = fp.read(4096)
                 while d:
                     fh.write(d)
                     d = fp.read(4096)
             # start http push
-            with httpconnection.httpsendfile(self.ui, filename, "rb") as fp_:
+            with httpconnection.httpsendfile(self.ui, filename, b"rb") as fp_:
                 headers = {r'Content-Type': r'application/mercurial-0.1'}
                 return self._callstream(cmd, data=fp_, headers=headers, **args)
         finally:
@@ -598,17 +599,17 @@
         ui, reactor, opener=opener, requestbuilder=requestbuilder
     )
 
-    url = '%s/%s' % (apiurl, permission)
+    url = b'%s/%s' % (apiurl, permission)
 
     if len(requests) > 1:
-        url += '/multirequest'
+        url += b'/multirequest'
     else:
-        url += '/%s' % requests[0][0]
+        url += b'/%s' % requests[0][0]
 
-    ui.debug('sending %d commands\n' % len(requests))
+    ui.debug(b'sending %d commands\n' % len(requests))
     for command, args, f in requests:
         ui.debug(
-            'sending command %s: %s\n'
+            b'sending command %s: %s\n'
             % (command, stringutil.pprint(args, indent=2))
         )
         assert not list(
@@ -631,7 +632,7 @@
         res = opener.open(req)
     except urlerr.httperror as e:
         if e.code == 401:
-            raise error.Abort(_('authorization failed'))
+            raise error.Abort(_(b'authorization failed'))
 
         raise
     except httplib.HTTPException as e:
@@ -683,32 +684,32 @@
     def callcommand(self, command, args):
         if self._sent:
             raise error.ProgrammingError(
-                'callcommand() cannot be used after ' 'commands are sent'
+                b'callcommand() cannot be used after ' b'commands are sent'
             )
 
         if self._closed:
             raise error.ProgrammingError(
-                'callcommand() cannot be used after ' 'close()'
+                b'callcommand() cannot be used after ' b'close()'
             )
 
         # The service advertises which commands are available. So if we attempt
         # to call an unknown command or pass an unknown argument, we can screen
         # for this.
-        if command not in self._descriptor['commands']:
+        if command not in self._descriptor[b'commands']:
             raise error.ProgrammingError(
-                'wire protocol command %s is not available' % command
+                b'wire protocol command %s is not available' % command
             )
 
-        cmdinfo = self._descriptor['commands'][command]
-        unknownargs = set(args.keys()) - set(cmdinfo.get('args', {}))
+        cmdinfo = self._descriptor[b'commands'][command]
+        unknownargs = set(args.keys()) - set(cmdinfo.get(b'args', {}))
 
         if unknownargs:
             raise error.ProgrammingError(
-                'wire protocol command %s does not accept argument: %s'
-                % (command, ', '.join(sorted(unknownargs)))
+                b'wire protocol command %s does not accept argument: %s'
+                % (command, b', '.join(sorted(unknownargs)))
             )
 
-        self._neededpermissions |= set(cmdinfo['permissions'])
+        self._neededpermissions |= set(cmdinfo[b'permissions'])
 
         # TODO we /could/ also validate types here, since the API descriptor
         # includes types...
@@ -756,16 +757,16 @@
 
         permissions = set(self._neededpermissions)
 
-        if 'push' in permissions and 'pull' in permissions:
-            permissions.remove('pull')
+        if b'push' in permissions and b'pull' in permissions:
+            permissions.remove(b'pull')
 
         if len(permissions) > 1:
             raise error.RepoError(
-                _('cannot make request requiring multiple ' 'permissions: %s')
-                % _(', ').join(sorted(permissions))
+                _(b'cannot make request requiring multiple ' b'permissions: %s')
+                % _(b', ').join(sorted(permissions))
             )
 
-        permission = {'push': 'rw', 'pull': 'ro',}[permissions.pop()]
+        permission = {b'push': b'rw', b'pull': b'ro',}[permissions.pop()]
 
         handler, resp = sendv2request(
             self._ui,
@@ -809,7 +810,7 @@
             for f in self._futures:
                 if not f.done():
                     f.set_exception(
-                        error.ResponseError(_('unfulfilled command response'))
+                        error.ResponseError(_(b'unfulfilled command response'))
                     )
 
             self._futures = None
@@ -832,12 +833,12 @@
         self.ui = ui
         self.apidescriptor = apidescriptor
 
-        if repourl.endswith('/'):
+        if repourl.endswith(b'/'):
             repourl = repourl[:-1]
 
         self._url = repourl
         self._apipath = apipath
-        self._apiurl = '%s/%s' % (repourl, apipath)
+        self._apiurl = b'%s/%s' % (repourl, apipath)
         self._opener = opener
         self._requestbuilder = requestbuilder
 
@@ -861,8 +862,8 @@
     def close(self):
         self.ui.note(
             _(
-                '(sent %d HTTP requests and %d bytes; '
-                'received %d bytes in responses)\n'
+                b'(sent %d HTTP requests and %d bytes; '
+                b'received %d bytes in responses)\n'
             )
             % (
                 self._opener.requestscount,
@@ -881,16 +882,22 @@
         # version 2 of that command works differently.
 
         # Maps to commands that are available.
-        if name in ('branchmap', 'getbundle', 'known', 'lookup', 'pushkey'):
+        if name in (
+            b'branchmap',
+            b'getbundle',
+            b'known',
+            b'lookup',
+            b'pushkey',
+        ):
             return True
 
         # Other concepts.
-        if name in 'bundle2':
+        if name == b'bundle2':
             return True
 
         # Alias command-* to presence of command of that name.
-        if name.startswith('command-'):
-            return name[len('command-') :] in self.apidescriptor['commands']
+        if name.startswith(b'command-'):
+            return name[len(b'command-') :] in self.apidescriptor[b'commands']
 
         return False
 
@@ -900,8 +907,8 @@
 
         raise error.CapabilityError(
             _(
-                'cannot %s; client or remote repository does not support the '
-                '\'%s\' capability'
+                b'cannot %s; client or remote repository does not support the '
+                b'\'%s\' capability'
             )
             % (purpose, name)
         )
@@ -935,7 +942,7 @@
 #    Integer priority for the service. If we could choose from multiple
 #    services, we choose the one with the highest priority.
 API_PEERS = {
-    wireprototypes.HTTP_WIREPROTO_V2: {'init': httpv2peer, 'priority': 50,},
+    wireprototypes.HTTP_WIREPROTO_V2: {b'init': httpv2peer, b'priority': 50,},
 }
 
 
@@ -945,7 +952,7 @@
     caps = None
 
     def capable(x):
-        raise error.ProgrammingError('should not be called')
+        raise error.ProgrammingError(b'should not be called')
 
     args = {}
 
@@ -954,17 +961,17 @@
     # X-HgProto-* header advertising which serializing formats it supports.
     # We only support the HTTP version 2 transport and CBOR responses for
     # now.
-    advertisev2 = ui.configbool('experimental', 'httppeer.advertise-v2')
+    advertisev2 = ui.configbool(b'experimental', b'httppeer.advertise-v2')
 
     if advertisev2:
-        args['headers'] = {
+        args[b'headers'] = {
             r'X-HgProto-1': r'cbor',
         }
 
-        args['headers'].update(
+        args[b'headers'].update(
             encodevalueinheaders(
-                ' '.join(sorted(API_PEERS)),
-                'X-HgUpgrade',
+                b' '.join(sorted(API_PEERS)),
+                b'X-HgUpgrade',
                 # We don't know the header limit this early.
                 # So make it small.
                 1024,
@@ -972,7 +979,7 @@
         )
 
     req, requrl, qs = makev1commandrequest(
-        ui, requestbuilder, caps, capable, url, 'capabilities', args
+        ui, requestbuilder, caps, capable, url, b'capabilities', args
     )
     resp = sendrequest(ui, opener, req)
 
@@ -994,7 +1001,7 @@
         )
     except RedirectedRepoError as e:
         req, requrl, qs = makev1commandrequest(
-            ui, requestbuilder, caps, capable, e.respurl, 'capabilities', args
+            ui, requestbuilder, caps, capable, e.respurl, b'capabilities', args
         )
         resp = sendrequest(ui, opener, req)
         respurl, ct, resp = parsev1commandresponse(
@@ -1006,32 +1013,32 @@
     finally:
         resp.close()
 
-    if not ct.startswith('application/mercurial-'):
-        raise error.ProgrammingError('unexpected content-type: %s' % ct)
+    if not ct.startswith(b'application/mercurial-'):
+        raise error.ProgrammingError(b'unexpected content-type: %s' % ct)
 
     if advertisev2:
-        if ct == 'application/mercurial-cbor':
+        if ct == b'application/mercurial-cbor':
             try:
                 info = cborutil.decodeall(rawdata)[0]
             except cborutil.CBORDecodeError:
                 raise error.Abort(
-                    _('error decoding CBOR from remote server'),
+                    _(b'error decoding CBOR from remote server'),
                     hint=_(
-                        'try again and consider contacting '
-                        'the server operator'
+                        b'try again and consider contacting '
+                        b'the server operator'
                     ),
                 )
 
         # We got a legacy response. That's fine.
-        elif ct in ('application/mercurial-0.1', 'application/mercurial-0.2'):
-            info = {'v1capabilities': set(rawdata.split())}
+        elif ct in (b'application/mercurial-0.1', b'application/mercurial-0.2'):
+            info = {b'v1capabilities': set(rawdata.split())}
 
         else:
             raise error.RepoError(
-                _('unexpected response type from server: %s') % ct
+                _(b'unexpected response type from server: %s') % ct
             )
     else:
-        info = {'v1capabilities': set(rawdata.split())}
+        info = {b'v1capabilities': set(rawdata.split())}
 
     return respurl, info
 
@@ -1048,12 +1055,12 @@
     u = util.url(path)
     if u.query or u.fragment:
         raise error.Abort(
-            _('unsupported URL component: "%s"') % (u.query or u.fragment)
+            _(b'unsupported URL component: "%s"') % (u.query or u.fragment)
         )
 
     # urllib cannot handle URLs with embedded user or passwd.
     url, authinfo = u.authinfo()
-    ui.debug('using %s\n' % url)
+    ui.debug(b'using %s\n' % url)
 
     opener = opener or urlmod.opener(ui, authinfo)
 
@@ -1068,32 +1075,32 @@
     # capabilities, we could filter out services not meeting the
     # requirements. Possibly by consulting the interfaces defined by the
     # peer type.
-    apipeerchoices = set(info.get('apis', {}).keys()) & set(API_PEERS.keys())
+    apipeerchoices = set(info.get(b'apis', {}).keys()) & set(API_PEERS.keys())
 
     preferredchoices = sorted(
-        apipeerchoices, key=lambda x: API_PEERS[x]['priority'], reverse=True
+        apipeerchoices, key=lambda x: API_PEERS[x][b'priority'], reverse=True
     )
 
     for service in preferredchoices:
-        apipath = '%s/%s' % (info['apibase'].rstrip('/'), service)
+        apipath = b'%s/%s' % (info[b'apibase'].rstrip(b'/'), service)
 
-        return API_PEERS[service]['init'](
-            ui, respurl, apipath, opener, requestbuilder, info['apis'][service]
+        return API_PEERS[service][b'init'](
+            ui, respurl, apipath, opener, requestbuilder, info[b'apis'][service]
         )
 
     # Failed to construct an API peer. Fall back to legacy.
     return httppeer(
-        ui, path, respurl, opener, requestbuilder, info['v1capabilities']
+        ui, path, respurl, opener, requestbuilder, info[b'v1capabilities']
     )
 
 
 def instance(ui, path, create, intents=None, createopts=None):
     if create:
-        raise error.Abort(_('cannot create new http repository'))
+        raise error.Abort(_(b'cannot create new http repository'))
     try:
-        if path.startswith('https:') and not urlmod.has_https:
+        if path.startswith(b'https:') and not urlmod.has_https:
             raise error.Abort(
-                _('Python support for SSL and HTTPS ' 'is not installed')
+                _(b'Python support for SSL and HTTPS ' b'is not installed')
             )
 
         inst = makepeer(ui, path)
@@ -1101,8 +1108,8 @@
         return inst
     except error.RepoError as httpexception:
         try:
-            r = statichttprepo.instance(ui, "static-" + path, create)
-            ui.note(_('(falling back to static-http)\n'))
+            r = statichttprepo.instance(ui, b"static-" + path, create)
+            ui.note(_(b'(falling back to static-http)\n'))
             return r
         except error.RepoError:
             raise httpexception  # use the original http RepoError instead
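
Note (annotation, not part of the patch): makepeer() above always starts with
a ?cmd=capabilities probe, optionally advertising upgrade candidates through
X-HgUpgrade-* and X-HgProto-* headers. A minimal sketch of the query string
it builds:

    try:
        from urllib.parse import urlencode  # Python 3
    except ImportError:
        from urllib import urlencode  # Python 2

    qs = b'?' + urlencode([(r'cmd', r'capabilities')]).encode('ascii')
    assert qs == b'?cmd=capabilities'
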
--- a/mercurial/i18n.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/i18n.py	Sun Oct 06 09:48:39 2019 -0400
@@ -26,10 +26,10 @@
 _languages = None
 if (
     pycompat.iswindows
-    and 'LANGUAGE' not in encoding.environ
-    and 'LC_ALL' not in encoding.environ
-    and 'LC_MESSAGES' not in encoding.environ
-    and 'LANG' not in encoding.environ
+    and b'LANGUAGE' not in encoding.environ
+    and b'LC_ALL' not in encoding.environ
+    and b'LC_MESSAGES' not in encoding.environ
+    and b'LANG' not in encoding.environ
 ):
     # Try to detect UI language by "User Interface Language Management" API
     # if no locale variables are set. Note that locale.getdefaultlocale()
@@ -83,7 +83,7 @@
         else:
             # should be ascii, but we have unicode docstrings in test, which
             # are converted to utf-8 bytes on Python 3.
-            paragraphs = [p.decode("utf-8") for p in message.split('\n\n')]
+            paragraphs = [p.decode("utf-8") for p in message.split(b'\n\n')]
         # Be careful not to translate the empty string -- it holds the
         # meta data of the .po file.
         u = u'\n\n'.join([p and _ugettext(p) or u'' for p in paragraphs])
@@ -103,12 +103,12 @@
 
 def _plain():
     if (
-        'HGPLAIN' not in encoding.environ
-        and 'HGPLAINEXCEPT' not in encoding.environ
+        b'HGPLAIN' not in encoding.environ
+        and b'HGPLAINEXCEPT' not in encoding.environ
     ):
         return False
-    exceptions = encoding.environ.get('HGPLAINEXCEPT', '').strip().split(',')
-    return 'i18n' not in exceptions
+    exceptions = encoding.environ.get(b'HGPLAINEXCEPT', b'').strip().split(b',')
+    return b'i18n' not in exceptions
 
 
 if _plain():
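
Note (annotation, not part of the patch): the _plain() gate above disables
translation under HGPLAIN unless HGPLAINEXCEPT lists the i18n feature. A
standalone restatement against a plain dict:

    def plain(environ):
        if (b'HGPLAIN' not in environ
                and b'HGPLAINEXCEPT' not in environ):
            return False
        exceptions = environ.get(b'HGPLAINEXCEPT', b'').strip().split(b',')
        return b'i18n' not in exceptions

    assert plain({}) is False
    assert plain({b'HGPLAIN': b'1'}) is True
    assert plain({b'HGPLAINEXCEPT': b'i18n'}) is False
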
--- a/mercurial/interfaces/dirstate.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/interfaces/dirstate.py	Sun Oct 06 09:48:39 2019 -0400
@@ -24,7 +24,7 @@
         """Return a list of files containing patterns to ignore."""
 
     def _ignorefileandline(f):
-        "Given a file `f`, return the ignore file and line that ignores it."
+        b"Given a file `f`, return the ignore file and line that ignores it."
 
     _checklink = interfaceutil.Attribute("""Callable for checking symlinks.""")
     _checkexec = interfaceutil.Attribute("""Callable for checking exec bits.""")
--- a/mercurial/interfaces/repository.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/interfaces/repository.py	Sun Oct 06 09:48:39 2019 -0400
@@ -13,7 +13,7 @@
 
 # When narrowing is finalized and no longer subject to format changes,
 # we should move this to just "narrow" or similar.
-NARROW_REQUIREMENT = 'narrowhg-experimental'
+NARROW_REQUIREMENT = b'narrowhg-experimental'
 
 # Local repository feature string.
 
@@ -352,7 +352,7 @@
         if name in caps:
             return True
 
-        name = '%s=' % name
+        name = b'%s=' % name
         for cap in caps:
             if cap.startswith(name):
                 return cap[len(name) :]
@@ -365,8 +365,8 @@
 
         raise error.CapabilityError(
             _(
-                'cannot %s; remote repository does not support the '
-                '\'%s\' capability'
+                b'cannot %s; remote repository does not support the '
+                b'\'%s\' capability'
             )
             % (purpose, name)
         )
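
Note (annotation, not part of the patch): capable() above encodes a three-way
convention: True for a bare capability, the bytes after b'=' for a valued
one, False otherwise. A self-contained sketch:

    def capable(caps, name):
        if name in caps:
            return True
        prefix = b'%s=' % name
        for cap in caps:
            if cap.startswith(prefix):
                return cap[len(prefix):]
        return False

    caps = {b'branchmap', b'unbundle=HG10GZ,HG10BZ,HG10UN'}
    assert capable(caps, b'branchmap') is True
    assert capable(caps, b'unbundle') == b'HG10GZ,HG10BZ,HG10UN'
    assert capable(caps, b'rebase') is False
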
@@ -1025,7 +1025,7 @@
     def get(path, default=None):
         """Obtain the node value for a path or a default value if missing."""
 
-    def flags(path, default=''):
+    def flags(path, default=b''):
         """Return the flags value for a path or a default value if missing."""
 
     def copy():
@@ -1746,7 +1746,7 @@
         pass
 
     def commit(
-        text='',
+        text=b'',
         user=None,
         date=None,
         match=None,
@@ -1766,7 +1766,7 @@
         """Inform the repository that nodes have been destroyed."""
 
     def status(
-        node1='.',
+        node1=b'.',
         node2=None,
         match=None,
         ignored=False,
--- a/mercurial/interfaces/util.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/interfaces/util.py	Sun Oct 06 09:48:39 2019 -0400
@@ -13,7 +13,7 @@
 
 from .. import encoding
 
-if encoding.environ.get('HGREALINTERFACES'):
+if encoding.environ.get(b'HGREALINTERFACES'):
     from ..thirdparty.zope import interface as zi
 
     Attribute = zi.Attribute
@@ -22,7 +22,7 @@
 else:
 
     class Attribute(object):
-        def __init__(self, __name__, __doc__=''):
+        def __init__(self, __name__, __doc__=b''):
             pass
 
     class Interface(object):
--- a/mercurial/keepalive.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/keepalive.py	Sun Oct 06 09:48:39 2019 -0400
@@ -215,7 +215,7 @@
     def do_open(self, http_class, req):
         host = urllibcompat.gethost(req)
         if not host:
-            raise urlerr.urlerror('no host given')
+            raise urlerr.urlerror(b'no host given')
 
         try:
             h = self._cm.get_ready_conn(host)
@@ -237,7 +237,7 @@
                 h = http_class(host, timeout=self._timeout)
                 if DEBUG:
                     DEBUG.info(
-                        "creating new connection to %s (%d)", host, id(h)
+                        b"creating new connection to %s (%d)", host, id(h)
                     )
                 self._cm.add(host, h, False)
                 self._start_transaction(h, req)
@@ -246,7 +246,7 @@
         # to make the error message slightly more useful.
         except httplib.BadStatusLine as err:
             raise urlerr.urlerror(
-                _('bad HTTP status line: %s') % pycompat.sysbytes(err.line)
+                _(b'bad HTTP status line: %s') % pycompat.sysbytes(err.line)
             )
         except (socket.error, httplib.HTTPException) as err:
             raise urlerr.urlerror(err)
@@ -258,7 +258,7 @@
             self._cm.remove(h)
 
         if DEBUG:
-            DEBUG.info("STATUS: %s, %s", r.status, r.reason)
+            DEBUG.info(b"STATUS: %s, %s", r.status, r.reason)
         r._handler = self
         r._host = host
         r._url = req.get_full_url()
@@ -295,7 +295,7 @@
             # a DIFFERENT exception
             if DEBUG:
                 DEBUG.error(
-                    "unexpected exception - closing " "connection to %s (%d)",
+                    b"unexpected exception - closing " b"connection to %s (%d)",
                     host,
                     id(h),
                 )
@@ -310,12 +310,12 @@
             # last used the connection.
             if DEBUG:
                 DEBUG.info(
-                    "failed to re-use connection to %s (%d)", host, id(h)
+                    b"failed to re-use connection to %s (%d)", host, id(h)
                 )
             r = None
         else:
             if DEBUG:
-                DEBUG.info("re-using connection to %s (%d)", host, id(h))
+                DEBUG.info(b"re-using connection to %s (%d)", host, id(h))
 
         return r
 
@@ -408,7 +408,7 @@
         self.fileno = sock.fileno
         self.code = None
         self.receivedbytescount = 0
-        self._rbuf = ''
+        self._rbuf = b''
         self._rbufsize = 8096
         self._handler = None  # inserted by the handler later
         self._host = None  # (same)
@@ -460,7 +460,7 @@
         # implemented using readinto(), which can duplicate self._rbuf
         # if it's not empty.
         s = self._rbuf
-        self._rbuf = ''
+        self._rbuf = b''
         data = self._raw_read(amt)
 
         self.receivedbytescount += len(data)
@@ -484,7 +484,7 @@
         while True:
             if chunk_left is None:
                 line = self.fp.readline()
-                i = line.find(';')
+                i = line.find(b';')
                 if i >= 0:
                     line = line[:i]  # strip chunk-extensions
                 try:
@@ -493,7 +493,7 @@
                     # close the connection as protocol synchronization is
                     # probably lost
                     self.close()
-                    raise httplib.IncompleteRead(''.join(parts))
+                    raise httplib.IncompleteRead(b''.join(parts))
                 if chunk_left == 0:
                     break
             if amt is None:
@@ -501,12 +501,12 @@
             elif amt < chunk_left:
                 parts.append(self._safe_read(amt))
                 self.chunk_left = chunk_left - amt
-                return ''.join(parts)
+                return b''.join(parts)
             elif amt == chunk_left:
                 parts.append(self._safe_read(amt))
                 self._safe_read(2)  # toss the CRLF at the end of the chunk
                 self.chunk_left = None
-                return ''.join(parts)
+                return b''.join(parts)
             else:
                 parts.append(self._safe_read(chunk_left))
                 amt -= chunk_left
@@ -523,17 +523,17 @@
                 # a vanishingly small number of sites EOF without
                 # sending the trailer
                 break
-            if line == '\r\n':
+            if line == b'\r\n':
                 break
 
         # we read everything; close the "file"
         self.close()
 
-        return ''.join(parts)
+        return b''.join(parts)
 
     def readline(self):
         # Fast path when a line is already available in the read buffer.
-        i = self._rbuf.find('\n')
+        i = self._rbuf.find(b'\n')
         if i >= 0:
             i += 1
             line = self._rbuf[:i]
@@ -557,7 +557,7 @@
                 pass
 
             chunks.append(new)
-            i = new.find('\n')
+            i = new.find(b'\n')
             if i >= 0:
                 break
 
@@ -565,13 +565,13 @@
 
         # EOF
         if i == -1:
-            self._rbuf = ''
-            return ''.join(chunks)
+            self._rbuf = b''
+            return b''.join(chunks)
 
         i += 1
         self._rbuf = chunks[-1][i:]
         chunks[-1] = chunks[-1][:i]
-        return ''.join(chunks)
+        return b''.join(chunks)
 
     def readlines(self, sizehint=0):
         total = 0
@@ -611,7 +611,7 @@
 
         dest[0:have] = self._rbuf
         got += len(self._rbuf)
-        self._rbuf = ''
+        self._rbuf = b''
         return got
 
 
@@ -642,13 +642,13 @@
     # NOTE: we DO propagate the error, though, because we cannot simply
     #       ignore the error... the caller will know if they can retry.
     if self.debuglevel > 0:
-        print("send:", repr(str))
+        print(b"send:", repr(str))
     try:
         blocksize = 8192
         read = getattr(str, 'read', None)
         if read is not None:
             if self.debuglevel > 0:
-                print("sending a read()able")
+                print(b"sending a read()able")
             data = read(blocksize)
             while data:
                 self.sock.sendall(data)
@@ -710,7 +710,7 @@
 
 def continuity(url):
     md5 = hashlib.md5
-    format = '%25s: %s'
+    format = b'%25s: %s'
 
     # first fetch the file with the normal http handler
     opener = urlreq.buildopener()
@@ -719,7 +719,7 @@
     foo = fo.read()
     fo.close()
     m = md5(foo)
-    print(format % ('normal urllib', node.hex(m.digest())))
+    print(format % (b'normal urllib', node.hex(m.digest())))
 
     # now install the keepalive handler and try again
     opener = urlreq.buildopener(HTTPHandler())
@@ -729,10 +729,10 @@
     foo = fo.read()
     fo.close()
     m = md5(foo)
-    print(format % ('keepalive read', node.hex(m.digest())))
+    print(format % (b'keepalive read', node.hex(m.digest())))
 
     fo = urlreq.urlopen(url)
-    foo = ''
+    foo = b''
     while True:
         f = fo.readline()
         if f:
@@ -741,26 +741,26 @@
             break
     fo.close()
     m = md5(foo)
-    print(format % ('keepalive readline', node.hex(m.digest())))
+    print(format % (b'keepalive readline', node.hex(m.digest())))
 
 
 def comp(N, url):
-    print('  making %i connections to:\n  %s' % (N, url))
+    print(b'  making %i connections to:\n  %s' % (N, url))
 
-    procutil.stdout.write('  first using the normal urllib handlers')
+    procutil.stdout.write(b'  first using the normal urllib handlers')
     # first use normal opener
     opener = urlreq.buildopener()
     urlreq.installopener(opener)
     t1 = fetch(N, url)
-    print('  TIME: %.3f s' % t1)
+    print(b'  TIME: %.3f s' % t1)
 
-    procutil.stdout.write('  now using the keepalive handler       ')
+    procutil.stdout.write(b'  now using the keepalive handler       ')
     # now install the keepalive handler and try again
     opener = urlreq.buildopener(HTTPHandler())
     urlreq.installopener(opener)
     t2 = fetch(N, url)
-    print('  TIME: %.3f s' % t2)
-    print('  improvement factor: %.2f' % (t1 / t2))
+    print(b'  TIME: %.3f s' % t2)
+    print(b'  improvement factor: %.2f' % (t1 / t2))
 
 
 def fetch(N, url, delay=0):
@@ -781,7 +781,7 @@
     for i in lens[1:]:
         j = j + 1
         if not i == lens[0]:
-            print("WARNING: inconsistent length on read %i: %i" % (j, i))
+            print(b"WARNING: inconsistent length on read %i: %i" % (j, i))
 
     return diff
 
@@ -797,41 +797,41 @@
         info = warning = error = debug
 
     DEBUG = FakeLogger()
-    print("  fetching the file to establish a connection")
+    print(b"  fetching the file to establish a connection")
     fo = urlreq.urlopen(url)
     data1 = fo.read()
     fo.close()
 
     i = 20
-    print("  waiting %i seconds for the server to close the connection" % i)
+    print(b"  waiting %i seconds for the server to close the connection" % i)
     while i > 0:
-        procutil.stdout.write('\r  %2i' % i)
+        procutil.stdout.write(b'\r  %2i' % i)
         procutil.stdout.flush()
         time.sleep(1)
         i -= 1
-    procutil.stderr.write('\r')
+    procutil.stderr.write(b'\r')
 
-    print("  fetching the file a second time")
+    print(b"  fetching the file a second time")
     fo = urlreq.urlopen(url)
     data2 = fo.read()
     fo.close()
 
     if data1 == data2:
-        print('  data are identical')
+        print(b'  data are identical')
     else:
-        print('  ERROR: DATA DIFFER')
+        print(b'  ERROR: DATA DIFFER')
 
     DEBUG = dbbackup
 
 
 def test(url, N=10):
-    print("performing continuity test (making sure stuff isn't corrupted)")
+    print(b"performing continuity test (making sure stuff isn't corrupted)")
     continuity(url)
-    print('')
-    print("performing speed comparison")
+    print(b'')
+    print(b"performing speed comparison")
     comp(N, url)
-    print('')
-    print("performing dropped-connection check")
+    print(b'')
+    print(b"performing dropped-connection check")
     test_timeout(url)
 
 
@@ -842,6 +842,6 @@
         N = int(sys.argv[1])
         url = sys.argv[2]
     except (IndexError, ValueError):
-        print("%s <integer> <url>" % sys.argv[0])
+        print(b"%s <integer> <url>" % sys.argv[0])
     else:
         test(url, N)
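
The bulk of the keepalive.py changes follow a single rule: data read from a socket is bytes on Python 3, so the empty read-buffer sentinel, the join() separators, and the patterns handed to find() must all be bytes as well. A minimal standalone illustration of what would otherwise break (not part of the patch):

    # chunks as they would come off a socket: bytes, not str
    chunks = [b'HTTP/1.1 200 OK\r\n', b'Content-Length: 4\r\n']

    try:
        ''.join(chunks)  # str separator over bytes items
    except TypeError:
        pass  # Python 3: "sequence item 0: expected str instance, bytes found"

    header = b''.join(chunks)          # the byteified form used in the diff
    assert header.find(b'\r\n') == 15  # bytes pattern against bytes data
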
--- a/mercurial/linelog.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/linelog.py	Sun Oct 06 09:48:39 2019 -0400
@@ -26,7 +26,7 @@
 from .thirdparty import attr
 from . import pycompat
 
-_llentry = struct.Struct('>II')
+_llentry = struct.Struct(b'>II')
 
 
 class LineLogError(Exception):
@@ -122,7 +122,7 @@
 
     def __init__(self, op1, op2):
         if op1 != 0:
-            raise LineLogError("malformed JUMP, op1 must be 0, got %d" % op1)
+            raise LineLogError(b"malformed JUMP, op1 must be 0, got %d" % op1)
         self._target = op2
 
     def __str__(self):
@@ -143,9 +143,9 @@
 
     def __init__(self, op1, op2):
         if op1 != 0:
-            raise LineLogError("malformed EOF, op1 must be 0, got %d" % op1)
+            raise LineLogError(b"malformed EOF, op1 must be 0, got %d" % op1)
         if op2 != 0:
-            raise LineLogError("malformed EOF, op2 must be 0, got %d" % op2)
+            raise LineLogError(b"malformed EOF, op2 must be 0, got %d" % op2)
 
     def __str__(self):
         return r'EOF'
@@ -218,7 +218,7 @@
     try:
         op1, op2 = _llentry.unpack_from(data, offset)
     except struct.error as e:
-        raise LineLogError('reading an instruction failed: %r' % e)
+        raise LineLogError(b'reading an instruction failed: %r' % e)
     opcode = op1 & 0b11
     op1 = op1 >> 2
     if opcode == 0:
@@ -231,7 +231,7 @@
         return _jl(op1, op2)
     elif opcode == 2:
         return _line(op1, op2)
-    raise NotImplementedError('Unimplemented opcode %r' % opcode)
+    raise NotImplementedError(b'Unimplemented opcode %r' % opcode)
 
 
 class linelog(object):
@@ -255,7 +255,7 @@
         )
 
     def __repr__(self):
-        return '<linelog at %s: maxrev=%d size=%d>' % (
+        return b'<linelog at %s: maxrev=%d size=%d>' % (
             hex(id(self)),
             self._maxrev,
             len(self._program),
@@ -263,7 +263,7 @@
 
     def debugstr(self):
         fmt = r'%%%dd %%s' % len(str(len(self._program)))
-        return pycompat.sysstr('\n').join(
+        return pycompat.sysstr(b'\n').join(
             fmt % (idx, i) for idx, i in enumerate(self._program[1:], 1)
         )
 
@@ -271,7 +271,7 @@
     def fromdata(cls, buf):
         if len(buf) % _llentry.size != 0:
             raise LineLogError(
-                "invalid linelog buffer size %d (must be a multiple of %d)"
+                b"invalid linelog buffer size %d (must be a multiple of %d)"
                 % (len(buf), _llentry.size)
             )
         expected = len(buf) / _llentry.size
@@ -283,8 +283,8 @@
         numentries = fakejge._target
         if expected != numentries:
             raise LineLogError(
-                "corrupt linelog data: claimed"
-                " %d entries but given data for %d entries"
+                b"corrupt linelog data: claimed"
+                b" %d entries but given data for %d entries"
                 % (expected, numentries)
             )
         instructions = [_eof(0, 0)]
@@ -294,7 +294,7 @@
 
     def encode(self):
         hdr = _jge(self._maxrev, len(self._program)).encode()
-        return hdr + ''.join(i.encode() for i in self._program[1:])
+        return hdr + b''.join(i.encode() for i in self._program[1:])
 
     def clear(self):
         self._program = []
@@ -320,7 +320,7 @@
             #        ar = self.annotate(self._maxrev)
         if a1 > len(ar.lines):
             raise LineLogError(
-                '%d contains %d lines, tried to access line %d'
+                b'%d contains %d lines, tried to access line %d'
                 % (rev, len(ar.lines), a1)
             )
         elif a1 == len(ar.lines):
@@ -356,7 +356,7 @@
         if a1 < a2:
             if a2 > len(ar.lines):
                 raise LineLogError(
-                    '%d contains %d lines, tried to access line %d'
+                    b'%d contains %d lines, tried to access line %d'
                     % (rev, len(ar.lines), a2)
                 )
             elif a2 == len(ar.lines):
@@ -454,8 +454,8 @@
             elif isinstance(inst, _line):
                 lines.append((inst._rev, inst._origlineno))
             else:
-                raise LineLogError("Illegal instruction %r" % inst)
+                raise LineLogError(b"Illegal instruction %r" % inst)
             if nextpc == end:
                 return lines
             pc = nextpc
-        raise LineLogError("Failed to perform getalllines")
+        raise LineLogError(b"Failed to perform getalllines")
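
For context on the linelog hunks: each instruction is a pair of big-endian 32-bit words, and the decoder shown above takes the opcode from the low two bits of the first word. A small round-trip under those assumptions (the values are invented for illustration):

    import struct

    _llentry = struct.Struct(b'>II')  # struct accepts bytes format strings

    # rev 5, opcode 2 (a LINE instruction), original line number 42
    data = _llentry.pack((5 << 2) | 2, 42)
    op1, op2 = _llentry.unpack_from(data, 0)
    opcode = op1 & 0b11
    op1 = op1 >> 2
    assert (opcode, op1, op2) == (2, 5, 42)
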
--- a/mercurial/localrepo.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/localrepo.py	Sun Oct 06 09:48:39 2019 -0400
@@ -111,7 +111,7 @@
     def __init__(self, *paths):
         super(repofilecache, self).__init__(*paths)
         for path in paths:
-            _cachedfiles.add((path, 'plain'))
+            _cachedfiles.add((path, b'plain'))
 
     def join(self, obj, fname):
         return obj.vfs.join(fname)
@@ -123,7 +123,7 @@
     def __init__(self, *paths):
         super(storecache, self).__init__(*paths)
         for path in paths:
-            _cachedfiles.add((path, ''))
+            _cachedfiles.add((path, b''))
 
     def join(self, obj, fname):
         return obj.sjoin(fname)
@@ -140,12 +140,12 @@
 
     def join(self, obj, fnameandlocation):
         fname, location = fnameandlocation
-        if location == 'plain':
+        if location == b'plain':
             return obj.vfs.join(fname)
         else:
-            if location != '':
+            if location != b'':
                 raise error.ProgrammingError(
-                    'unexpected location: %s' % location
+                    b'unexpected location: %s' % location
                 )
             return obj.sjoin(fname)
 
@@ -193,14 +193,14 @@
 
 
 moderncaps = {
-    'lookup',
-    'branchmap',
-    'pushkey',
-    'known',
-    'getbundle',
-    'unbundle',
+    b'lookup',
+    b'branchmap',
+    b'pushkey',
+    b'known',
+    b'getbundle',
+    b'unbundle',
 }
-legacycaps = moderncaps.union({'changegroupsubset'})
+legacycaps = moderncaps.union({b'changegroupsubset'})
 
 
 @interfaceutil.implementer(repository.ipeercommandexecutor)
@@ -219,12 +219,12 @@
     def callcommand(self, command, args):
         if self._sent:
             raise error.ProgrammingError(
-                'callcommand() cannot be used after ' 'sendcommands()'
+                b'callcommand() cannot be used after ' b'sendcommands()'
             )
 
         if self._closed:
             raise error.ProgrammingError(
-                'callcommand() cannot be used after ' 'close()'
+                b'callcommand() cannot be used after ' b'close()'
             )
 
         # We don't need to support anything fancy. Just call the named
@@ -258,7 +258,7 @@
 
         if caps is None:
             caps = moderncaps.copy()
-        self._repo = repo.filtered('served')
+        self._repo = repo.filtered(b'served')
         self.ui = repo.ui
         self._caps = repo._restrictcapabilities(caps)
 
@@ -290,11 +290,11 @@
         return self._caps
 
     def clonebundles(self):
-        return self._repo.tryread('clonebundles.manifest')
+        return self._repo.tryread(b'clonebundles.manifest')
 
     def debugwireargs(self, one, two, three=None, four=None, five=None):
         """Used to test argument passing over the wire"""
-        return "%s %s %s %s %s" % (
+        return b"%s %s %s %s %s" % (
             one,
             two,
             pycompat.bytestr(three),
@@ -321,7 +321,7 @@
             # from it in local peer.
             return bundle2.getunbundler(self.ui, cb)
         else:
-            return changegroup.getunbundler('01', cb, None)
+            return changegroup.getunbundler(b'01', cb, None)
 
     def heads(self):
         return self._repo.heads()
@@ -340,7 +340,7 @@
 
     def stream_out(self):
         raise error.Abort(
-            _('cannot perform stream clone against local ' 'peer')
+            _(b'cannot perform stream clone against local ' b'peer')
         )
 
     def unbundle(self, bundle, heads, url):
@@ -350,8 +350,8 @@
         try:
             try:
                 bundle = exchange.readbundle(self.ui, bundle, None)
-                ret = exchange.unbundle(self._repo, bundle, heads, 'push', url)
-                if util.safehasattr(ret, 'getchunks'):
+                ret = exchange.unbundle(self._repo, bundle, heads, b'push', url)
+                if util.safehasattr(ret, b'getchunks'):
                     # This is a bundle20 object, turn it into an unbundler.
                     # This little dance should be dropped eventually when the
                     # API is finally improved.
@@ -377,7 +377,7 @@
                 raise
         except error.PushRaced as exc:
             raise error.ResponseError(
-                _('push failed:'), stringutil.forcebytestr(exc)
+                _(b'push failed:'), stringutil.forcebytestr(exc)
             )
 
     # End of _basewirecommands interface.
@@ -410,20 +410,20 @@
         outgoing = discovery.outgoing(
             self._repo, missingroots=nodes, missingheads=self._repo.heads()
         )
-        return changegroup.makechangegroup(self._repo, outgoing, '01', source)
+        return changegroup.makechangegroup(self._repo, outgoing, b'01', source)
 
     def changegroupsubset(self, bases, heads, source):
         outgoing = discovery.outgoing(
             self._repo, missingroots=bases, missingheads=heads
         )
-        return changegroup.makechangegroup(self._repo, outgoing, '01', source)
+        return changegroup.makechangegroup(self._repo, outgoing, b'01', source)
 
     # End of baselegacywirecommands interface.
 
 
 # Increment the sub-version when the revlog v2 format changes to lock out old
 # clients.
-REVLOGV2_REQUIREMENT = 'exp-revlogv2.1'
+REVLOGV2_REQUIREMENT = b'exp-revlogv2.1'
 
 # A repository with the sparserevlog feature will have delta chains that
 # can spread over a larger span. Sparse reading cuts these large spans into
@@ -432,11 +432,11 @@
 # huge amounts of memory, because the whole span would be read at once,
 # including all the intermediate revisions that aren't pertinent for the chain.
 # This is why once a repository has enabled sparse-read, it becomes required.
-SPARSEREVLOG_REQUIREMENT = 'sparserevlog'
+SPARSEREVLOG_REQUIREMENT = b'sparserevlog'
 
 # A repository with the sidedataflag requirement allows storing extra
 # information for revisions without altering their original hashes.
-SIDEDATA_REQUIREMENT = 'exp-sidedata-flag'
+SIDEDATA_REQUIREMENT = b'exp-sidedata-flag'
 
 # Functions receiving (ui, features) that extensions can register to impact
 # the ability to load repositories with custom requirements. Only
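
These requirement constants end up compared byte-for-byte against the lines of the repository's requires file, which is read in binary mode; that is why the names are bytes now. A simplified sketch of the check (Mercurial actually reads this through its vfs layer):

    with open('.hg/requires', 'rb') as fp:
        requirements = set(fp.read().splitlines())

    if b'sparserevlog' in requirements:
        pass  # repository uses long-span delta chains with sparse reading
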
@@ -627,7 +627,7 @@
 
         if not isinstance(typ, type):
             raise error.ProgrammingError(
-                'unable to construct type for %s' % iface
+                b'unable to construct type for %s' % iface
             )
 
         bases.append(typ)
@@ -700,7 +700,7 @@
 
         for name in names:
             if not ui.hasconfig(b'extensions', name):
-                ui.setconfig(b'extensions', name, b'', source='autoload')
+                ui.setconfig(b'extensions', name, b'', source=b'autoload')
 
 
 def gathersupportedrequirements(ui):
@@ -721,7 +721,7 @@
         engine = util.compengines[name]
         if engine.available() and engine.revlogheader():
             supported.add(b'exp-compression-%s' % name)
-            if engine.name() == 'zstd':
+            if engine.name() == b'zstd':
                 supported.add(b'revlog-compression-zstd')
 
     return supported
@@ -817,12 +817,12 @@
     if b'revlogv1' in requirements or REVLOGV2_REQUIREMENT in requirements:
         options.update(resolverevlogstorevfsoptions(ui, requirements, features))
     else:  # explicitly mark repo as using revlogv0
-        options['revlogv0'] = True
-
-    writecopiesto = ui.config('experimental', 'copies.write-to')
-    copiesextramode = ('changeset-only', 'compatibility')
+        options[b'revlogv0'] = True
+
+    writecopiesto = ui.config(b'experimental', b'copies.write-to')
+    copiesextramode = (b'changeset-only', b'compatibility')
     if writecopiesto in copiesextramode:
-        options['copies-storage'] = 'extra'
+        options[b'copies-storage'] = b'extra'
 
     return options
 
@@ -901,18 +901,18 @@
         #
         # The compression used for new entries will be "the last one"
         prefix = r.startswith
-        if prefix('revlog-compression-') or prefix('exp-compression-'):
-            options[b'compengine'] = r.split('-', 2)[2]
+        if prefix(b'revlog-compression-') or prefix(b'exp-compression-'):
+            options[b'compengine'] = r.split(b'-', 2)[2]
 
     options[b'zlib.level'] = ui.configint(b'storage', b'revlog.zlib.level')
     if options[b'zlib.level'] is not None:
         if not (0 <= options[b'zlib.level'] <= 9):
-            msg = _('invalid value for `storage.revlog.zlib.level` config: %d')
+            msg = _(b'invalid value for `storage.revlog.zlib.level` config: %d')
             raise error.Abort(msg % options[b'zlib.level'])
     options[b'zstd.level'] = ui.configint(b'storage', b'revlog.zstd.level')
     if options[b'zstd.level'] is not None:
         if not (0 <= options[b'zstd.level'] <= 22):
-            msg = _('invalid value for `storage.revlog.zstd.level` config: %d')
+            msg = _(b'invalid value for `storage.revlog.zstd.level` config: %d')
             raise error.Abort(msg % options[b'zstd.level'])
 
     if repository.NARROW_REQUIREMENT in requirements:
@@ -992,22 +992,22 @@
     #    being successful (repository sizes went up due to worse delta
     #    chains), and the code was deleted in 4.6.
     supportedformats = {
-        'revlogv1',
-        'generaldelta',
-        'treemanifest',
+        b'revlogv1',
+        b'generaldelta',
+        b'treemanifest',
         REVLOGV2_REQUIREMENT,
         SIDEDATA_REQUIREMENT,
         SPARSEREVLOG_REQUIREMENT,
         bookmarks.BOOKMARKS_IN_STORE_REQUIREMENT,
     }
     _basesupported = supportedformats | {
-        'store',
-        'fncache',
-        'shared',
-        'relshared',
-        'dotencode',
-        'exp-sparse',
-        'internal-phase',
+        b'store',
+        b'fncache',
+        b'shared',
+        b'relshared',
+        b'dotencode',
+        b'exp-sparse',
+        b'internal-phase',
     }
 
     # list of prefixes for files which can be written without 'wlock'
@@ -1017,17 +1017,17 @@
         # two, but pretty much all the existing code assumes
         # wlock is not needed so we keep them excluded for
         # now.
-        'hgrc',
-        'requires',
+        b'hgrc',
+        b'requires',
         # XXX cache is a complicated business; someone
         # should investigate this in depth at some point
-        'cache/',
+        b'cache/',
         # XXX shouldn't dirstate be covered by the wlock?
-        'dirstate',
+        b'dirstate',
         # XXX bisect was still a bit too messy at the time
         # this changeset was introduced. Someone should fix
         # the remaining bit and drop this line
-        'bisect.state',
+        b'bisect.state',
     }
 
     def __init__(
@@ -1117,8 +1117,8 @@
 
         self.filtername = None
 
-        if self.ui.configbool('devel', 'all-warnings') or self.ui.configbool(
-            'devel', 'check-locks'
+        if self.ui.configbool(b'devel', b'all-warnings') or self.ui.configbool(
+            b'devel', b'check-locks'
         ):
             self.vfs.audit = self._getvfsward(self.vfs.audit)
         # A list of callbacks to shape the phase if no data were found.
@@ -1131,10 +1131,10 @@
         self.spath = self.store.path
         self.svfs = self.store.vfs
         self.sjoin = self.store.join
-        if self.ui.configbool('devel', 'all-warnings') or self.ui.configbool(
-            'devel', 'check-locks'
+        if self.ui.configbool(b'devel', b'all-warnings') or self.ui.configbool(
+            b'devel', b'check-locks'
         ):
-            if util.safehasattr(self.svfs, 'vfs'):  # this is filtervfs
+            if util.safehasattr(self.svfs, b'vfs'):  # this is filtervfs
                 self.svfs.vfs.audit = self._getsvfsward(self.svfs.vfs.audit)
             else:  # standard vfs
                 self.svfs.audit = self._getsvfsward(self.svfs.audit)
@@ -1184,25 +1184,25 @@
             repo = rref()
             if (
                 repo is None
-                or not util.safehasattr(repo, '_wlockref')
-                or not util.safehasattr(repo, '_lockref')
+                or not util.safehasattr(repo, b'_wlockref')
+                or not util.safehasattr(repo, b'_lockref')
             ):
                 return
-            if mode in (None, 'r', 'rb'):
+            if mode in (None, b'r', b'rb'):
                 return
             if path.startswith(repo.path):
                 # truncate name relative to the repository (.hg)
                 path = path[len(repo.path) + 1 :]
-            if path.startswith('cache/'):
-                msg = 'accessing cache with vfs instead of cachevfs: "%s"'
-                repo.ui.develwarn(msg % path, stacklevel=3, config="cache-vfs")
-            if path.startswith('journal.') or path.startswith('undo.'):
+            if path.startswith(b'cache/'):
+                msg = b'accessing cache with vfs instead of cachevfs: "%s"'
+                repo.ui.develwarn(msg % path, stacklevel=3, config=b"cache-vfs")
+            if path.startswith(b'journal.') or path.startswith(b'undo.'):
                 # journal is covered by 'lock'
                 if repo._currentlock(repo._lockref) is None:
                     repo.ui.develwarn(
-                        'write with no lock: "%s"' % path,
+                        b'write with no lock: "%s"' % path,
                         stacklevel=3,
-                        config='check-locks',
+                        config=b'check-locks',
                     )
             elif repo._currentlock(repo._wlockref) is None:
                 # rest of vfs files are covered by 'wlock'
@@ -1212,9 +1212,9 @@
                     if path.startswith(prefix):
                         return
                 repo.ui.develwarn(
-                    'write with no wlock: "%s"' % path,
+                    b'write with no wlock: "%s"' % path,
                     stacklevel=3,
-                    config='check-locks',
+                    config=b'check-locks',
                 )
             return ret
 
@@ -1227,16 +1227,16 @@
         def checksvfs(path, mode=None):
             ret = origfunc(path, mode=mode)
             repo = rref()
-            if repo is None or not util.safehasattr(repo, '_lockref'):
+            if repo is None or not util.safehasattr(repo, b'_lockref'):
                 return
-            if mode in (None, 'r', 'rb'):
+            if mode in (None, b'r', b'rb'):
                 return
             if path.startswith(repo.sharedpath):
                 # truncate name relative to the repository (.hg)
                 path = path[len(repo.sharedpath) + 1 :]
             if repo._currentlock(repo._lockref) is None:
                 repo.ui.develwarn(
-                    'write with no lock: "%s"' % path, stacklevel=4
+                    b'write with no lock: "%s"' % path, stacklevel=4
                 )
             return ret
 
@@ -1250,12 +1250,12 @@
             self._revbranchcache.write()
 
     def _restrictcapabilities(self, caps):
-        if self.ui.configbool('experimental', 'bundle2-advertise'):
+        if self.ui.configbool(b'experimental', b'bundle2-advertise'):
             caps = set(caps)
             capsblob = bundle2.encodecaps(
-                bundle2.getrepocaps(self, role='client')
+                bundle2.getrepocaps(self, role=b'client')
             )
-            caps.add('bundle2=' + urlreq.quote(capsblob))
+            caps.add(b'bundle2=' + urlreq.quote(capsblob))
         return caps
 
     def _writerequirements(self):
@@ -1305,7 +1305,7 @@
         ctx = self[None]
         parts = util.splitpath(subpath)
         while parts:
-            prefix = '/'.join(parts)
+            prefix = b'/'.join(parts)
             if prefix in ctx.substate:
                 if prefix == normsubpath:
                     return True
@@ -1337,17 +1337,17 @@
 
         In other words, there is always only one level of `repoview` "filtering".
         """
-        if self._extrafilterid is not None and '%' not in name:
-            name = name + '%' + self._extrafilterid
+        if self._extrafilterid is not None and b'%' not in name:
+            name = name + b'%' + self._extrafilterid
 
         cls = repoview.newtype(self.unfiltered().__class__)
         return cls(self, name, visibilityexceptions)
 
     @mixedrepostorecache(
-        ('bookmarks', 'plain'),
-        ('bookmarks.current', 'plain'),
-        ('bookmarks', ''),
-        ('00changelog.i', ''),
+        (b'bookmarks', b'plain'),
+        (b'bookmarks.current', b'plain'),
+        (b'bookmarks', b''),
+        (b'00changelog.i', b''),
     )
     def _bookmarks(self):
         # Since the multiple files involved in the transaction cannot be
@@ -1403,7 +1403,7 @@
 
     def _refreshchangelog(self):
         """make sure the in memory changelog match the on-disk one"""
-        if 'changelog' in vars(self) and self.currenttransaction() is None:
+        if b'changelog' in vars(self) and self.currenttransaction() is None:
             del self.changelog
 
     @property
@@ -1413,23 +1413,23 @@
     # _phasesets depend on changelog. what we need is to call
     # _phasecache.invalidate() if '00changelog.i' was changed, but it
     # can't be easily expressed in filecache mechanism.
-    @storecache('phaseroots', '00changelog.i')
+    @storecache(b'phaseroots', b'00changelog.i')
     def _phasecache(self):
         return phases.phasecache(self, self._phasedefaults)
 
-    @storecache('obsstore')
+    @storecache(b'obsstore')
     def obsstore(self):
         return obsolete.makestore(self.ui, self)
 
-    @storecache('00changelog.i')
+    @storecache(b'00changelog.i')
     def changelog(self):
         return self.store.changelog(txnutil.mayhavepending(self.root))
 
-    @storecache('00manifest.i')
+    @storecache(b'00manifest.i')
     def manifestlog(self):
         return self.store.manifestlog(self, self._storenarrowmatch)
 
-    @repofilecache('dirstate')
+    @repofilecache(b'dirstate')
     def dirstate(self):
         return self._makedirstate()
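
The @storecache/@repofilecache decorators byteified above cache a computed attribute and tie its validity to the named file(s), so an on-disk change to e.g. b'00changelog.i' invalidates the in-memory changelog. A toy model of the idea, keyed on stat() (deliberately much simpler than Mercurial's filecache):

    import os

    class toyfilecache(object):
        def __init__(self, path):
            self.path = path

        def __call__(self, func):
            name = func.__name__

            def getter(obj):
                st = os.stat(self.path)
                key = (st.st_mtime, st.st_size)
                cached = obj.__dict__.get(name)
                if cached is None or cached[0] != key:
                    # file changed (or first access): recompute and cache
                    obj.__dict__[name] = (key, func(obj))
                return obj.__dict__[name][1]

            return property(getter)

Used as @toyfilecache('.hg/store/00changelog.i') on a method, the resulting property recomputes only when the backing file's stat changes.
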
 
@@ -1449,7 +1449,7 @@
             if not self._dirstatevalidatewarned:
                 self._dirstatevalidatewarned = True
                 self.ui.warn(
-                    _("warning: ignoring unknown" " working parent %s!\n")
+                    _(b"warning: ignoring unknown" b" working parent %s!\n")
                     % short(node)
                 )
             return nullid
@@ -1516,13 +1516,13 @@
             if isinstance(changeid, int):
                 node = self.changelog.node(changeid)
                 rev = changeid
-            elif changeid == 'null':
+            elif changeid == b'null':
                 node = nullid
                 rev = nullrev
-            elif changeid == 'tip':
+            elif changeid == b'tip':
                 node = self.changelog.tip()
                 rev = self.changelog.rev(node)
-            elif changeid == '.':
+            elif changeid == b'.':
                 # this is a hack to delay/avoid loading obsmarkers
                 # when we know that '.' won't be hidden
                 node = self.dirstate.p1()
@@ -1543,7 +1543,7 @@
                         self.local()
                         and changeid in self.unfiltered().dirstate.parents()
                     ):
-                        msg = _("working directory has unknown parent '%s'!")
+                        msg = _(b"working directory has unknown parent '%s'!")
                         raise error.Abort(msg % short(changeid))
                     changeid = hex(changeid)  # for the error message
                     raise
@@ -1553,7 +1553,7 @@
                 rev = self.changelog.rev(node)
             else:
                 raise error.ProgrammingError(
-                    "unsupported changeid '%s' of type %s"
+                    b"unsupported changeid '%s' of type %s"
                     % (changeid, type(changeid))
                 )
 
@@ -1561,11 +1561,11 @@
 
         except (error.FilteredIndexError, error.FilteredLookupError):
             raise error.FilteredRepoLookupError(
-                _("filtered revision '%s'") % pycompat.bytestr(changeid)
+                _(b"filtered revision '%s'") % pycompat.bytestr(changeid)
             )
         except (IndexError, LookupError):
             raise error.RepoLookupError(
-                _("unknown revision '%s'") % pycompat.bytestr(changeid)
+                _(b"unknown revision '%s'") % pycompat.bytestr(changeid)
             )
         except error.WdirUnsupported:
             return context.workingctx(self)
@@ -1643,7 +1643,7 @@
         return m(self)
 
     def url(self):
-        return 'file:' + self.root
+        return b'file:' + self.root
 
     def hook(self, name, throw=False, **args):
         """Call a hook, passing this repo instance.
@@ -1711,7 +1711,7 @@
         # map tag name to (node, hist)
         alltags = tagsmod.findglobaltags(self.ui, self)
         # map tag name to tag type
-        tagtypes = dict((tag, 'global') for tag in alltags)
+        tagtypes = dict((tag, b'global') for tag in alltags)
 
         tagsmod.readlocaltags(self.ui, self, alltags, tagtypes)
 
@@ -1723,7 +1723,7 @@
         for (name, (node, hist)) in alltags.iteritems():
             if node != nullid:
                 tags[encoding.tolocal(name)] = node
-        tags['tip'] = self.changelog.tip()
+        tags[b'tip'] = self.changelog.tip()
         tagtypes = dict(
             [
                 (encoding.tolocal(name), value)
@@ -1791,14 +1791,14 @@
             return self.branchmap().branchtip(branch)
         except KeyError:
             if not ignoremissing:
-                raise error.RepoLookupError(_("unknown branch '%s'") % branch)
+                raise error.RepoLookupError(_(b"unknown branch '%s'") % branch)
             else:
                 pass
 
     def lookup(self, key):
         node = scmutil.revsymbol(self, key).node()
         if node is None:
-            raise error.RepoLookupError(_("unknown revision '%s'") % key)
+            raise error.RepoLookupError(_(b"unknown revision '%s'") % key)
         return node
 
     def lookupbranch(self, key):
@@ -1824,7 +1824,7 @@
     def publishing(self):
         # it's safe (and desirable) to trust the publish flag unconditionally
         # so that we don't finalize changes shared between users via ssh or nfs
-        return self.ui.configbool('phases', 'publish', untrusted=True)
+        return self.ui.configbool(b'phases', b'publish', untrusted=True)
 
     def cancopy(self):
         # so statichttprepo's override of local() works
@@ -1833,12 +1833,12 @@
         if not self.publishing():
             return True
         # if publishing we can't copy if there is filtered content
-        return not self.filtered('visible').changelog.filteredrevs
+        return not self.filtered(b'visible').changelog.filteredrevs
 
     def shared(self):
         '''the type of shared repository (None if not shared)'''
         if self.sharedpath != self.path:
-            return 'store'
+            return b'store'
         return None
 
     def wjoin(self, f, *insidef):
@@ -1877,9 +1877,9 @@
         if filter not in self._filterpats:
             l = []
             for pat, cmd in self.ui.configitems(filter):
-                if cmd == '!':
+                if cmd == b'!':
                     continue
-                mf = matchmod.match(self.root, '', [pat])
+                mf = matchmod.match(self.root, b'', [pat])
                 fn = None
                 params = cmd
                 for name, filterfn in self._datafilters.iteritems():
@@ -1900,7 +1900,7 @@
     def _filter(self, filterpats, filename, data):
         for mf, fn, cmd in filterpats:
             if mf(filename):
-                self.ui.debug("filtering %s through %s\n" % (filename, cmd))
+                self.ui.debug(b"filtering %s through %s\n" % (filename, cmd))
                 data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
                 break
 
@@ -1908,11 +1908,11 @@
 
     @unfilteredpropertycache
     def _encodefilterpats(self):
-        return self._loadfilter('encode')
+        return self._loadfilter(b'encode')
 
     @unfilteredpropertycache
     def _decodefilterpats(self):
-        return self._loadfilter('decode')
+        return self._loadfilter(b'decode')
 
     def adddatafilter(self, name, filter):
         self._datafilters[name] = filter
@@ -1930,13 +1930,13 @@
         This returns length of written (maybe decoded) data.
         """
         data = self._filter(self._decodefilterpats, filename, data)
-        if 'l' in flags:
+        if b'l' in flags:
             self.wvfs.symlink(data, filename)
         else:
             self.wvfs.write(
                 filename, data, backgroundclose=backgroundclose, **kwargs
             )
-            if 'x' in flags:
+            if b'x' in flags:
                 self.wvfs.setflags(filename, False, True)
             else:
                 self.wvfs.setflags(filename, False, False)
@@ -1957,26 +1957,26 @@
         return None
 
     def transaction(self, desc, report=None):
-        if self.ui.configbool('devel', 'all-warnings') or self.ui.configbool(
-            'devel', 'check-locks'
+        if self.ui.configbool(b'devel', b'all-warnings') or self.ui.configbool(
+            b'devel', b'check-locks'
         ):
             if self._currentlock(self._lockref) is None:
-                raise error.ProgrammingError('transaction requires locking')
+                raise error.ProgrammingError(b'transaction requires locking')
         tr = self.currenttransaction()
         if tr is not None:
             return tr.nest(name=desc)
 
         # abort here if the journal already exists
-        if self.svfs.exists("journal"):
+        if self.svfs.exists(b"journal"):
             raise error.RepoError(
-                _("abandoned transaction found"),
-                hint=_("run 'hg recover' to clean up transaction"),
+                _(b"abandoned transaction found"),
+                hint=_(b"run 'hg recover' to clean up transaction"),
             )
 
-        idbase = "%.40f#%f" % (random.random(), time.time())
+        idbase = b"%.40f#%f" % (random.random(), time.time())
         ha = hex(hashlib.sha1(idbase).digest())
-        txnid = 'TXN:' + ha
-        self.hook('pretxnopen', throw=True, txnname=desc, txnid=txnid)
+        txnid = b'TXN:' + ha
+        self.hook(b'pretxnopen', throw=True, txnname=desc, txnid=txnid)
 
         self._writejournal(desc)
         renames = [(vfs, x, undoname(x)) for vfs, x in self._journalfiles()]
@@ -1984,7 +1984,7 @@
             rp = report
         else:
             rp = self.ui.warn
-        vfsmap = {'plain': self.vfs, 'store': self.svfs}  # root of .hg/
+        vfsmap = {b'plain': self.vfs, b'store': self.svfs}  # root of .hg/
         # we must avoid cyclic reference between repo and transaction.
         reporef = weakref.ref(self)
         # Code to track tag movement
@@ -2022,8 +2022,10 @@
         #   "+M": tag is moved (new value),
         tracktags = lambda x: None
         # experimental config: experimental.hook-track-tags
-        shouldtracktags = self.ui.configbool('experimental', 'hook-track-tags')
-        if desc != 'strip' and shouldtracktags:
+        shouldtracktags = self.ui.configbool(
+            b'experimental', b'hook-track-tags'
+        )
+        if desc != b'strip' and shouldtracktags:
             oldheads = self.changelog.headrevs()
 
             def tracktags(tr2):
@@ -2035,9 +2037,9 @@
                 # As we do it only once, building a set would not be cheaper
                 changes = tagsmod.difftags(repo.ui, repo, oldfnodes, newfnodes)
                 if changes:
-                    tr2.hookargs['tag_moved'] = '1'
+                    tr2.hookargs[b'tag_moved'] = b'1'
                     with repo.vfs(
-                        'changes/tags.changes', 'w', atomictemp=True
+                        b'changes/tags.changes', b'w', atomictemp=True
                     ) as changesfile:
                         # note: we do not register the file to the transaction
                         # because we need it to still exist on the transaction
@@ -2066,35 +2068,39 @@
             repo = reporef()
 
             r = repo.ui.configsuboptions(
-                'experimental', 'single-head-per-branch'
+                b'experimental', b'single-head-per-branch'
             )
             singlehead, singleheadsub = r
             if singlehead:
-                accountclosed = singleheadsub.get("account-closed-heads", False)
+                accountclosed = singleheadsub.get(
+                    b"account-closed-heads", False
+                )
                 scmutil.enforcesinglehead(repo, tr2, desc, accountclosed)
-            if hook.hashook(repo.ui, 'pretxnclose-bookmark'):
-                for name, (old, new) in sorted(tr.changes['bookmarks'].items()):
+            if hook.hashook(repo.ui, b'pretxnclose-bookmark'):
+                for name, (old, new) in sorted(
+                    tr.changes[b'bookmarks'].items()
+                ):
                     args = tr.hookargs.copy()
                     args.update(bookmarks.preparehookargs(name, old, new))
                     repo.hook(
-                        'pretxnclose-bookmark',
+                        b'pretxnclose-bookmark',
                         throw=True,
                         **pycompat.strkwargs(args)
                     )
-            if hook.hashook(repo.ui, 'pretxnclose-phase'):
+            if hook.hashook(repo.ui, b'pretxnclose-phase'):
                 cl = repo.unfiltered().changelog
-                for rev, (old, new) in tr.changes['phases'].items():
+                for rev, (old, new) in tr.changes[b'phases'].items():
                     args = tr.hookargs.copy()
                     node = hex(cl.node(rev))
                     args.update(phases.preparehookargs(node, old, new))
                     repo.hook(
-                        'pretxnclose-phase',
+                        b'pretxnclose-phase',
                         throw=True,
                         **pycompat.strkwargs(args)
                     )
 
             repo.hook(
-                'pretxnclose', throw=True, **pycompat.strkwargs(tr.hookargs)
+                b'pretxnclose', throw=True, **pycompat.strkwargs(tr.hookargs)
             )
 
         def releasefn(tr, success):
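
The **pycompat.strkwargs(...) dance in these hook invocations exists because Python 3 only allows str keys in keyword expansion, while Mercurial passes hook arguments around as bytes. A simplified stand-in showing the conversion (pycompat's real version is more careful about encodings):

    def strkwargs(dic):  # toy version of pycompat.strkwargs
        return {k.decode('latin-1'): v for k, v in dic.items()}

    def hook(throw=False, txnid=None, txnname=None):
        return (throw, txnid, txnname)

    args = {b'txnid': b'TXN:0123abcd', b'txnname': b'commit'}
    assert hook(throw=True, **strkwargs(args)) == (
        True, b'TXN:0123abcd', b'commit',
    )
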
@@ -2115,9 +2121,9 @@
             else:
                 # discard all changes (including ones already written
                 # out) in this transaction
-                narrowspec.restorebackup(self, 'journal.narrowspec')
-                narrowspec.restorewcbackup(self, 'journal.narrowspec.dirstate')
-                repo.dirstate.restorebackup(None, 'journal.dirstate')
+                narrowspec.restorebackup(self, b'journal.narrowspec')
+                narrowspec.restorewcbackup(self, b'journal.narrowspec.dirstate')
+                repo.dirstate.restorebackup(None, b'journal.dirstate')
 
                 repo.invalidate(clearfilecache=True)
 
@@ -2125,8 +2131,8 @@
             rp,
             self.svfs,
             vfsmap,
-            "journal",
-            "undo",
+            b"journal",
+            b"undo",
             aftertrans(renames),
             self.store.createmode,
             validator=validate,
@@ -2134,17 +2140,17 @@
             checkambigfiles=_cachedfiles,
             name=desc,
         )
-        tr.changes['origrepolen'] = len(self)
-        tr.changes['obsmarkers'] = set()
-        tr.changes['phases'] = {}
-        tr.changes['bookmarks'] = {}
-
-        tr.hookargs['txnid'] = txnid
-        tr.hookargs['txnname'] = desc
+        tr.changes[b'origrepolen'] = len(self)
+        tr.changes[b'obsmarkers'] = set()
+        tr.changes[b'phases'] = {}
+        tr.changes[b'bookmarks'] = {}
+
+        tr.hookargs[b'txnid'] = txnid
+        tr.hookargs[b'txnname'] = desc
         # note: writing the fncache only during finalize means that the file is
         # outdated when running hooks. As fncache is used for streaming clone,
         # this is not expected to break anything that happens during the hooks.
-        tr.addfinalize('flush-fncache', self.store.write)
+        tr.addfinalize(b'flush-fncache', self.store.write)
 
         def txnclosehook(tr2):
             """To be run if transaction is successful, will schedule a hook run
@@ -2157,68 +2163,68 @@
 
             def hookfunc():
                 repo = reporef()
-                if hook.hashook(repo.ui, 'txnclose-bookmark'):
-                    bmchanges = sorted(tr.changes['bookmarks'].items())
+                if hook.hashook(repo.ui, b'txnclose-bookmark'):
+                    bmchanges = sorted(tr.changes[b'bookmarks'].items())
                     for name, (old, new) in bmchanges:
                         args = tr.hookargs.copy()
                         args.update(bookmarks.preparehookargs(name, old, new))
                         repo.hook(
-                            'txnclose-bookmark',
+                            b'txnclose-bookmark',
                             throw=False,
                             **pycompat.strkwargs(args)
                         )
 
-                if hook.hashook(repo.ui, 'txnclose-phase'):
+                if hook.hashook(repo.ui, b'txnclose-phase'):
                     cl = repo.unfiltered().changelog
-                    phasemv = sorted(tr.changes['phases'].items())
+                    phasemv = sorted(tr.changes[b'phases'].items())
                     for rev, (old, new) in phasemv:
                         args = tr.hookargs.copy()
                         node = hex(cl.node(rev))
                         args.update(phases.preparehookargs(node, old, new))
                         repo.hook(
-                            'txnclose-phase',
+                            b'txnclose-phase',
                             throw=False,
                             **pycompat.strkwargs(args)
                         )
 
                 repo.hook(
-                    'txnclose', throw=False, **pycompat.strkwargs(hookargs)
+                    b'txnclose', throw=False, **pycompat.strkwargs(hookargs)
                 )
 
             reporef()._afterlock(hookfunc)
 
-        tr.addfinalize('txnclose-hook', txnclosehook)
+        tr.addfinalize(b'txnclose-hook', txnclosehook)
         # Include a leading "-" to make it happen before the transaction summary
         # reports registered via scmutil.registersummarycallback() whose names
         # are 00-txnreport etc. That way, the caches will be warm when the
         # callbacks run.
-        tr.addpostclose('-warm-cache', self._buildcacheupdater(tr))
+        tr.addpostclose(b'-warm-cache', self._buildcacheupdater(tr))
 
         def txnaborthook(tr2):
             """To be run if transaction is aborted
             """
             reporef().hook(
-                'txnabort', throw=False, **pycompat.strkwargs(tr2.hookargs)
+                b'txnabort', throw=False, **pycompat.strkwargs(tr2.hookargs)
             )
 
-        tr.addabort('txnabort-hook', txnaborthook)
+        tr.addabort(b'txnabort-hook', txnaborthook)
         # avoid eager cache invalidation. in-memory data should be identical
         # to stored data if transaction has no error.
-        tr.addpostclose('refresh-filecachestats', self._refreshfilecachestats)
+        tr.addpostclose(b'refresh-filecachestats', self._refreshfilecachestats)
         self._transref = weakref.ref(tr)
         scmutil.registersummarycallback(self, tr, desc)
         return tr
 
     def _journalfiles(self):
         return (
-            (self.svfs, 'journal'),
-            (self.svfs, 'journal.narrowspec'),
-            (self.vfs, 'journal.narrowspec.dirstate'),
-            (self.vfs, 'journal.dirstate'),
-            (self.vfs, 'journal.branch'),
-            (self.vfs, 'journal.desc'),
-            (bookmarks.bookmarksvfs(self), 'journal.bookmarks'),
-            (self.svfs, 'journal.phaseroots'),
+            (self.svfs, b'journal'),
+            (self.svfs, b'journal.narrowspec'),
+            (self.vfs, b'journal.narrowspec.dirstate'),
+            (self.vfs, b'journal.dirstate'),
+            (self.vfs, b'journal.branch'),
+            (self.vfs, b'journal.desc'),
+            (bookmarks.bookmarksvfs(self), b'journal.bookmarks'),
+            (self.svfs, b'journal.phaseroots'),
         )
 
     def undofiles(self):
@@ -2226,38 +2232,38 @@
 
     @unfilteredmethod
     def _writejournal(self, desc):
-        self.dirstate.savebackup(None, 'journal.dirstate')
-        narrowspec.savewcbackup(self, 'journal.narrowspec.dirstate')
-        narrowspec.savebackup(self, 'journal.narrowspec')
+        self.dirstate.savebackup(None, b'journal.dirstate')
+        narrowspec.savewcbackup(self, b'journal.narrowspec.dirstate')
+        narrowspec.savebackup(self, b'journal.narrowspec')
         self.vfs.write(
-            "journal.branch", encoding.fromlocal(self.dirstate.branch())
+            b"journal.branch", encoding.fromlocal(self.dirstate.branch())
         )
-        self.vfs.write("journal.desc", "%d\n%s\n" % (len(self), desc))
+        self.vfs.write(b"journal.desc", b"%d\n%s\n" % (len(self), desc))
         bookmarksvfs = bookmarks.bookmarksvfs(self)
         bookmarksvfs.write(
-            "journal.bookmarks", bookmarksvfs.tryread("bookmarks")
+            b"journal.bookmarks", bookmarksvfs.tryread(b"bookmarks")
         )
-        self.svfs.write("journal.phaseroots", self.svfs.tryread("phaseroots"))
+        self.svfs.write(b"journal.phaseroots", self.svfs.tryread(b"phaseroots"))
 
     def recover(self):
         with self.lock():
-            if self.svfs.exists("journal"):
-                self.ui.status(_("rolling back interrupted transaction\n"))
+            if self.svfs.exists(b"journal"):
+                self.ui.status(_(b"rolling back interrupted transaction\n"))
                 vfsmap = {
-                    '': self.svfs,
-                    'plain': self.vfs,
+                    b'': self.svfs,
+                    b'plain': self.vfs,
                 }
                 transaction.rollback(
                     self.svfs,
                     vfsmap,
-                    "journal",
+                    b"journal",
                     self.ui.warn,
                     checkambigfiles=_cachedfiles,
                 )
                 self.invalidate()
                 return True
             else:
-                self.ui.warn(_("no interrupted transaction available\n"))
+                self.ui.warn(_(b"no interrupted transaction available\n"))
                 return False
 
     def rollback(self, dryrun=False, force=False):
@@ -2265,12 +2271,12 @@
         try:
             wlock = self.wlock()
             lock = self.lock()
-            if self.svfs.exists("undo"):
-                dsguard = dirstateguard.dirstateguard(self, 'rollback')
+            if self.svfs.exists(b"undo"):
+                dsguard = dirstateguard.dirstateguard(self, b'rollback')
 
                 return self._rollback(dryrun, force, dsguard)
             else:
-                self.ui.warn(_("no rollback information available\n"))
+                self.ui.warn(_(b"no rollback information available\n"))
                 return 1
         finally:
             release(dsguard, lock, wlock)
@@ -2279,7 +2285,7 @@
     def _rollback(self, dryrun, force, dsguard):
         ui = self.ui
         try:
-            args = self.vfs.read('undo.desc').splitlines()
+            args = self.vfs.read(b'undo.desc').splitlines()
             (oldlen, desc, detail) = (int(args[0]), args[1], None)
             if len(args) >= 3:
                 detail = args[2]
@@ -2287,24 +2293,24 @@
 
             if detail and ui.verbose:
                 msg = _(
-                    'repository tip rolled back to revision %d'
-                    ' (undo %s: %s)\n'
+                    b'repository tip rolled back to revision %d'
+                    b' (undo %s: %s)\n'
                 ) % (oldtip, desc, detail)
             else:
                 msg = _(
-                    'repository tip rolled back to revision %d' ' (undo %s)\n'
+                    b'repository tip rolled back to revision %d' b' (undo %s)\n'
                 ) % (oldtip, desc)
         except IOError:
-            msg = _('rolling back unknown transaction\n')
+            msg = _(b'rolling back unknown transaction\n')
             desc = None
 
-        if not force and self['.'] != self['tip'] and desc == 'commit':
+        if not force and self[b'.'] != self[b'tip'] and desc == b'commit':
             raise error.Abort(
                 _(
-                    'rollback of last commit while not checked out '
-                    'may lose data'
+                    b'rollback of last commit while not checked out '
+                    b'may lose data'
                 ),
-                hint=_('use -f to force'),
+                hint=_(b'use -f to force'),
             )
 
         ui.status(msg)
@@ -2313,15 +2319,17 @@
 
         parents = self.dirstate.parents()
         self.destroying()
-        vfsmap = {'plain': self.vfs, '': self.svfs}
+        vfsmap = {b'plain': self.vfs, b'': self.svfs}
         transaction.rollback(
-            self.svfs, vfsmap, 'undo', ui.warn, checkambigfiles=_cachedfiles
+            self.svfs, vfsmap, b'undo', ui.warn, checkambigfiles=_cachedfiles
         )
         bookmarksvfs = bookmarks.bookmarksvfs(self)
-        if bookmarksvfs.exists('undo.bookmarks'):
-            bookmarksvfs.rename('undo.bookmarks', 'bookmarks', checkambig=True)
-        if self.svfs.exists('undo.phaseroots'):
-            self.svfs.rename('undo.phaseroots', 'phaseroots', checkambig=True)
+        if bookmarksvfs.exists(b'undo.bookmarks'):
+            bookmarksvfs.rename(
+                b'undo.bookmarks', b'bookmarks', checkambig=True
+            )
+        if self.svfs.exists(b'undo.phaseroots'):
+            self.svfs.rename(b'undo.phaseroots', b'phaseroots', checkambig=True)
         self.invalidate()
 
         parentgone = any(p not in self.changelog.nodemap for p in parents)
@@ -2329,17 +2337,17 @@
             # prevent dirstateguard from overwriting already restored one
             dsguard.close()
 
-            narrowspec.restorebackup(self, 'undo.narrowspec')
-            narrowspec.restorewcbackup(self, 'undo.narrowspec.dirstate')
-            self.dirstate.restorebackup(None, 'undo.dirstate')
+            narrowspec.restorebackup(self, b'undo.narrowspec')
+            narrowspec.restorewcbackup(self, b'undo.narrowspec.dirstate')
+            self.dirstate.restorebackup(None, b'undo.dirstate')
             try:
-                branch = self.vfs.read('undo.branch')
+                branch = self.vfs.read(b'undo.branch')
                 self.dirstate.setbranch(encoding.tolocal(branch))
             except IOError:
                 ui.warn(
                     _(
-                        'named branch could not be reset: '
-                        'current branch is still \'%s\'\n'
+                        b'named branch could not be reset: '
+                        b'current branch is still \'%s\'\n'
                     )
                     % self.dirstate.branch()
                 )
@@ -2347,15 +2355,18 @@
             parents = tuple([p.rev() for p in self[None].parents()])
             if len(parents) > 1:
                 ui.status(
-                    _('working directory now based on ' 'revisions %d and %d\n')
+                    _(
+                        b'working directory now based on '
+                        b'revisions %d and %d\n'
+                    )
                     % parents
                 )
             else:
                 ui.status(
-                    _('working directory now based on ' 'revision %d\n')
+                    _(b'working directory now based on ' b'revision %d\n')
                     % parents
                 )
-            mergemod.mergestate.clean(self, self['.'].node())
+            mergemod.mergestate.clean(self, self[b'.'].node())
 
         # TODO: if we know which new heads may result from this rollback, pass
         # them to destroy(), which will prevent the branchhead cache from being
@@ -2390,16 +2401,16 @@
         If 'full' is set, make sure all caches the function knows about have
         up-to-date data. Even the ones usually loaded more lazily.
         """
-        if tr is not None and tr.hookargs.get('source') == 'strip':
+        if tr is not None and tr.hookargs.get(b'source') == b'strip':
             # During strip, many caches are invalid but
             # later call to `destroyed` will refresh them.
             return
 
-        if tr is None or tr.changes['origrepolen'] < len(self):
+        if tr is None or tr.changes[b'origrepolen'] < len(self):
             # accessing the 'served' branchmap should refresh all the others,
-            self.ui.debug('updating the branch cache\n')
-            self.filtered('served').branchmap()
-            self.filtered('served.hidden').branchmap()
+            self.ui.debug(b'updating the branch cache\n')
+            self.filtered(b'served').branchmap()
+            self.filtered(b'served.hidden').branchmap()
 
         if full:
             unfi = self.unfiltered()
@@ -2409,14 +2420,14 @@
             rbc.write()
 
             # ensure the working copy parents are in the manifestfulltextcache
-            for ctx in self['.'].parents():
+            for ctx in self[b'.'].parents():
                 ctx.manifest()  # accessing the manifest is enough
 
             # accessing fnode cache warms the cache
             tagsmod.fnoderevs(self.ui, unfi, unfi.changelog.revs())
             # accessing tags warm the cache
             self.tags()
-            self.filtered('served').tags()
+            self.filtered(b'served').tags()
 
             # The `full` arg is documented as updating even the lazily-loaded
             # caches immediately, so we're forcing a write to cause these caches
@@ -2470,10 +2481,10 @@
         unfiltered = self.unfiltered()  # all file caches are stored unfiltered
         for k in list(self._filecache.keys()):
             # dirstate is invalidated separately in invalidatedirstate()
-            if k == 'dirstate':
+            if k == b'dirstate':
                 continue
             if (
-                k == 'changelog'
+                k == b'changelog'
                 and self.currenttransaction()
                 and self.changelog._delayed
             ):
@@ -2531,10 +2542,10 @@
         timeout = 0
         warntimeout = 0
         if wait:
-            timeout = self.ui.configint("ui", "timeout")
-            warntimeout = self.ui.configint("ui", "timeout.warn")
+            timeout = self.ui.configint(b"ui", b"timeout")
+            warntimeout = self.ui.configint(b"ui", b"timeout.warn")
         # internal config: ui.signal-safe-lock
-        signalsafe = self.ui.configbool('ui', 'signal-safe-lock')
+        signalsafe = self.ui.configbool(b'ui', b'signal-safe-lock')
 
         l = lockmod.trylock(
             self.ui,
@@ -2578,11 +2589,11 @@
 
         l = self._lock(
             vfs=self.svfs,
-            lockname="lock",
+            lockname=b"lock",
             wait=wait,
             releasefn=None,
             acquirefn=self.invalidate,
-            desc=_('repository %s') % self.origroot,
+            desc=_(b'repository %s') % self.origroot,
         )
         self._lockref = weakref.ref(l)
         return l
@@ -2590,7 +2601,7 @@
     def _wlockchecktransaction(self):
         if self.currenttransaction() is not None:
             raise error.LockInheritanceContractViolation(
-                'wlock cannot be inherited in the middle of a transaction'
+                b'wlock cannot be inherited in the middle of a transaction'
             )
 
     def wlock(self, wait=True):
@@ -2609,11 +2620,11 @@
         # We do not need to check for non-waiting lock acquisition.  Such
         # acquisition would not cause dead-lock as they would just fail.
         if wait and (
-            self.ui.configbool('devel', 'all-warnings')
-            or self.ui.configbool('devel', 'check-locks')
+            self.ui.configbool(b'devel', b'all-warnings')
+            or self.ui.configbool(b'devel', b'check-locks')
         ):
             if self._currentlock(self._lockref) is not None:
-                self.ui.develwarn('"wlock" acquired after "lock"')
+                self.ui.develwarn(b'"wlock" acquired after "lock"')
 
         def unlock():
             if self.dirstate.pendingparentchange():
@@ -2621,17 +2632,17 @@
             else:
                 self.dirstate.write(None)
 
-            self._filecache['dirstate'].refresh()
+            self._filecache[b'dirstate'].refresh()
 
         l = self._lock(
             self.vfs,
-            "wlock",
+            b"wlock",
             wait,
             unlock,
             self.invalidatedirstate,
-            _('working directory of %s') % self.origroot,
+            _(b'working directory of %s') % self.origroot,
             inheritchecker=self._wlockchecktransaction,
-            parentenvvar='HG_WLOCK_LOCKER',
+            parentenvvar=b'HG_WLOCK_LOCKER',
         )
         self._wlockref = weakref.ref(l)
         return l
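
The develwarn above enforces the documented acquisition order: take the working-directory lock before the store lock. A runnable sketch with a stand-in object (_DummyRepo is not a real mercurial class; only the ordering is the point):

    import contextlib

    class _DummyRepo(object):
        # stand-in for localrepository
        @contextlib.contextmanager
        def wlock(self, wait=True):
            yield
        @contextlib.contextmanager
        def lock(self, wait=True):
            yield

    repo = _DummyRepo()
    with repo.wlock(), repo.lock():  # wlock first; avoids the devel warning
        pass
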
@@ -2669,7 +2680,7 @@
         if isinstance(fctx, context.filectx):
             node = fctx.filenode()
             if node in [fparent1, fparent2]:
-                self.ui.debug('reusing %s filelog entry\n' % fname)
+                self.ui.debug(b'reusing %s filelog entry\n' % fname)
                 if (
                     fparent1 != nullid
                     and manifest1.flags(fname) != fctx.flags()
@@ -2722,16 +2733,18 @@
             # behavior in this circumstance.
 
             if cnode:
-                self.ui.debug(" %s: copy %s:%s\n" % (fname, cfname, hex(cnode)))
+                self.ui.debug(
+                    b" %s: copy %s:%s\n" % (fname, cfname, hex(cnode))
+                )
                 if includecopymeta:
-                    meta["copy"] = cfname
-                    meta["copyrev"] = hex(cnode)
+                    meta[b"copy"] = cfname
+                    meta[b"copyrev"] = hex(cnode)
                 fparent1, fparent2 = nullid, newfparent
             else:
                 self.ui.warn(
                     _(
-                        "warning: can't find ancestor for '%s' "
-                        "copied from '%s'!\n"
+                        b"warning: can't find ancestor for '%s' "
+                        b"copied from '%s'!\n"
                     )
                     % (fname, cfname)
                 )
@@ -2764,24 +2777,24 @@
 
             for f in match.files():
                 f = self.dirstate.normalize(f)
-                if f == '.' or f in matched or f in wctx.substate:
+                if f == b'.' or f in matched or f in wctx.substate:
                     continue
                 if f in status.deleted:
-                    fail(f, _('file not found!'))
+                    fail(f, _(b'file not found!'))
                 if f in vdirs:  # visited directory
-                    d = f + '/'
+                    d = f + b'/'
                     for mf in matched:
                         if mf.startswith(d):
                             break
                     else:
-                        fail(f, _("no match under directory!"))
+                        fail(f, _(b"no match under directory!"))
                 elif f not in self.dirstate:
-                    fail(f, _("file not tracked!"))
+                    fail(f, _(b"file not tracked!"))
 
     @unfilteredmethod
     def commit(
         self,
-        text="",
+        text=b"",
         user=None,
         date=None,
         match=None,
@@ -2799,7 +2812,7 @@
             extra = {}
 
         def fail(f, msg):
-            raise error.Abort('%s: %s' % (f, msg))
+            raise error.Abort(b'%s: %s' % (f, msg))
 
         if not match:
             match = matchmod.always()
@@ -2817,8 +2830,8 @@
             if not force and merge and not match.always():
                 raise error.Abort(
                     _(
-                        'cannot partially commit a merge '
-                        '(do not specify files or patterns)'
+                        b'cannot partially commit a merge '
+                        b'(do not specify files or patterns)'
                     )
                 )
 
@@ -2844,16 +2857,16 @@
             # internal config: ui.allowemptycommit
             allowemptycommit = (
                 wctx.branch() != wctx.p1().branch()
-                or extra.get('close')
+                or extra.get(b'close')
                 or merge
                 or cctx.files()
-                or self.ui.configbool('ui', 'allowemptycommit')
+                or self.ui.configbool(b'ui', b'allowemptycommit')
             )
             if not allowemptycommit:
                 return None
 
             if merge and cctx.deleted():
-                raise error.Abort(_("cannot commit merge with missing files"))
+                raise error.Abort(_(b"cannot commit merge with missing files"))
 
             ms = mergemod.mergestate.read(self)
             mergeutil.checkunresolved(ms)
@@ -2873,7 +2886,7 @@
                 for s in sorted(commitsubs):
                     sub = wctx.sub(s)
                     self.ui.status(
-                        _('committing subrepository %s\n')
+                        _(b'committing subrepository %s\n')
                         % uipathfn(subrepoutil.subrelpath(sub))
                     )
                     sr = sub.commit(cctx._text, user, date)
@@ -2881,12 +2894,12 @@
                 subrepoutil.writestate(self, newstate)
 
             p1, p2 = self.dirstate.parents()
-            hookp1, hookp2 = hex(p1), (p2 != nullid and hex(p2) or '')
+            hookp1, hookp2 = hex(p1), (p2 != nullid and hex(p2) or b'')
             try:
                 self.hook(
-                    "precommit", throw=True, parent1=hookp1, parent2=hookp2
+                    b"precommit", throw=True, parent1=hookp1, parent2=hookp2
                 )
-                with self.transaction('commit'):
+                with self.transaction(b'commit'):
                     ret = self.commitctx(cctx, True)
                     # update bookmarks, dirstate and mergestate
                     bookmarks.update(self, [p1, p2], ret)
@@ -2895,7 +2908,7 @@
             except:  # re-raises
                 if edited:
                     self.ui.write(
-                        _('note: commit message saved in %s\n') % msgfn
+                        _(b'note: commit message saved in %s\n') % msgfn
                     )
                 raise
 
@@ -2904,7 +2917,7 @@
             # temporary commit got stripped before hook release
             if self.changelog.hasnode(ret):
                 self.hook(
-                    "commit", node=hex(ret), parent1=hookp1, parent2=hookp2
+                    b"commit", node=hex(ret), parent1=hookp1, parent2=hookp2
                 )
 
         self._afterlock(commithook)
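
hookp1/hookp2 above use the old `x and a or b` idiom to pass an empty byte string when there is no second parent. The same computation with only standard-library pieces (mercurial's node.hex is hexlify-based; the values here are synthetic):

    from binascii import hexlify as hex

    nullid = b'\0' * 20          # stands in for mercurial.node.nullid
    p2 = nullid                  # no second parent
    hookp2 = p2 != nullid and hex(p2) or b''
    assert hookp2 == b''
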
@@ -2930,23 +2943,23 @@
         p1, p2 = ctx.p1(), ctx.p2()
         user = ctx.user()
 
-        writecopiesto = self.ui.config('experimental', 'copies.write-to')
-        writefilecopymeta = writecopiesto != 'changeset-only'
+        writecopiesto = self.ui.config(b'experimental', b'copies.write-to')
+        writefilecopymeta = writecopiesto != b'changeset-only'
         writechangesetcopy = writecopiesto in (
-            'changeset-only',
-            'compatibility',
+            b'changeset-only',
+            b'compatibility',
         )
         p1copies, p2copies = None, None
         if writechangesetcopy:
             p1copies = ctx.p1copies()
             p2copies = ctx.p2copies()
         filesadded, filesremoved = None, None
-        with self.lock(), self.transaction("commit") as tr:
+        with self.lock(), self.transaction(b"commit") as tr:
             trp = weakref.proxy(tr)
 
             if ctx.manifestnode():
                 # reuse an existing manifest revision
-                self.ui.debug('reusing known manifest\n')
+                self.ui.debug(b'reusing known manifest\n')
                 mn = ctx.manifestnode()
                 files = ctx.files()
                 if writechangesetcopy:
@@ -2966,10 +2979,10 @@
                 changed = []
                 removed = list(ctx.removed())
                 linkrev = len(self)
-                self.ui.note(_("committing files:\n"))
+                self.ui.note(_(b"committing files:\n"))
                 uipathfn = scmutil.getuipathfn(self)
                 for f in sorted(ctx.modified() + ctx.added()):
-                    self.ui.note(uipathfn(f) + "\n")
+                    self.ui.note(uipathfn(f) + b"\n")
                     try:
                         fctx = ctx[f]
                         if fctx is None:
@@ -2988,14 +3001,14 @@
                             m.setflag(f, fctx.flags())
                     except OSError:
                         self.ui.warn(
-                            _("trouble committing %s!\n") % uipathfn(f)
+                            _(b"trouble committing %s!\n") % uipathfn(f)
                         )
                         raise
                     except IOError as inst:
                         errcode = getattr(inst, 'errno', errno.ENOENT)
                         if error or errcode and errcode != errno.ENOENT:
                             self.ui.warn(
-                                _("trouble committing %s!\n") % uipathfn(f)
+                                _(b"trouble committing %s!\n") % uipathfn(f)
                             )
                         raise
 
@@ -3060,11 +3073,11 @@
                     md = m1.diff(m, scmutil.matchfiles(self, ctx.files()))
                 if not files and md:
                     self.ui.debug(
-                        'not reusing manifest (no file change in '
-                        'changelog, but manifest differs)\n'
+                        b'not reusing manifest (no file change in '
+                        b'changelog, but manifest differs)\n'
                     )
                 if files or md:
-                    self.ui.note(_("committing manifest\n"))
+                    self.ui.note(_(b"committing manifest\n"))
                     # we're using narrowmatch here since it's already applied at
                     # other stages (such as dirstate.walk), so we're already
                     # ignoring things outside of narrowspec in most cases. The
@@ -3089,16 +3102,16 @@
                         filesremoved = removed
                 else:
                     self.ui.debug(
-                        'reusing manifest from p1 (listed files '
-                        'actually unchanged)\n'
+                        b'reusing manifest from p1 (listed files '
+                        b'actually unchanged)\n'
                     )
                     mn = p1.manifestnode()
             else:
-                self.ui.debug('reusing manifest from p1 (no file change)\n')
+                self.ui.debug(b'reusing manifest from p1 (no file change)\n')
                 mn = p1.manifestnode()
                 files = []
 
-            if writecopiesto == 'changeset-only':
+            if writecopiesto == b'changeset-only':
                 # If writing only to changeset extras, use None to indicate that
                 # no entry should be written. If writing to both, write an empty
                 # entry to prevent the reader from falling back to reading
@@ -3112,7 +3125,7 @@
                 files = origctx.files()
 
             # update changelog
-            self.ui.note(_("committing changelog\n"))
+            self.ui.note(_(b"committing changelog\n"))
             self.changelog.delayupdate(tr)
             n = self.changelog.add(
                 mn,
@@ -3129,9 +3142,9 @@
                 filesadded,
                 filesremoved,
             )
-            xp1, xp2 = p1.hex(), p2 and p2.hex() or ''
+            xp1, xp2 = p1.hex(), p2 and p2.hex() or b''
             self.hook(
-                'pretxncommit',
+                b'pretxncommit',
                 throw=True,
                 node=hex(n),
                 parent1=xp1,
@@ -3163,7 +3176,7 @@
         # When using the same lock to commit and strip, the phasecache is left
         # dirty after committing. Then when we strip, the repo is invalidated,
         # causing those changes to disappear.
-        if '_phasecache' in vars(self):
+        if b'_phasecache' in vars(self):
             self._phasecache.write()
 
     @unfilteredmethod
@@ -3200,7 +3213,7 @@
 
     def status(
         self,
-        node1='.',
+        node1=b'.',
         node2=None,
         match=None,
         ignored=False,
@@ -3331,18 +3344,18 @@
             hookargs[r'key'] = key
             hookargs[r'old'] = old
             hookargs[r'new'] = new
-            self.hook('prepushkey', throw=True, **hookargs)
+            self.hook(b'prepushkey', throw=True, **hookargs)
         except error.HookAbort as exc:
-            self.ui.write_err(_("pushkey-abort: %s\n") % exc)
+            self.ui.write_err(_(b"pushkey-abort: %s\n") % exc)
             if exc.hint:
-                self.ui.write_err(_("(%s)\n") % exc.hint)
+                self.ui.write_err(_(b"(%s)\n") % exc.hint)
             return False
-        self.ui.debug('pushing key for "%s:%s"\n' % (namespace, key))
+        self.ui.debug(b'pushing key for "%s:%s"\n' % (namespace, key))
         ret = pushkey.push(self, namespace, key, old, new)
 
         def runhook():
             self.hook(
-                'pushkey',
+                b'pushkey',
                 namespace=namespace,
                 key=key,
                 old=old,
@@ -3354,15 +3367,15 @@
         return ret
 
     def listkeys(self, namespace):
-        self.hook('prelistkeys', throw=True, namespace=namespace)
-        self.ui.debug('listing keys for "%s"\n' % namespace)
+        self.hook(b'prelistkeys', throw=True, namespace=namespace)
+        self.ui.debug(b'listing keys for "%s"\n' % namespace)
         values = pushkey.list(self, namespace)
-        self.hook('listkeys', namespace=namespace, values=values)
+        self.hook(b'listkeys', namespace=namespace, values=values)
         return values
 
     def debugwireargs(self, one, two, three=None, four=None, five=None):
         '''used to test argument passing over the wire'''
-        return "%s %s %s %s %s" % (
+        return b"%s %s %s %s %s" % (
             one,
             two,
             pycompat.bytestr(three),
@@ -3371,7 +3384,7 @@
         )
 
     def savecommitmessage(self, text):
-        fp = self.vfs('last-message.txt', 'wb')
+        fp = self.vfs(b'last-message.txt', b'wb')
         try:
             fp.write(text)
         finally:
@@ -3399,8 +3412,8 @@
 
 def undoname(fn):
     base, name = os.path.split(fn)
-    assert name.startswith('journal')
-    return os.path.join(base, name.replace('journal', 'undo', 1))
+    assert name.startswith(b'journal')
+    return os.path.join(base, name.replace(b'journal', b'undo', 1))
 
 
 def instance(ui, path, create, intents=None, createopts=None):
@@ -3423,9 +3436,9 @@
     """
     createopts = dict(createopts or {})
 
-    if 'backend' not in createopts:
+    if b'backend' not in createopts:
         # experimental config: storage.new-repo-backend
-        createopts['backend'] = ui.config('storage', 'new-repo-backend')
+        createopts[b'backend'] = ui.config(b'storage', b'new-repo-backend')
 
     return createopts
 
@@ -3438,85 +3451,86 @@
     """
     # If the repo is being created from a shared repository, we copy
     # its requirements.
-    if 'sharedrepo' in createopts:
-        requirements = set(createopts['sharedrepo'].requirements)
-        if createopts.get('sharedrelative'):
-            requirements.add('relshared')
+    if b'sharedrepo' in createopts:
+        requirements = set(createopts[b'sharedrepo'].requirements)
+        if createopts.get(b'sharedrelative'):
+            requirements.add(b'relshared')
         else:
-            requirements.add('shared')
+            requirements.add(b'shared')
 
         return requirements
 
-    if 'backend' not in createopts:
+    if b'backend' not in createopts:
         raise error.ProgrammingError(
-            'backend key not present in createopts; '
-            'was defaultcreateopts() called?'
+            b'backend key not present in createopts; '
+            b'was defaultcreateopts() called?'
         )
 
-    if createopts['backend'] != 'revlogv1':
+    if createopts[b'backend'] != b'revlogv1':
         raise error.Abort(
             _(
-                'unable to determine repository requirements for '
-                'storage backend: %s'
+                b'unable to determine repository requirements for '
+                b'storage backend: %s'
             )
-            % createopts['backend']
+            % createopts[b'backend']
         )
 
-    requirements = {'revlogv1'}
-    if ui.configbool('format', 'usestore'):
-        requirements.add('store')
-        if ui.configbool('format', 'usefncache'):
-            requirements.add('fncache')
-            if ui.configbool('format', 'dotencode'):
-                requirements.add('dotencode')
-
-    compengine = ui.config('format', 'revlog-compression')
+    requirements = {b'revlogv1'}
+    if ui.configbool(b'format', b'usestore'):
+        requirements.add(b'store')
+        if ui.configbool(b'format', b'usefncache'):
+            requirements.add(b'fncache')
+            if ui.configbool(b'format', b'dotencode'):
+                requirements.add(b'dotencode')
+
+    compengine = ui.config(b'format', b'revlog-compression')
     if compengine not in util.compengines:
         raise error.Abort(
             _(
-                'compression engine %s defined by '
-                'format.revlog-compression not available'
+                b'compression engine %s defined by '
+                b'format.revlog-compression not available'
             )
             % compengine,
             hint=_(
-                'run "hg debuginstall" to list available ' 'compression engines'
+                b'run "hg debuginstall" to list available '
+                b'compression engines'
             ),
         )
 
     # zlib is the historical default and doesn't need an explicit requirement.
-    elif compengine == 'zstd':
-        requirements.add('revlog-compression-zstd')
-    elif compengine != 'zlib':
-        requirements.add('exp-compression-%s' % compengine)
+    elif compengine == b'zstd':
+        requirements.add(b'revlog-compression-zstd')
+    elif compengine != b'zlib':
+        requirements.add(b'exp-compression-%s' % compengine)
 
     if scmutil.gdinitconfig(ui):
-        requirements.add('generaldelta')
-        if ui.configbool('format', 'sparse-revlog'):
+        requirements.add(b'generaldelta')
+        if ui.configbool(b'format', b'sparse-revlog'):
             requirements.add(SPARSEREVLOG_REQUIREMENT)
 
     # experimental config: format.use-side-data
-    if ui.configbool('format', 'use-side-data'):
+    if ui.configbool(b'format', b'use-side-data'):
         requirements.add(SIDEDATA_REQUIREMENT)
-    if ui.configbool('experimental', 'treemanifest'):
-        requirements.add('treemanifest')
-
-    revlogv2 = ui.config('experimental', 'revlogv2')
-    if revlogv2 == 'enable-unstable-format-and-corrupt-my-data':
-        requirements.remove('revlogv1')
+    if ui.configbool(b'experimental', b'treemanifest'):
+        requirements.add(b'treemanifest')
+
+    revlogv2 = ui.config(b'experimental', b'revlogv2')
+    if revlogv2 == b'enable-unstable-format-and-corrupt-my-data':
+        requirements.remove(b'revlogv1')
         # generaldelta is implied by revlogv2.
-        requirements.discard('generaldelta')
+        requirements.discard(b'generaldelta')
         requirements.add(REVLOGV2_REQUIREMENT)
     # experimental config: format.internal-phase
-    if ui.configbool('format', 'internal-phase'):
-        requirements.add('internal-phase')
-
-    if createopts.get('narrowfiles'):
+    if ui.configbool(b'format', b'internal-phase'):
+        requirements.add(b'internal-phase')
+
+    if createopts.get(b'narrowfiles'):
         requirements.add(repository.NARROW_REQUIREMENT)
 
-    if createopts.get('lfs'):
-        requirements.add('lfs')
-
-    if ui.configbool('format', 'bookmarks-in-store'):
+    if createopts.get(b'lfs'):
+        requirements.add(b'lfs')
+
+    if ui.configbool(b'format', b'bookmarks-in-store'):
         requirements.add(bookmarks.BOOKMARKS_IN_STORE_REQUIREMENT)
 
     return requirements
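
The returned set ends up in .hg/requires, one entry per line; assuming the writer sorts entries as scmutil.writerequires does, a default zlib repository serializes roughly like this sketch:

    requirements = {
        b'revlogv1', b'store', b'fncache', b'dotencode', b'generaldelta',
    }
    blob = b''.join(r + b'\n' for r in sorted(requirements))
    assert blob.startswith(b'dotencode\n')
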
@@ -3537,13 +3551,13 @@
     they know how to handle.
     """
     known = {
-        'backend',
-        'lfs',
-        'narrowfiles',
-        'sharedrepo',
-        'sharedrelative',
-        'shareditems',
-        'shallowfilestore',
+        b'backend',
+        b'lfs',
+        b'narrowfiles',
+        b'sharedrepo',
+        b'sharedrelative',
+        b'shareditems',
+        b'shallowfilestore',
     }
 
     return {k: v for k, v in createopts.items() if k not in known}
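
Taken alone, this comprehension is the whole contract of filterknowncreateopts(): unrecognized options come back to the caller, which then aborts. A standalone rerun (b'frobnicate' is an invented option and the known set is abbreviated):

    known = {b'backend', b'lfs', b'narrowfiles'}   # subset, for brevity
    createopts = {b'backend': b'revlogv1', b'frobnicate': True}
    unknown = {k: v for k, v in createopts.items() if k not in known}
    assert unknown == {b'frobnicate': True}
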
@@ -3582,17 +3596,17 @@
 
     if not isinstance(unknownopts, dict):
         raise error.ProgrammingError(
-            'filterknowncreateopts() did not return ' 'a dict'
+            b'filterknowncreateopts() did not return ' b'a dict'
         )
 
     if unknownopts:
         raise error.Abort(
             _(
-                'unable to create repository because of unknown '
-                'creation option: %s'
+                b'unable to create repository because of unknown '
+                b'creation option: %s'
             )
-            % ', '.join(sorted(unknownopts)),
-            hint=_('is a required extension not loaded?'),
+            % b', '.join(sorted(unknownopts)),
+            hint=_(b'is a required extension not loaded?'),
         )
 
     requirements = newreporequirements(ui, createopts=createopts)
@@ -3601,19 +3615,19 @@
 
     hgvfs = vfsmod.vfs(wdirvfs.join(b'.hg'))
     if hgvfs.exists():
-        raise error.RepoError(_('repository %s already exists') % path)
-
-    if 'sharedrepo' in createopts:
-        sharedpath = createopts['sharedrepo'].sharedpath
-
-        if createopts.get('sharedrelative'):
+        raise error.RepoError(_(b'repository %s already exists') % path)
+
+    if b'sharedrepo' in createopts:
+        sharedpath = createopts[b'sharedrepo'].sharedpath
+
+        if createopts.get(b'sharedrelative'):
             try:
                 sharedpath = os.path.relpath(sharedpath, hgvfs.base)
             except (IOError, ValueError) as e:
                 # ValueError is raised on Windows if the drive letters differ
                 # on each path.
                 raise error.Abort(
-                    _('cannot calculate relative path'),
+                    _(b'cannot calculate relative path'),
                     hint=stringutil.forcebytestr(e),
                 )
 
@@ -3621,11 +3635,11 @@
         wdirvfs.makedirs()
 
     hgvfs.makedir(notindexed=True)
-    if 'sharedrepo' not in createopts:
+    if b'sharedrepo' not in createopts:
         hgvfs.mkdir(b'cache')
     hgvfs.mkdir(b'wcache')
 
-    if b'store' in requirements and 'sharedrepo' not in createopts:
+    if b'store' in requirements and b'sharedrepo' not in createopts:
         hgvfs.mkdir(b'store')
 
         # We create an invalid changelog outside the store so very old
@@ -3645,11 +3659,11 @@
     scmutil.writerequires(hgvfs, requirements)
 
     # Write out file telling readers where to find the shared store.
-    if 'sharedrepo' in createopts:
+    if b'sharedrepo' in createopts:
         hgvfs.write(b'sharedpath', sharedpath)
 
-    if createopts.get('shareditems'):
-        shared = b'\n'.join(sorted(createopts['shareditems'])) + b'\n'
+    if createopts.get(b'shareditems'):
+        shared = b'\n'.join(sorted(createopts[b'shareditems'])) + b'\n'
         hgvfs.write(b'shared', shared)
 
 
@@ -3669,7 +3683,7 @@
                 return object.__getattribute__(self, item)
 
             raise error.ProgrammingError(
-                'repo instances should not be used ' 'after unshare'
+                b'repo instances should not be used ' b'after unshare'
             )
 
         def close(self):
--- a/mercurial/lock.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/lock.py	Sun Oct 06 09:48:39 2019 -0400
@@ -35,9 +35,9 @@
     extra Linux-specific pid namespace identifier.
     """
     result = encoding.strtolocal(socket.gethostname())
-    if pycompat.sysplatform.startswith('linux'):
+    if pycompat.sysplatform.startswith(b'linux'):
         try:
-            result += '/%x' % os.stat('/proc/self/ns/pid').st_ino
+            result += b'/%x' % os.stat(b'/proc/self/ns/pid').st_ino
         except OSError as ex:
             if ex.errno not in (errno.ENOENT, errno.EACCES, errno.ENOTDIR):
                 raise
@@ -76,7 +76,13 @@
     try:
         # save handlers first so they can be restored even if a setup is
         # interrupted between signal.signal() and orighandlers[] =.
-        for name in ['CTRL_C_EVENT', 'SIGINT', 'SIGBREAK', 'SIGHUP', 'SIGTERM']:
+        for name in [
+            b'CTRL_C_EVENT',
+            b'SIGINT',
+            b'SIGBREAK',
+            b'SIGHUP',
+            b'SIGTERM',
+        ]:
             num = getattr(signal, name, None)
             if num and num not in orighandlers:
                 orighandlers[num] = signal.getsignal(num)
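
A note on the Python 2/3 boundary this hunk sits on: Python 3's getattr() accepts only native str attribute names, even when a default is supplied, so bytes literals that flow into getattr raise rather than fall back to the default. A quick runnable demonstration:

    import signal

    try:
        getattr(signal, b'SIGINT', None)   # TypeError on Python 3
    except TypeError:
        pass
    assert getattr(signal, 'SIGINT', None) is not None
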
@@ -114,17 +120,17 @@
     def printwarning(printer, locker):
         """issue the usual "waiting on lock" message through any channel"""
         # show more details for new-style locks
-        if ':' in locker:
-            host, pid = locker.split(":", 1)
+        if b':' in locker:
+            host, pid = locker.split(b":", 1)
             msg = _(
-                "waiting for lock on %s held by process %r on host %r\n"
+                b"waiting for lock on %s held by process %r on host %r\n"
             ) % (
                 pycompat.bytestr(l.desc),
                 pycompat.bytestr(pid),
                 pycompat.bytestr(host),
             )
         else:
-            msg = _("waiting for lock on %s held by %r\n") % (
+            msg = _(b"waiting for lock on %s held by %r\n") % (
                 l.desc,
                 pycompat.bytestr(locker),
             )
@@ -159,9 +165,9 @@
     l.delay = delay
     if l.delay:
         if 0 <= warningidx <= l.delay:
-            ui.warn(_("got lock after %d seconds\n") % l.delay)
+            ui.warn(_(b"got lock after %d seconds\n") % l.delay)
         else:
-            ui.debug("got lock after %d seconds\n" % l.delay)
+            ui.debug(b"got lock after %d seconds\n" % l.delay)
     if l.acquirefn:
         l.acquirefn()
     return l
@@ -268,7 +274,7 @@
             return
         if lock._host is None:
             lock._host = _getlockprefix()
-        lockname = '%s:%d' % (lock._host, self.pid)
+        lockname = b'%s:%d' % (lock._host, self.pid)
         retry = 5
         while not self.held and retry:
             retry -= 1
@@ -307,7 +313,7 @@
             # use empty locker to mean "busy for frequent lock/unlock
             # by many processes"
             raise error.LockHeld(
-                errno.EAGAIN, self.vfs.join(self.f), self.desc, ""
+                errno.EAGAIN, self.vfs.join(self.f), self.desc, b""
             )
 
     def _readlock(self):
@@ -327,7 +333,7 @@
         if locker is None:
             return None
         try:
-            host, pid = locker.split(":", 1)
+            host, pid = locker.split(b":", 1)
         except ValueError:
             return locker
         if host != lock._host:
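
The value parsed here is the b'host:pid' payload written at lock time (on Linux the host part may carry a /namespace-inode suffix from _getlockprefix). Reduced to the one split, with a made-up locker string:

    locker = b'buildhost/1a2b:4242'
    host, pid = locker.split(b':', 1)
    assert (host, pid) == (b'buildhost/1a2b', b'4242')
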
@@ -341,7 +347,7 @@
         # if locker dead, break lock.  must do this with another lock
         # held, or can race and break valid lock.
         try:
-            l = lock(self.vfs, self.f + '.break', timeout=0)
+            l = lock(self.vfs, self.f + b'.break', timeout=0)
             self.vfs.unlink(self.f)
             l.release()
         except error.LockError:
@@ -371,11 +377,11 @@
         """
         if not self.held:
             raise error.LockInheritanceContractViolation(
-                'inherit can only be called while lock is held'
+                b'inherit can only be called while lock is held'
             )
         if self._inherited:
             raise error.LockInheritanceContractViolation(
-                'inherit cannot be called while lock is already inherited'
+                b'inherit cannot be called while lock is already inherited'
             )
         if self._inheritchecker is not None:
             self._inheritchecker()
--- a/mercurial/logcmdutil.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/logcmdutil.py	Sun Oct 06 09:48:39 2019 -0400
@@ -44,14 +44,14 @@
 
 def getlimit(opts):
     """get the log limit according to option -l/--limit"""
-    limit = opts.get('limit')
+    limit = opts.get(b'limit')
     if limit:
         try:
             limit = int(limit)
         except ValueError:
-            raise error.Abort(_('limit must be a positive integer'))
+            raise error.Abort(_(b'limit must be a positive integer'))
         if limit <= 0:
-            raise error.Abort(_('limit must be positive'))
+            raise error.Abort(_(b'limit must be positive'))
     else:
         limit = None
     return limit
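
For reference, getlimit()'s parsing contract restated as a self-contained sketch (ValueError stands in for error.Abort so it runs without mercurial):

    def getlimit(opts):
        limit = opts.get(b'limit')
        if not limit:
            return None
        limit = int(limit)             # error.Abort upstream on ValueError
        if limit <= 0:
            raise ValueError('limit must be positive')
        return limit

    assert getlimit({b'limit': b'3'}) == 3
    assert getlimit({}) is None
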
@@ -68,8 +68,8 @@
     stat=False,
     fp=None,
     graphwidth=0,
-    prefix='',
-    root='',
+    prefix=b'',
+    root=b'',
     listsubrepos=False,
     hunksfilterfn=None,
 ):
@@ -79,7 +79,7 @@
     if root:
         relroot = pathutil.canonpath(repo.root, repo.getcwd(), root)
     else:
-        relroot = ''
+        relroot = b''
     copysourcematch = None
 
     def compose(f, g):
@@ -88,31 +88,31 @@
     def pathfn(f):
         return posixpath.join(prefix, f)
 
-    if relroot != '':
+    if relroot != b'':
         # XXX relative roots currently don't work if the root is within a
         # subrepo
         uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
         uirelroot = uipathfn(pathfn(relroot))
-        relroot += '/'
+        relroot += b'/'
         for matchroot in match.files():
             if not matchroot.startswith(relroot):
                 ui.warn(
-                    _('warning: %s not inside relative root %s\n')
+                    _(b'warning: %s not inside relative root %s\n')
                     % (uipathfn(pathfn(matchroot)), uirelroot)
                 )
 
-        relrootmatch = scmutil.match(ctx2, pats=[relroot], default='path')
+        relrootmatch = scmutil.match(ctx2, pats=[relroot], default=b'path')
         match = matchmod.intersectmatchers(match, relrootmatch)
         copysourcematch = relrootmatch
 
         checkroot = repo.ui.configbool(
-            'devel', 'all-warnings'
-        ) or repo.ui.configbool('devel', 'check-relroot')
+            b'devel', b'all-warnings'
+        ) or repo.ui.configbool(b'devel', b'check-relroot')
 
         def relrootpathfn(f):
             if checkroot and not f.startswith(relroot):
                 raise AssertionError(
-                    "file %s doesn't start with relroot %s" % (f, relroot)
+                    b"file %s doesn't start with relroot %s" % (f, relroot)
                 )
             return f[len(relroot) :]
 
@@ -214,14 +214,14 @@
 
 
 def changesetlabels(ctx):
-    labels = ['log.changeset', 'changeset.%s' % ctx.phasestr()]
+    labels = [b'log.changeset', b'changeset.%s' % ctx.phasestr()]
     if ctx.obsolete():
-        labels.append('changeset.obsolete')
+        labels.append(b'changeset.obsolete')
     if ctx.isunstable():
-        labels.append('changeset.unstable')
+        labels.append(b'changeset.unstable')
         for instability in ctx.instabilities():
-            labels.append('instability.%s' % instability)
-    return ' '.join(labels)
+            labels.append(b'instability.%s' % instability)
+    return b' '.join(labels)
 
 
 class changesetprinter(object):
@@ -233,8 +233,8 @@
         self.buffered = buffered
         self._differ = differ or changesetdiffer()
         self._diffopts = patch.diffallopts(ui, diffopts)
-        self._includestat = diffopts and diffopts.get('stat')
-        self._includediff = diffopts and diffopts.get('patch')
+        self._includestat = diffopts and diffopts.get(b'stat')
+        self._includediff = diffopts and diffopts.get(b'patch')
         self.header = {}
         self.hunk = {}
         self.lastheader = None
@@ -269,17 +269,17 @@
     def _show(self, ctx, copies, props):
         '''show a single changeset or file revision'''
         changenode = ctx.node()
-        graphwidth = props.get('graphwidth', 0)
+        graphwidth = props.get(b'graphwidth', 0)
 
         if self.ui.quiet:
             self.ui.write(
-                "%s\n" % scmutil.formatchangeid(ctx), label='log.node'
+                b"%s\n" % scmutil.formatchangeid(ctx), label=b'log.node'
             )
             return
 
         columns = self._columns
         self.ui.write(
-            columns['changeset'] % scmutil.formatchangeid(ctx),
+            columns[b'changeset'] % scmutil.formatchangeid(ctx),
             label=changesetlabels(ctx),
         )
 
@@ -287,24 +287,26 @@
         # compatibility
         branch = ctx.branch()
         # don't show the default branch name
-        if branch != 'default':
-            self.ui.write(columns['branch'] % branch, label='log.branch')
+        if branch != b'default':
+            self.ui.write(columns[b'branch'] % branch, label=b'log.branch')
 
         for nsname, ns in self.repo.names.iteritems():
             # branches has special logic already handled above, so here we just
             # skip it
-            if nsname == 'branches':
+            if nsname == b'branches':
                 continue
             # we will use the templatename as the color name since those two
             # should be the same
             for name in ns.names(self.repo, changenode):
-                self.ui.write(ns.logfmt % name, label='log.%s' % ns.colorname)
+                self.ui.write(ns.logfmt % name, label=b'log.%s' % ns.colorname)
         if self.ui.debugflag:
-            self.ui.write(columns['phase'] % ctx.phasestr(), label='log.phase')
+            self.ui.write(
+                columns[b'phase'] % ctx.phasestr(), label=b'log.phase'
+            )
         for pctx in scmutil.meaningfulparents(self.repo, ctx):
-            label = 'log.parent changeset.%s' % pctx.phasestr()
+            label = b'log.parent changeset.%s' % pctx.phasestr()
             self.ui.write(
-                columns['parent'] % scmutil.formatchangeid(pctx), label=label
+                columns[b'parent'] % scmutil.formatchangeid(pctx), label=label
             )
 
         if self.ui.debugflag:
@@ -315,20 +317,20 @@
             else:
                 mrev = self.repo.manifestlog.rev(mnode)
             self.ui.write(
-                columns['manifest']
+                columns[b'manifest']
                 % scmutil.formatrevnode(self.ui, mrev, mnode),
-                label='ui.debug log.manifest',
+                label=b'ui.debug log.manifest',
             )
-        self.ui.write(columns['user'] % ctx.user(), label='log.user')
+        self.ui.write(columns[b'user'] % ctx.user(), label=b'log.user')
         self.ui.write(
-            columns['date'] % dateutil.datestr(ctx.date()), label='log.date'
+            columns[b'date'] % dateutil.datestr(ctx.date()), label=b'log.date'
         )
 
         if ctx.isunstable():
             instabilities = ctx.instabilities()
             self.ui.write(
-                columns['instability'] % ', '.join(instabilities),
-                label='log.instability',
+                columns[b'instability'] % b', '.join(instabilities),
+                label=b'log.instability',
             )
 
         elif ctx.obsolete():
@@ -338,45 +340,46 @@
 
         if self.ui.debugflag:
             files = ctx.p1().status(ctx)[:3]
-            for key, value in zip(['files', 'files+', 'files-'], files):
+            for key, value in zip([b'files', b'files+', b'files-'], files):
                 if value:
                     self.ui.write(
-                        columns[key] % " ".join(value),
-                        label='ui.debug log.files',
+                        columns[key] % b" ".join(value),
+                        label=b'ui.debug log.files',
                     )
         elif ctx.files() and self.ui.verbose:
             self.ui.write(
-                columns['files'] % " ".join(ctx.files()),
-                label='ui.note log.files',
+                columns[b'files'] % b" ".join(ctx.files()),
+                label=b'ui.note log.files',
             )
         if copies and self.ui.verbose:
-            copies = ['%s (%s)' % c for c in copies]
+            copies = [b'%s (%s)' % c for c in copies]
             self.ui.write(
-                columns['copies'] % ' '.join(copies), label='ui.note log.copies'
+                columns[b'copies'] % b' '.join(copies),
+                label=b'ui.note log.copies',
             )
 
         extra = ctx.extra()
         if extra and self.ui.debugflag:
             for key, value in sorted(extra.items()):
                 self.ui.write(
-                    columns['extra'] % (key, stringutil.escapestr(value)),
-                    label='ui.debug log.extra',
+                    columns[b'extra'] % (key, stringutil.escapestr(value)),
+                    label=b'ui.debug log.extra',
                 )
 
         description = ctx.description().strip()
         if description:
             if self.ui.verbose:
                 self.ui.write(
-                    _("description:\n"), label='ui.note log.description'
+                    _(b"description:\n"), label=b'ui.note log.description'
                 )
-                self.ui.write(description, label='ui.note log.description')
-                self.ui.write("\n\n")
+                self.ui.write(description, label=b'ui.note log.description')
+                self.ui.write(b"\n\n")
             else:
                 self.ui.write(
-                    columns['summary'] % description.splitlines()[0],
-                    label='log.summary',
+                    columns[b'summary'] % description.splitlines()[0],
+                    label=b'log.summary',
                 )
-        self.ui.write("\n")
+        self.ui.write(b"\n")
 
         self._showpatch(ctx, graphwidth)
 
@@ -385,16 +388,17 @@
         tres = formatter.templateresources(self.repo.ui, self.repo)
         t = formatter.maketemplater(
             self.repo.ui,
-            '{join(obsfate, "\n")}',
+            b'{join(obsfate, "\n")}',
             defaults=templatekw.keywords,
             resources=tres,
         )
-        obsfate = t.renderdefault({'ctx': ctx}).splitlines()
+        obsfate = t.renderdefault({b'ctx': ctx}).splitlines()
 
         if obsfate:
             for obsfateline in obsfate:
                 self.ui.write(
-                    self._columns['obsolete'] % obsfateline, label='log.obsfate'
+                    self._columns[b'obsolete'] % obsfateline,
+                    label=b'log.obsfate',
                 )
 
     def _exthook(self, ctx):
@@ -407,13 +411,13 @@
                 self.ui, ctx, self._diffopts, graphwidth, stat=True
             )
         if self._includestat and self._includediff:
-            self.ui.write("\n")
+            self.ui.write(b"\n")
         if self._includediff:
             self._differ.showdiff(
                 self.ui, ctx, self._diffopts, graphwidth, stat=False
             )
         if self._includestat or self._includediff:
-            self.ui.write("\n")
+            self.ui.write(b"\n")
 
 
 class changesetformatter(changesetprinter):
@@ -445,10 +449,10 @@
             user=ctx.user(),
             date=fm.formatdate(ctx.date()),
             desc=ctx.description(),
-            bookmarks=fm.formatlist(ctx.bookmarks(), name='bookmark'),
-            tags=fm.formatlist(ctx.tags(), name='tag'),
+            bookmarks=fm.formatlist(ctx.bookmarks(), name=b'bookmark'),
+            tags=fm.formatlist(ctx.tags(), name=b'tag'),
             parents=fm.formatlist(
-                [fm.hexfunc(c.node()) for c in ctx.parents()], name='node'
+                [fm.hexfunc(c.node()) for c in ctx.parents()], name=b'node'
             ),
         )
 
@@ -460,16 +464,16 @@
 
             files = ctx.p1().status(ctx)
             fm.data(
-                modified=fm.formatlist(files[0], name='file'),
-                added=fm.formatlist(files[1], name='file'),
-                removed=fm.formatlist(files[2], name='file'),
+                modified=fm.formatlist(files[0], name=b'file'),
+                added=fm.formatlist(files[1], name=b'file'),
+                removed=fm.formatlist(files[2], name=b'file'),
             )
 
         elif self.ui.verbose:
-            fm.data(files=fm.formatlist(ctx.files(), name='file'))
+            fm.data(files=fm.formatlist(ctx.files(), name=b'file'))
             if copies:
                 fm.data(
-                    copies=fm.formatdict(copies, key='name', value='source')
+                    copies=fm.formatdict(copies, key=b'name', value=b'source')
                 )
 
         if self._includestat:
@@ -510,21 +514,21 @@
 
         self._tref = tmplspec.ref
         self._parts = {
-            'header': '',
-            'footer': '',
+            b'header': b'',
+            b'footer': b'',
             tmplspec.ref: tmplspec.ref,
-            'docheader': '',
-            'docfooter': '',
-            'separator': '',
+            b'docheader': b'',
+            b'docfooter': b'',
+            b'separator': b'',
         }
         if tmplspec.mapfile:
             # find correct templates for current mode, for backward
             # compatibility with 'log -v/-q/--debug' using a mapfile
             tmplmodes = [
-                (True, ''),
-                (self.ui.verbose, '_verbose'),
-                (self.ui.quiet, '_quiet'),
-                (self.ui.debugflag, '_debug'),
+                (True, b''),
+                (self.ui.verbose, b'_verbose'),
+                (self.ui.quiet, b'_quiet'),
+                (self.ui.debugflag, b'_debug'),
             ]
             for mode, postfix in tmplmodes:
                 for t in self._parts:
@@ -536,33 +540,33 @@
             m = formatter.templatepartsmap(tmplspec, self.t, partnames)
             self._parts.update(m)
 
-        if self._parts['docheader']:
-            self.ui.write(self.t.render(self._parts['docheader'], {}))
+        if self._parts[b'docheader']:
+            self.ui.write(self.t.render(self._parts[b'docheader'], {}))
 
     def close(self):
-        if self._parts['docfooter']:
+        if self._parts[b'docfooter']:
             if not self.footer:
-                self.footer = ""
-            self.footer += self.t.render(self._parts['docfooter'], {})
+                self.footer = b""
+            self.footer += self.t.render(self._parts[b'docfooter'], {})
         return super(changesettemplater, self).close()
 
     def _show(self, ctx, copies, props):
         '''show a single changeset or file revision'''
         props = props.copy()
-        props['ctx'] = ctx
-        props['index'] = index = next(self._counter)
-        props['revcache'] = {'copies': copies}
-        graphwidth = props.get('graphwidth', 0)
+        props[b'ctx'] = ctx
+        props[b'index'] = index = next(self._counter)
+        props[b'revcache'] = {b'copies': copies}
+        graphwidth = props.get(b'graphwidth', 0)
 
         # write separator, which wouldn't work well with the header part below
         # since there's inherently a conflict between header (across items) and
         # separator (per item)
-        if self._parts['separator'] and index > 0:
-            self.ui.write(self.t.render(self._parts['separator'], {}))
+        if self._parts[b'separator'] and index > 0:
+            self.ui.write(self.t.render(self._parts[b'separator'], {}))
 
         # write header
-        if self._parts['header']:
-            h = self.t.render(self._parts['header'], props)
+        if self._parts[b'header']:
+            h = self.t.render(self._parts[b'header'], props)
             if self.buffered:
                 self.header[ctx.rev()] = h
             else:
@@ -575,18 +579,18 @@
         self.ui.write(self.t.render(key, props))
         self._showpatch(ctx, graphwidth)
 
-        if self._parts['footer']:
+        if self._parts[b'footer']:
             if not self.footer:
-                self.footer = self.t.render(self._parts['footer'], props)
+                self.footer = self.t.render(self._parts[b'footer'], props)
 
 
 def templatespec(tmpl, mapfile):
     if pycompat.ispy3:
-        assert not isinstance(tmpl, str), 'tmpl must not be a str'
+        assert not isinstance(tmpl, str), b'tmpl must not be a str'
     if mapfile:
-        return formatter.templatespec('changeset', tmpl, mapfile)
+        return formatter.templatespec(b'changeset', tmpl, mapfile)
     else:
-        return formatter.templatespec('', tmpl, None)
+        return formatter.templatespec(b'', tmpl, None)
 
 
 def _lookuptemplate(ui, tmpl, style):
@@ -597,17 +601,17 @@
 
     # ui settings
     if not tmpl and not style:  # template are stronger than style
-        tmpl = ui.config('ui', 'logtemplate')
+        tmpl = ui.config(b'ui', b'logtemplate')
         if tmpl:
             return templatespec(templater.unquotestring(tmpl), None)
         else:
-            style = util.expandpath(ui.config('ui', 'style'))
+            style = util.expandpath(ui.config(b'ui', b'style'))
 
     if not tmpl and style:
         mapfile = style
         if not os.path.split(mapfile)[0]:
             mapname = templater.templatepath(
-                'map-cmdline.' + mapfile
+                b'map-cmdline.' + mapfile
             ) or templater.templatepath(mapfile)
             if mapname:
                 mapfile = mapname
@@ -616,7 +620,7 @@
     if not tmpl:
         return templatespec(None, None)
 
-    return formatter.lookuptemplate(ui, 'changeset', tmpl)
+    return formatter.lookuptemplate(ui, b'changeset', tmpl)
 
 
 def maketemplater(ui, repo, tmpl, buffered=False):
@@ -638,11 +642,11 @@
     regular display via changesetprinter() is done.
     """
     postargs = (differ, opts, buffered)
-    if opts.get('template') in {'cbor', 'json'}:
-        fm = ui.formatter('log', opts)
+    if opts.get(b'template') in {b'cbor', b'json'}:
+        fm = ui.formatter(b'log', opts)
         return changesetformatter(ui, repo, fm, *postargs)
 
-    spec = _lookuptemplate(ui, opts.get('template'), opts.get('style'))
+    spec = _lookuptemplate(ui, opts.get(b'template'), opts.get(b'style'))
 
     if not spec.ref and not spec.tmpl and not spec.mapfile:
         return changesetprinter(ui, repo, *postargs)
@@ -666,11 +670,11 @@
     # platforms without shell expansion (windows).
     wctx = repo[None]
     match, pats = scmutil.matchandpats(wctx, pats, opts)
-    slowpath = match.anypats() or (not match.always() and opts.get('removed'))
+    slowpath = match.anypats() or (not match.always() and opts.get(b'removed'))
     if not slowpath:
-        follow = opts.get('follow') or opts.get('follow_first')
+        follow = opts.get(b'follow') or opts.get(b'follow_first')
         startctxs = []
-        if follow and opts.get('rev'):
+        if follow and opts.get(b'rev'):
             startctxs = [repo[r] for r in revs]
         for f in match.files():
             if follow and startctxs:
@@ -687,7 +691,10 @@
                     continue
                 else:
                     raise error.Abort(
-                        _('cannot follow file not in parent ' 'revision: "%s"')
+                        _(
+                            b'cannot follow file not in parent '
+                            b'revision: "%s"'
+                        )
                         % f
                     )
             filelog = repo.file(f)
@@ -696,7 +703,7 @@
                 # try to find matching entries on the slow path.
                 if follow:
                     raise error.Abort(
-                        _('cannot follow nonexistent file: "%s"') % f
+                        _(b'cannot follow nonexistent file: "%s"') % f
                     )
                 slowpath = True
 
@@ -706,7 +713,7 @@
         # slowpath; otherwise, we can turn off the slowpath
         if slowpath:
             for path in match.files():
-                if path == '.' or path in repo.store:
+                if path == b'.' or path in repo.store:
                     break
             else:
                 slowpath = False
@@ -744,15 +751,15 @@
 
 
 _opt2logrevset = {
-    'no_merges': ('not merge()', None),
-    'only_merges': ('merge()', None),
-    '_matchfiles': (None, '_matchfiles(%ps)'),
-    'date': ('date(%s)', None),
-    'branch': ('branch(%s)', '%lr'),
-    '_patslog': ('filelog(%s)', '%lr'),
-    'keyword': ('keyword(%s)', '%lr'),
-    'prune': ('ancestors(%s)', 'not %lr'),
-    'user': ('user(%s)', '%lr'),
+    b'no_merges': (b'not merge()', None),
+    b'only_merges': (b'merge()', None),
+    b'_matchfiles': (None, b'_matchfiles(%ps)'),
+    b'date': (b'date(%s)', None),
+    b'branch': (b'branch(%s)', b'%lr'),
+    b'_patslog': (b'filelog(%s)', b'%lr'),
+    b'keyword': (b'keyword(%s)', b'%lr'),
+    b'prune': (b'ancestors(%s)', b'not %lr'),
+    b'user': (b'user(%s)', b'%lr'),
 }
 
 
@@ -760,12 +767,12 @@
     """Return a revset string built from log options and file patterns"""
     opts = dict(opts)
     # follow or not follow?
-    follow = opts.get('follow') or opts.get('follow_first')
+    follow = opts.get(b'follow') or opts.get(b'follow_first')
 
     # branch and only_branch are really aliases and must be handled at
     # the same time
-    opts['branch'] = opts.get('branch', []) + opts.get('only_branch', [])
-    opts['branch'] = [repo.lookupbranch(b) for b in opts['branch']]
+    opts[b'branch'] = opts.get(b'branch', []) + opts.get(b'only_branch', [])
+    opts[b'branch'] = [repo.lookupbranch(b) for b in opts[b'branch']]
 
     if slowpath:
         # See walkchangerevs() slow path.
@@ -776,16 +783,16 @@
         # "a" and "b" while "file(a) and not file(b)" does
         # not. Besides, filesets are evaluated against the working
         # directory.
-        matchargs = ['r:', 'd:relpath']
+        matchargs = [b'r:', b'd:relpath']
         for p in pats:
-            matchargs.append('p:' + p)
-        for p in opts.get('include', []):
-            matchargs.append('i:' + p)
-        for p in opts.get('exclude', []):
-            matchargs.append('x:' + p)
-        opts['_matchfiles'] = matchargs
+            matchargs.append(b'p:' + p)
+        for p in opts.get(b'include', []):
+            matchargs.append(b'i:' + p)
+        for p in opts.get(b'exclude', []):
+            matchargs.append(b'x:' + p)
+        opts[b'_matchfiles'] = matchargs
     elif not follow:
-        opts['_patslog'] = list(pats)
+        opts[b'_patslog'] = list(pats)
 
     expr = []
     for op, val in sorted(opts.iteritems()):
@@ -794,7 +801,7 @@
         if op not in _opt2logrevset:
             continue
         revop, listop = _opt2logrevset[op]
-        if revop and '%' not in revop:
+        if revop and b'%' not in revop:
             expr.append(revop)
         elif not listop:
             expr.append(revsetlang.formatspec(revop, val))
@@ -804,7 +811,7 @@
             expr.append(revsetlang.formatspec(listop, val))
 
     if expr:
-        expr = '(' + ' and '.join(expr) + ')'
+        expr = b'(' + b' and '.join(expr) + b')'
     else:
         expr = None
     return expr
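
The composition step in isolation: each recognized option yields one revset fragment, and the fragments are ANDed inside a single set of parentheses (the fragments below are illustrative):

    expr = [b'not merge()', b"user('alice')"]
    revset = b'(' + b' and '.join(expr) + b')'
    assert revset == b"(not merge() and user('alice'))"
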
@@ -812,13 +819,13 @@
 
 def _initialrevs(repo, opts):
     """Return the initial set of revisions to be filtered or followed"""
-    follow = opts.get('follow') or opts.get('follow_first')
-    if opts.get('rev'):
-        revs = scmutil.revrange(repo, opts['rev'])
+    follow = opts.get(b'follow') or opts.get(b'follow_first')
+    if opts.get(b'rev'):
+        revs = scmutil.revrange(repo, opts[b'rev'])
     elif follow and repo.dirstate.p1() == nullid:
         revs = smartset.baseset()
     elif follow:
-        revs = repo.revs('.')
+        revs = repo.revs(b'.')
     else:
         revs = smartset.spanset(repo)
         revs.reverse()
@@ -830,8 +837,8 @@
 
     differ is a changesetdiffer with pre-configured file matcher.
     """
-    follow = opts.get('follow') or opts.get('follow_first')
-    followfirst = opts.get('follow_first')
+    follow = opts.get(b'follow') or opts.get(b'follow_first')
+    followfirst = opts.get(b'follow_first')
     limit = getlimit(opts)
     revs = _initialrevs(repo, opts)
     if not revs:
@@ -852,10 +859,10 @@
             return match
 
     expr = _makerevset(repo, match, pats, slowpath, opts)
-    if opts.get('graph'):
+    if opts.get(b'graph'):
         # User-specified revs might be unsorted, but don't sort before
         # _makerevset because it might depend on the order of revs
-        if repo.ui.configbool('experimental', 'log.topo'):
+        if repo.ui.configbool(b'experimental', b'log.topo'):
             if not revs.istopo():
                 revs = dagop.toposort(revs, repo.changelog.parentrevs)
                 # TODO: try to iterate the set lazily
@@ -878,16 +885,16 @@
     (fromline, toline)).
     """
     linerangebyfname = []
-    for pat in opts.get('line_range', []):
+    for pat in opts.get(b'line_range', []):
         try:
-            pat, linerange = pat.rsplit(',', 1)
+            pat, linerange = pat.rsplit(b',', 1)
         except ValueError:
-            raise error.Abort(_('malformatted line-range pattern %s') % pat)
+            raise error.Abort(_(b'malformatted line-range pattern %s') % pat)
         try:
-            fromline, toline = map(int, linerange.split(':'))
+            fromline, toline = map(int, linerange.split(b':'))
         except ValueError:
-            raise error.Abort(_("invalid line range for %s") % pat)
-        msg = _("line range pattern '%s' must match exactly one file") % pat
+            raise error.Abort(_(b"invalid line range for %s") % pat)
+        msg = _(b"line range pattern '%s' must match exactly one file") % pat
         fname = scmutil.parsefollowlinespattern(repo, None, pat, msg)
         linerangebyfname.append(
             (fname, util.processlinerange(fromline, toline))
@@ -911,7 +918,8 @@
     for fname, (fromline, toline) in _parselinerangeopt(repo, opts):
         if fname not in wctx:
             raise error.Abort(
-                _('cannot follow file not in parent ' 'revision: "%s"') % fname
+                _(b'cannot follow file not in parent revision: "%s"')
+                % fname
             )
         fctx = wctx.filectx(fname)
         for fctx, linerange in dagop.blockancestors(fctx, fromline, toline):
@@ -958,7 +966,7 @@
 
 
 def _graphnodeformatter(ui, displayer):
-    spec = ui.config('ui', 'graphnodetemplate')
+    spec = ui.config(b'ui', b'graphnodetemplate')
     if not spec:
         return templatekw.getgraphnode  # fast path for "{graphnode}"
 
@@ -973,7 +981,7 @@
     )
 
     def formatnode(repo, ctx):
-        props = {'ctx': ctx, 'repo': repo}
+        props = {b'ctx': ctx, b'repo': repo}
         return templ.renderdefault(props)
 
     return formatnode
@@ -983,28 +991,28 @@
     props = props or {}
     formatnode = _graphnodeformatter(ui, displayer)
     state = graphmod.asciistate()
-    styles = state['styles']
+    styles = state[b'styles']
 
     # only set graph styling if HGPLAIN is not set.
-    if ui.plain('graph'):
+    if ui.plain(b'graph'):
         # set all edge styles to |, the default pre-3.8 behaviour
-        styles.update(dict.fromkeys(styles, '|'))
+        styles.update(dict.fromkeys(styles, b'|'))
     else:
         edgetypes = {
-            'parent': graphmod.PARENT,
-            'grandparent': graphmod.GRANDPARENT,
-            'missing': graphmod.MISSINGPARENT,
+            b'parent': graphmod.PARENT,
+            b'grandparent': graphmod.GRANDPARENT,
+            b'missing': graphmod.MISSINGPARENT,
         }
         for name, key in edgetypes.items():
             # experimental config: experimental.graphstyle.*
             styles[key] = ui.config(
-                'experimental', 'graphstyle.%s' % name, styles[key]
+                b'experimental', b'graphstyle.%s' % name, styles[key]
             )
             if not styles[key]:
                 styles[key] = None
 
         # experimental config: experimental.graphshorten
-        state['graphshorten'] = ui.configbool('experimental', 'graphshorten')
+        state[b'graphshorten'] = ui.configbool(b'experimental', b'graphshorten')
 
     for rev, type, ctx, parents in dag:
         char = formatnode(repo, ctx)
@@ -1015,7 +1023,7 @@
         displayer.show(
             ctx, copies=copies, graphwidth=width, **pycompat.strkwargs(props)
         )
-        lines = displayer.hunk.pop(rev).split('\n')
+        lines = displayer.hunk.pop(rev).split(b'\n')
         if not lines[-1]:
             del lines[-1]
         displayer.flush(ctx)
@@ -1040,11 +1048,11 @@
 
 
 def checkunsupportedgraphflags(pats, opts):
-    for op in ["newest_first"]:
+    for op in [b"newest_first"]:
         if op in opts and opts[op]:
             raise error.Abort(
-                _("-G/--graph option is incompatible with --%s")
-                % op.replace("_", "-")
+                _(b"-G/--graph option is incompatible with --%s")
+                % op.replace(b"_", b"-")
             )
 
 
--- a/mercurial/logexchange.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/logexchange.py	Sun Oct 06 09:48:39 2019 -0400
@@ -16,7 +16,7 @@
 )
 
 # directory name in .hg/ in which remotenames files will be present
-remotenamedir = 'logexchange'
+remotenamedir = b'logexchange'
 
 
 def readremotenamefile(repo, filename):
@@ -39,7 +39,7 @@
         if lineno == 0:
             lineno += 1
         try:
-            node, remote, rname = line.split('\0')
+            node, remote, rname = line.split(b'\0')
             yield node, remote, rname
         except ValueError:
             pass
@@ -55,32 +55,32 @@
     information, call the respective functions.
     """
 
-    for bmentry in readremotenamefile(repo, 'bookmarks'):
+    for bmentry in readremotenamefile(repo, b'bookmarks'):
         yield bmentry
-    for branchentry in readremotenamefile(repo, 'branches'):
+    for branchentry in readremotenamefile(repo, b'branches'):
         yield branchentry
 
 
 def writeremotenamefile(repo, remotepath, names, nametype):
     vfs = vfsmod.vfs(repo.vfs.join(remotenamedir))
-    f = vfs(nametype, 'w', atomictemp=True)
+    f = vfs(nametype, b'w', atomictemp=True)
     # write the storage version info at the top of the file
     # version '0' is the initial version of the storage format
-    f.write('0\n\n')
+    f.write(b'0\n\n')
 
     olddata = set(readremotenamefile(repo, nametype))
     # re-save the data from a different remote than this one.
     for node, oldpath, rname in sorted(olddata):
         if oldpath != remotepath:
-            f.write('%s\0%s\0%s\n' % (node, oldpath, rname))
+            f.write(b'%s\0%s\0%s\n' % (node, oldpath, rname))
 
     for name, node in sorted(names.iteritems()):
-        if nametype == "branches":
+        if nametype == b"branches":
             for n in node:
-                f.write('%s\0%s\0%s\n' % (n, remotepath, name))
-        elif nametype == "bookmarks":
+                f.write(b'%s\0%s\0%s\n' % (n, remotepath, name))
+        elif nametype == b"bookmarks":
             if node:
-                f.write('%s\0%s\0%s\n' % (node, remotepath, name))
+                f.write(b'%s\0%s\0%s\n' % (node, remotepath, name))
 
     f.close()
 
@@ -93,9 +93,9 @@
     wlock = repo.wlock()
     try:
         if bookmarks:
-            writeremotenamefile(repo, remotepath, bookmarks, 'bookmarks')
+            writeremotenamefile(repo, remotepath, bookmarks, b'bookmarks')
         if branches:
-            writeremotenamefile(repo, remotepath, branches, 'branches')
+            writeremotenamefile(repo, remotepath, branches, b'branches')
     finally:
         wlock.release()
 
@@ -114,7 +114,7 @@
         rpath = remote._url
 
     # represent the remotepath with the user-defined path name if one exists
-    for path, url in repo.ui.configitems('paths'):
+    for path, url in repo.ui.configitems(b'paths'):
         # remove auth info from user defined url
         noauthurl = util.removeauth(url)
 
@@ -140,7 +140,7 @@
 
     with remoterepo.commandexecutor() as e:
         bookmarks = e.callcommand(
-            'listkeys', {'namespace': 'bookmarks',}
+            b'listkeys', {b'namespace': b'bookmarks',}
         ).result()
 
     # on a push, we don't want to keep obsolete heads since
@@ -151,7 +151,7 @@
     repo = localrepo.unfiltered()
 
     with remoterepo.commandexecutor() as e:
-        branchmap = e.callcommand('branchmap', {}).result()
+        branchmap = e.callcommand(b'branchmap', {}).result()
 
     for branch, nodes in branchmap.iteritems():
         bmap[branch] = []
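
For reference, each line in .hg/logexchange/{bookmarks,branches} holds three
NUL-separated fields, which is why readremotenamefile splits on a bytes
separator. A sketch with a made-up entry:

    entry = b'0' * 40 + b'\x00default\x00feature'  # node, path, name (hypothetical)
    node, remote, rname = entry.split(b'\0')
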
--- a/mercurial/loggingutil.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/loggingutil.py	Sun Oct 06 09:48:39 2019 -0400
@@ -31,7 +31,7 @@
         except OSError as err:
             if err.errno != errno.ENOENT:
                 ui.debug(
-                    "warning: cannot remove '%s': %s\n"
+                    b"warning: cannot remove '%s': %s\n"
                     % (newpath, err.strerror)
                 )
         try:
@@ -40,7 +40,7 @@
         except OSError as err:
             if err.errno != errno.ENOENT:
                 ui.debug(
-                    "warning: cannot rename '%s' to '%s': %s\n"
+                    b"warning: cannot rename '%s' to '%s': %s\n"
                     % (newpath, oldpath, err.strerror)
                 )
 
@@ -54,11 +54,11 @@
                 path = vfs.join(name)
                 for i in pycompat.xrange(maxfiles - 1, 1, -1):
                     rotate(
-                        oldpath='%s.%d' % (path, i - 1),
-                        newpath='%s.%d' % (path, i),
+                        oldpath=b'%s.%d' % (path, i - 1),
+                        newpath=b'%s.%d' % (path, i),
                     )
-                rotate(oldpath=path, newpath=maxfiles > 0 and path + '.1')
-    return vfs(name, 'a', makeparentdirs=False)
+                rotate(oldpath=path, newpath=maxfiles > 0 and path + b'.1')
+    return vfs(name, b'a', makeparentdirs=False)
 
 
 def _formatlogline(msg):
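
A sketch of the rotation naming above, assuming maxfiles=3 and a log named
b'blackbox.log' (not part of the patch):

    path = b'blackbox.log'
    b'%s.%d' % (path, 1), b'%s.%d' % (path, 2)
    # -> (b'blackbox.log.1', b'blackbox.log.2'); rotation renames .1 to .2,
    #    then the live log to .1
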
--- a/mercurial/lsprof.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/lsprof.py	Sun Oct 06 09:48:39 2019 -0400
@@ -8,7 +8,7 @@
 # PyPy doesn't expose profiler_entry from the module.
 profiler_entry = getattr(_lsprof, 'profiler_entry', None)
 
-__all__ = ['profile', 'Stats']
+# keep native strs here: Python 3's ``from lsprof import *`` rejects bytes
+__all__ = ['profile', 'Stats']
 
 
 def profile(f, *args, **kwds):
@@ -33,9 +33,9 @@
         # profiler_entries isn't defined when running under PyPy.
         if profiler_entry:
             if crit not in profiler_entry.__dict__:
-                raise ValueError("Can't sort by %s" % crit)
+                raise ValueError(b"Can't sort by %s" % crit)
         elif self.data and not getattr(self.data[0], crit, None):
-            raise ValueError("Can't sort by %s" % crit)
+            raise ValueError(b"Can't sort by %s" % crit)
 
         self.data.sort(key=lambda x: getattr(x, crit), reverse=True)
         for e in self.data:
@@ -49,16 +49,16 @@
         d = self.data
         if top is not None:
             d = d[:top]
-        cols = "% 12d %12d %11.4f %11.4f   %s\n"
-        hcols = "% 12s %12s %12s %12s %s\n"
+        cols = b"% 12d %12d %11.4f %11.4f   %s\n"
+        hcols = b"% 12s %12s %12s %12s %s\n"
         file.write(
             hcols
             % (
-                "CallCount",
-                "Recursive",
-                "Total(s)",
-                "Inline(s)",
-                "module:lineno(function)",
+                b"CallCount",
+                b"Recursive",
+                b"Total(s)",
+                b"Inline(s)",
+                b"module:lineno(function)",
             )
         )
         count = 0
@@ -86,7 +86,7 @@
                             se.reccallcount,
                             se.totaltime,
                             se.inlinetime,
-                            "    %s" % label(se.code),
+                            b"    %s" % label(se.code),
                         )
                     )
                     count += 1
@@ -147,7 +147,7 @@
 
     sys.argv = sys.argv[1:]
     if not sys.argv:
-        print("usage: lsprof.py <script> <arguments...>", file=sys.stderr)
+        print(b"usage: lsprof.py <script> <arguments...>", file=sys.stderr)
         sys.exit(2)
     sys.path.insert(0, os.path.abspath(os.path.dirname(sys.argv[0])))
     stats = profile(execfile, sys.argv[0], globals(), locals())
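
The two literals kept native above are deliberate exceptions; a sketch of
what goes wrong when they are byteified (CPython 3 behaviour):

    print(b'usage')         # writes "b'usage'" -- the repr, not the text
    __all__ = [b'profile']  # a later 'from lsprof import *' raises TypeError
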
--- a/mercurial/lsprofcalltree.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/lsprofcalltree.py	Sun Oct 06 09:48:39 2019 -0400
@@ -18,9 +18,9 @@
 def label(code):
     if isinstance(code, str):
         # built-in functions ('~' sorts at the end)
-        return '~' + pycompat.sysbytes(code)
+        return b'~' + pycompat.sysbytes(code)
     else:
-        return '%s %s:%d' % (
+        return b'%s %s:%d' % (
             pycompat.sysbytes(code.co_name),
             pycompat.sysbytes(code.co_filename),
             code.co_firstlineno,
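
Sketch of what label() yields on each branch (names and line number are
illustrative):

    label('sum')  # built-in code arrives as native str -> b'~sum'
    # a real code object gives b'<name> <file>:<lineno>',
    # e.g. b'profile lsprof.py:14'
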
--- a/mercurial/mail.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/mail.py	Sun Oct 06 09:48:39 2019 -0400
@@ -44,10 +44,10 @@
         self._host = host
 
     def starttls(self, keyfile=None, certfile=None):
-        if not self.has_extn("starttls"):
-            msg = "STARTTLS extension not supported by server"
+        if not self.has_extn(b"starttls"):
+            msg = b"STARTTLS extension not supported by server"
             raise smtplib.SMTPException(msg)
-        (resp, reply) = self.docmd("STARTTLS")
+        (resp, reply) = self.docmd(b"STARTTLS")
         if resp == 220:
             self.sock = sslutil.wrapsocket(
                 self.sock,
@@ -80,7 +80,7 @@
 
     def _get_socket(self, host, port, timeout):
         if self.debuglevel > 0:
-            self._ui.debug('connect: %r\n' % ((host, port),))
+            self._ui.debug(b'connect: %r\n' % ((host, port),))
         new_socket = socket.create_connection((host, port), timeout)
         new_socket = sslutil.wrapsocket(
             new_socket,
@@ -106,18 +106,18 @@
 
 def _smtp(ui):
     '''build an smtp connection and return a function to send mail'''
-    local_hostname = ui.config('smtp', 'local_hostname')
-    tls = ui.config('smtp', 'tls')
+    local_hostname = ui.config(b'smtp', b'local_hostname')
+    tls = ui.config(b'smtp', b'tls')
     # backward compatible: when tls = true, we use starttls.
-    starttls = tls == 'starttls' or stringutil.parsebool(tls)
-    smtps = tls == 'smtps'
+    starttls = tls == b'starttls' or stringutil.parsebool(tls)
+    smtps = tls == b'smtps'
     if (starttls or smtps) and not _pyhastls():
-        raise error.Abort(_("can't use TLS: Python SSL support not installed"))
-    mailhost = ui.config('smtp', 'host')
+        raise error.Abort(_(b"can't use TLS: Python SSL support not installed"))
+    mailhost = ui.config(b'smtp', b'host')
     if not mailhost:
-        raise error.Abort(_('smtp.host not configured - cannot send mail'))
+        raise error.Abort(_(b'smtp.host not configured - cannot send mail'))
     if smtps:
-        ui.note(_('(using smtps)\n'))
+        ui.note(_(b'(using smtps)\n'))
         s = SMTPS(ui, local_hostname=local_hostname, host=mailhost)
     elif starttls:
         s = STARTTLS(ui, local_hostname=local_hostname, host=mailhost)
@@ -127,23 +127,23 @@
         defaultport = 465
     else:
         defaultport = 25
-    mailport = util.getport(ui.config('smtp', 'port', defaultport))
-    ui.note(_('sending mail: smtp host %s, port %d\n') % (mailhost, mailport))
+    mailport = util.getport(ui.config(b'smtp', b'port', defaultport))
+    ui.note(_(b'sending mail: smtp host %s, port %d\n') % (mailhost, mailport))
     s.connect(host=mailhost, port=mailport)
     if starttls:
-        ui.note(_('(using starttls)\n'))
+        ui.note(_(b'(using starttls)\n'))
         s.ehlo()
         s.starttls()
         s.ehlo()
     if starttls or smtps:
-        ui.note(_('(verifying remote certificate)\n'))
+        ui.note(_(b'(verifying remote certificate)\n'))
         sslutil.validatesocket(s.sock)
-    username = ui.config('smtp', 'username')
-    password = ui.config('smtp', 'password')
+    username = ui.config(b'smtp', b'username')
+    password = ui.config(b'smtp', b'password')
     if username and not password:
         password = ui.getpass()
     if username and password:
-        ui.note(_('(authenticating to mail server as %s)\n') % username)
+        ui.note(_(b'(authenticating to mail server as %s)\n') % username)
         try:
             s.login(username, password)
         except smtplib.SMTPException as inst:
@@ -154,7 +154,7 @@
             return s.sendmail(sender, recipients, msg)
         except smtplib.SMTPRecipientsRefused as inst:
             recipients = [r[1] for r in inst.recipients.values()]
-            raise error.Abort('\n' + '\n'.join(recipients))
+            raise error.Abort(b'\n' + b'\n'.join(recipients))
         except smtplib.SMTPException as inst:
             raise error.Abort(inst)
 
@@ -163,22 +163,22 @@
 
 def _sendmail(ui, sender, recipients, msg):
     '''send mail using sendmail.'''
-    program = ui.config('email', 'method')
+    program = ui.config(b'email', b'method')
     stremail = lambda x: (
         procutil.quote(stringutil.email(encoding.strtolocal(x)))
     )
-    cmdline = '%s -f %s %s' % (
+    cmdline = b'%s -f %s %s' % (
         program,
         stremail(sender),
-        ' '.join(map(stremail, recipients)),
+        b' '.join(map(stremail, recipients)),
     )
-    ui.note(_('sending mail: %s\n') % cmdline)
-    fp = procutil.popen(cmdline, 'wb')
+    ui.note(_(b'sending mail: %s\n') % cmdline)
+    fp = procutil.popen(cmdline, b'wb')
     fp.write(util.tonativeeol(msg))
     ret = fp.close()
     if ret:
         raise error.Abort(
-            '%s %s'
+            b'%s %s'
             % (
                 os.path.basename(program.split(None, 1)[0]),
                 procutil.explainexit(ret),
@@ -188,16 +188,16 @@
 
 def _mbox(mbox, sender, recipients, msg):
     '''write mails to mbox'''
-    fp = open(mbox, 'ab+')
+    fp = open(mbox, b'ab+')
     # Should be time.asctime(), but Windows prints a two-character day
     # of month instead of one. Make them print the same thing.
     date = time.strftime(r'%a %b %d %H:%M:%S %Y', time.localtime())
     fp.write(
-        'From %s %s\n'
+        b'From %s %s\n'
         % (encoding.strtolocal(sender), encoding.strtolocal(date))
     )
     fp.write(msg)
-    fp.write('\n\n')
+    fp.write(b'\n\n')
     fp.close()
 
 
@@ -205,9 +205,9 @@
     '''make a mail connection. return a function to send mail.
     call as sendmail(sender, list-of-recipients, msg).'''
     if mbox:
-        open(mbox, 'wb').close()
+        open(mbox, b'wb').close()
         return lambda s, r, m: _mbox(mbox, s, r, m)
-    if ui.config('email', 'method') == 'smtp':
+    if ui.config(b'email', b'method') == b'smtp':
         return _smtp(ui)
     return lambda s, r, m: _sendmail(ui, s, r, m)
 
@@ -219,19 +219,19 @@
 
 def validateconfig(ui):
     '''determine if we have enough config data to try sending email.'''
-    method = ui.config('email', 'method')
-    if method == 'smtp':
-        if not ui.config('smtp', 'host'):
+    method = ui.config(b'email', b'method')
+    if method == b'smtp':
+        if not ui.config(b'smtp', b'host'):
             raise error.Abort(
                 _(
-                    'smtp specified as email transport, '
-                    'but no smtp host configured'
+                    b'smtp specified as email transport, '
+                    b'but no smtp host configured'
                 )
             )
     else:
         if not procutil.findexe(method):
             raise error.Abort(
-                _('%r specified as email transport, ' 'but not in PATH')
+                _(b'%r specified as email transport, but not in PATH')
                 % method
             )
 
@@ -241,21 +241,21 @@
     cs = pycompat.sysbytes(email.charset.Charset(cs).input_charset.lower())
 
     # "latin1" normalizes to "iso8859-1", standard calls for "iso-8859-1"
-    if cs.startswith("iso") and not cs.startswith("iso-"):
-        return "iso-" + cs[3:]
+    if cs.startswith(b"iso") and not cs.startswith(b"iso-"):
+        return b"iso-" + cs[3:]
     return cs
 
 
-def mimetextpatch(s, subtype='plain', display=False):
+def mimetextpatch(s, subtype=b'plain', display=False):
     '''Return MIME message suitable for a patch.
     Charset will be detected by first trying to decode as us-ascii, then utf-8,
     and finally the global encodings. If all those fail, fall back to
     ISO-8859-1, an encoding that allows all byte sequences.
     Transfer encodings will be used if necessary.'''
 
-    cs = ['us-ascii', 'utf-8', encoding.encoding, encoding.fallbackencoding]
+    cs = [b'us-ascii', b'utf-8', encoding.encoding, encoding.fallbackencoding]
     if display:
-        cs = ['us-ascii']
+        cs = [b'us-ascii']
     for charset in cs:
         try:
             s.decode(pycompat.sysstr(charset))
@@ -263,7 +263,7 @@
         except UnicodeDecodeError:
             pass
 
-    return mimetextqp(s, subtype, "iso-8859-1")
+    return mimetextqp(s, subtype, b"iso-8859-1")
 
 
 def mimetextqp(body, subtype, charset):
@@ -272,7 +272,7 @@
     '''
     cs = email.charset.Charset(charset)
     msg = email.message.Message()
-    msg.set_type(pycompat.sysstr('text/' + subtype))
+    msg.set_type(pycompat.sysstr(b'text/' + subtype))
 
     for line in body.splitlines():
         if len(line) > 950:
@@ -293,16 +293,16 @@
 
 def _charsets(ui):
     '''Obtains charsets to send mail parts not containing patches.'''
-    charsets = [cs.lower() for cs in ui.configlist('email', 'charsets')]
+    charsets = [cs.lower() for cs in ui.configlist(b'email', b'charsets')]
     fallbacks = [
         encoding.fallbackencoding.lower(),
         encoding.encoding.lower(),
-        'utf-8',
+        b'utf-8',
     ]
     for cs in fallbacks:  # find unique charsets while keeping order
         if cs not in charsets:
             charsets.append(cs)
-    return [cs for cs in charsets if not cs.endswith('ascii')]
+    return [cs for cs in charsets if not cs.endswith(b'ascii')]
 
 
 def _encode(ui, s, charsets):
@@ -322,7 +322,7 @@
             except UnicodeEncodeError:
                 pass
             except LookupError:
-                ui.warn(_('ignoring invalid sendcharset: %s\n') % ocs)
+                ui.warn(_(b'ignoring invalid sendcharset: %s\n') % ocs)
         else:
             # Everything failed, ascii-armor what we've got and send it.
             return s.encode('ascii', 'backslashreplace')
@@ -343,9 +343,9 @@
                 except UnicodeEncodeError:
                     pass
                 except LookupError:
-                    ui.warn(_('ignoring invalid sendcharset: %s\n') % ocs)
+                    ui.warn(_(b'ignoring invalid sendcharset: %s\n') % ocs)
     # if ascii, or all conversion attempts fail, send (broken) ascii
-    return s, 'us-ascii'
+    return s, b'us-ascii'
 
 
 def headencode(ui, s, charsets=None, display=False):
@@ -361,18 +361,18 @@
     assert isinstance(addr, bytes)
     name = headencode(ui, name, charsets)
     try:
-        acc, dom = addr.split('@')
+        acc, dom = addr.split(b'@')
         acc.decode('ascii')
         dom = dom.decode(pycompat.sysstr(encoding.encoding)).encode('idna')
-        addr = '%s@%s' % (acc, dom)
+        addr = b'%s@%s' % (acc, dom)
     except UnicodeDecodeError:
-        raise error.Abort(_('invalid email address: %s') % addr)
+        raise error.Abort(_(b'invalid email address: %s') % addr)
     except ValueError:
         try:
             # too strict?
             addr.decode('ascii')
         except UnicodeDecodeError:
-            raise error.Abort(_('invalid local address: %s') % addr)
+            raise error.Abort(_(b'invalid local address: %s') % addr)
     return pycompat.bytesurl(
         email.utils.formataddr((name, encoding.strfromlocal(addr)))
     )
@@ -381,7 +381,7 @@
 def addressencode(ui, address, charsets=None, display=False):
     '''Turns address into RFC-2047 compliant header.'''
     if display or not address:
-        return address or ''
+        return address or b''
     name, addr = email.utils.parseaddr(encoding.strfromlocal(address))
     return _addressencode(ui, name, encoding.strtolocal(addr), charsets)
 
@@ -408,10 +408,10 @@
 def mimeencode(ui, s, charsets=None, display=False):
     '''creates mime text object, encodes it if needed, and sets
     charset and transfer-encoding accordingly.'''
-    cs = 'us-ascii'
+    cs = b'us-ascii'
     if not display:
         s, cs = _encode(ui, s, charsets)
-    return mimetextqp(s, 'plain', cs)
+    return mimetextqp(s, b'plain', cs)
 
 
 if pycompat.ispy3:
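
One detail worth noting in mimetextpatch/mimetextqp above: codec names
handed to decode() and set_type() must stay native strings, hence the
pycompat.sysstr() wrappers around the bytes charset values. Sketch:

    b'caf\xc3\xa9'.decode('utf-8')    # ok: codec name is a native str
    b'caf\xc3\xa9'.decode(b'utf-8')   # TypeError on Python 3
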
--- a/mercurial/manifest.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/manifest.py	Sun Oct 06 09:48:39 2019 -0400
@@ -45,18 +45,18 @@
     # class exactly matches its C counterpart to try and help
     # prevent surprise breakage for anyone that develops against
     # the pure version.
-    if data and data[-1:] != '\n':
-        raise ValueError('Manifest did not end in a newline.')
+    if data and data[-1:] != b'\n':
+        raise ValueError(b'Manifest did not end in a newline.')
     prev = None
     for l in data.splitlines():
         if prev is not None and prev > l:
-            raise ValueError('Manifest lines not in sorted order.')
+            raise ValueError(b'Manifest lines not in sorted order.')
         prev = l
-        f, n = l.split('\0')
+        f, n = l.split(b'\0')
         if len(n) > 40:
             yield f, bin(n[:40]), n[40:]
         else:
-            yield f, bin(n), ''
+            yield f, bin(n), b''
 
 
 def _text(it):
@@ -66,10 +66,10 @@
         files.append(f)
         # if this is changed to support newlines in filenames,
         # be sure to check the templates/ dir again (especially *-raw.tmpl)
-        lines.append("%s\0%s%s\n" % (f, hex(n), fl))
+        lines.append(b"%s\0%s%s\n" % (f, hex(n), fl))
 
     _checkforbidden(files)
-    return ''.join(lines)
+    return b''.join(lines)
 
 
 class lazymanifestiter(object):
@@ -89,7 +89,7 @@
             self.pos += 1
             return data[0]
         self.pos += 1
-        zeropos = data.find('\x00', pos)
+        zeropos = data.find(b'\x00', pos)
         return data[pos:zeropos]
 
     __next__ = next
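
For orientation, one manifest line as consumed by the parsing code above
(the hash is a placeholder):

    line = b'foo/bar.py\x00' + b'0' * 40 + b'x\n'
    f, n = line.rstrip(b'\n').split(b'\0')
    # f == b'foo/bar.py'; n[:40] is the hex node, n[40:] the flag b'x'
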
@@ -111,7 +111,7 @@
         if pos == -1:
             self.pos += 1
             return data
-        zeropos = data.find('\x00', pos)
+        zeropos = data.find(b'\x00', pos)
         hashval = unhexlify(data, self.lm.extrainfo[self.pos], zeropos + 1, 40)
         flags = self.lm._getflags(data, self.pos, zeropos)
         self.pos += 1
@@ -173,18 +173,18 @@
     def findlines(self, data):
         if not data:
             return []
-        pos = data.find("\n")
-        if pos == -1 or data[-1:] != '\n':
-            raise ValueError("Manifest did not end in a newline.")
+        pos = data.find(b"\n")
+        if pos == -1 or data[-1:] != b'\n':
+            raise ValueError(b"Manifest did not end in a newline.")
         positions = [0]
-        prev = data[: data.find('\x00')]
+        prev = data[: data.find(b'\x00')]
         while pos < len(data) - 1 and pos != -1:
             positions.append(pos + 1)
-            nexts = data[pos + 1 : data.find('\x00', pos + 1)]
+            nexts = data[pos + 1 : data.find(b'\x00', pos + 1)]
             if nexts < prev:
-                raise ValueError("Manifest lines not in sorted order.")
+                raise ValueError(b"Manifest lines not in sorted order.")
             prev = nexts
-            pos = data.find("\n", pos + 1)
+            pos = data.find(b"\n", pos + 1)
         return positions
 
     def _get(self, index):
@@ -198,7 +198,7 @@
 
     def _getkey(self, pos):
         if pos >= 0:
-            return self.data[pos : self.data.find('\x00', pos + 1)]
+            return self.data[pos : self.data.find(b'\x00', pos + 1)]
         return self.extradata[-pos - 1][0]
 
     def bsearch(self, key):
@@ -244,23 +244,23 @@
 
     def _getflags(self, data, needle, pos):
         start = pos + 41
-        end = data.find("\n", start)
+        end = data.find(b"\n", start)
         if end == -1:
             end = len(data) - 1
         if start == end:
-            return ''
+            return b''
         return self.data[start:end]
 
     def __getitem__(self, key):
         if not isinstance(key, bytes):
-            raise TypeError("getitem: manifest keys must be a bytes.")
+            raise TypeError(b"getitem: manifest keys must be a bytes.")
         needle = self.bsearch(key)
         if needle == -1:
             raise KeyError
         data, pos = self._get(needle)
         if pos == -1:
             return (data[1], data[2])
-        zeropos = data.find('\x00', pos)
+        zeropos = data.find(b'\x00', pos)
         assert 0 <= needle <= len(self.positions)
         assert len(self.extrainfo) == len(self.positions)
         hashval = unhexlify(data, self.extrainfo[needle], zeropos + 1, 40)
@@ -277,22 +277,24 @@
         if cur >= 0:
             # This does NOT unsort the list as far as the search functions are
             # concerned, as they only examine lines mapped by self.positions.
-            self.data = self.data[:cur] + '\x00' + self.data[cur + 1 :]
+            self.data = self.data[:cur] + b'\x00' + self.data[cur + 1 :]
             self.hasremovals = True
 
     def __setitem__(self, key, value):
         if not isinstance(key, bytes):
-            raise TypeError("setitem: manifest keys must be a byte string.")
+            raise TypeError(b"setitem: manifest keys must be a byte string.")
         if not isinstance(value, tuple) or len(value) != 2:
-            raise TypeError("Manifest values must be a tuple of (node, flags).")
+            raise TypeError(
+                b"Manifest values must be a tuple of (node, flags)."
+            )
         hashval = value[0]
         if not isinstance(hashval, bytes) or not 20 <= len(hashval) <= 22:
-            raise TypeError("node must be a 20-byte byte string")
+            raise TypeError(b"node must be a 20-byte byte string")
         flags = value[1]
         if len(hashval) == 22:
             hashval = hashval[:-1]
         if not isinstance(flags, bytes) or len(flags) > 1:
-            raise TypeError("flags must a 0 or 1 byte string, got %r", flags)
+            raise TypeError(b"flags must a 0 or 1 byte string, got %r", flags)
         needle, found = self.bsearch2(key)
         if found:
             # put the item
@@ -353,14 +355,14 @@
                     # before the next position.
                     if (
                         self.hasremovals
-                        and self.data.find('\n\x00', cur, self.positions[i])
+                        and self.data.find(b'\n\x00', cur, self.positions[i])
                         != -1
                     ):
                         break
 
                     offset += self.positions[i] - cur
                     cur = self.positions[i]
-                end_cut = self.data.find('\n', cur)
+                end_cut = self.data.find(b'\n', cur)
                 if end_cut != -1:
                     end_cut += 1
                 offset += end_cut - cur
@@ -375,12 +377,12 @@
                         self.extrainfo[i] = ord(t[1][21])
                     offset += len(l[-1])
                     i += 1
-        self.data = ''.join(l)
+        self.data = b''.join(l)
         self.hasremovals = False
         self.extradata = []
 
     def _pack(self, d):
-        return d[0] + '\x00' + hex(d[1][:20]) + d[2] + '\n'
+        return d[0] + b'\x00' + hex(d[1][:20]) + d[2] + b'\n'
 
     def text(self):
         self._compact()
@@ -393,7 +395,7 @@
 
         for fn, e1, flags in self.iterentries():
             if fn not in m2:
-                diff[fn] = (e1, flags), (None, '')
+                diff[fn] = (e1, flags), (None, b'')
             else:
                 e2 = m2[fn]
                 if (e1, flags) != e2:
@@ -403,7 +405,7 @@
 
         for fn, e2, flags in m2.iterentries():
             if fn not in self:
-                diff[fn] = (None, ''), (e2, flags)
+                diff[fn] = (None, b''), (e2, flags)
 
         return diff
 
@@ -421,7 +423,7 @@
 
     def filtercopy(self, filterfn):
         # XXX should be optimized
-        c = _lazymanifest('')
+        c = _lazymanifest(b'')
         for f, n, fl in self.iterentries():
             if filterfn(f):
                 c[f] = n, fl
@@ -436,7 +438,7 @@
 
 @interfaceutil.implementer(repository.imanifestdict)
 class manifestdict(object):
-    def __init__(self, data=''):
+    def __init__(self, data=b''):
         self._lm = _lazymanifest(data)
 
     def __getitem__(self, key):
@@ -456,7 +458,7 @@
     __bool__ = __nonzero__
 
     def __setitem__(self, key, node):
-        self._lm[key] = node, self.flags(key, '')
+        self._lm[key] = node, self.flags(key, b'')
 
     def __contains__(self, key):
         if key is None:
@@ -538,7 +540,7 @@
 
         # for dirstate.walk, files=[''] means "walk the whole tree".
         # follow that here, too
-        fset.discard('')
+        fset.discard(b'')
 
         for fn in sorted(fset):
             if not self.hasdir(fn):
@@ -591,7 +593,7 @@
         except KeyError:
             return default
 
-    def flags(self, key, default=''):
+    def flags(self, key, default=b''):
         try:
             return self._lm[key][1]
         except KeyError:
@@ -622,7 +624,7 @@
         delta = []
         dstart = None
         dend = None
-        dline = [""]
+        dline = [b""]
         start = 0
         # zero copy representation of base as a buffer
         addbuf = util.buffer(base)
@@ -636,14 +638,14 @@
                 start, end = _msearch(addbuf, f, start)
                 if not todelete:
                     h, fl = self._lm[f]
-                    l = "%s\0%s%s\n" % (f, hex(h), fl)
+                    l = b"%s\0%s%s\n" % (f, hex(h), fl)
                 else:
                     if start == end:
                         # item we want to delete was not found, error out
                         raise AssertionError(
-                            _("failed to remove %s from manifest") % f
+                            _(b"failed to remove %s from manifest") % f
                         )
-                    l = ""
+                    l = b""
                 if dstart is not None and dstart <= start and dend >= start:
                     if dend < end:
                         dend = end
@@ -651,13 +653,13 @@
                         dline.append(l)
                 else:
                     if dstart is not None:
-                        delta.append([dstart, dend, "".join(dline)])
+                        delta.append([dstart, dend, b"".join(dline)])
                     dstart = start
                     dend = end
                     dline = [l]
 
             if dstart is not None:
-                delta.append([dstart, dend, "".join(dline)])
+                delta.append([dstart, dend, b"".join(dline)])
             # apply the delta to the base, and get a delta for addrevision
             deltatext, arraytext = _addlistdelta(base, delta)
         else:
@@ -694,21 +696,21 @@
     while lo < hi:
         mid = (lo + hi) // 2
         start = mid
-        while start > 0 and m[start - 1 : start] != '\n':
+        while start > 0 and m[start - 1 : start] != b'\n':
             start -= 1
-        end = advance(start, '\0')
+        end = advance(start, b'\0')
         if bytes(m[start:end]) < s:
             # we know that after the null there are 40 bytes of sha1
             # this translates to the bisect lo = mid + 1
-            lo = advance(end + 40, '\n') + 1
+            lo = advance(end + 40, b'\n') + 1
         else:
             # this translates to the bisect hi = mid
             hi = start
-    end = advance(lo, '\0')
+    end = advance(lo, b'\0')
     found = m[lo:end]
     if s == found:
         # we know that after the null there are 40 bytes of sha1
-        end = advance(end + 40, '\n')
+        end = advance(end + 40, b'\n')
         return (lo, end + 1)
     else:
         return (lo, lo)
@@ -717,9 +719,9 @@
 def _checkforbidden(l):
     """Check filenames for illegal characters."""
     for f in l:
-        if '\n' in f or '\r' in f:
+        if b'\n' in f or b'\r' in f:
             raise error.StorageError(
-                _("'\\n' and '\\r' disallowed in filenames: %r")
+                _(b"'\\n' and '\\r' disallowed in filenames: %r")
                 % pycompat.bytestr(f)
             )
 
@@ -741,26 +743,26 @@
 
     newaddlist += addlist[currentposition:]
 
-    deltatext = "".join(
-        struct.pack(">lll", start, end, len(content)) + content
+    deltatext = b"".join(
+        struct.pack(b">lll", start, end, len(content)) + content
         for start, end, content in x
     )
     return deltatext, newaddlist
 
 
 def _splittopdir(f):
-    if '/' in f:
-        dir, subpath = f.split('/', 1)
-        return dir + '/', subpath
+    if b'/' in f:
+        dir, subpath = f.split(b'/', 1)
+        return dir + b'/', subpath
     else:
-        return '', f
+        return b'', f
 
 
 _noop = lambda s: None
 
 
 class treemanifest(object):
-    def __init__(self, dir='', text=''):
+    def __init__(self, dir=b'', text=b''):
         self._dir = dir
         self._node = nullid
         self._loadfunc = _noop
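
_splittopdir above peels off the first path component, keeping the trailing
byte slash that treemanifest uses as its directory-key convention. Sketch:

    _splittopdir(b'a/b/c.txt')  # -> (b'a/', b'b/c.txt')
    _splittopdir(b'top.txt')    # -> (b'', b'top.txt')
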
@@ -775,7 +777,7 @@
 
             def readsubtree(subdir, subm):
                 raise AssertionError(
-                    'treemanifest constructor only accepts ' 'flat manifests'
+                    b'treemanifest constructor only accepts flat manifests'
                 )
 
             self.parse(text, readsubtree)
@@ -806,13 +808,13 @@
     def _loadchildrensetlazy(self, visit):
         if not visit:
             return None
-        if visit == 'all' or visit == 'this':
+        if visit == b'all' or visit == b'this':
             self._loadalllazy()
             return None
 
         loadlazy = self._loadlazy
         for k in visit:
-            loadlazy(k + '/')
+            loadlazy(k + b'/')
         return visit
 
     def _loaddifflazy(self, t1, t2):
@@ -862,7 +864,7 @@
         return not self._dirs or all(m._isempty() for m in self._dirs.values())
 
     def __repr__(self):
-        return '<treemanifest dir=%s, node=%s, loaded=%s, dirty=%s at 0x%x>' % (
+        return b'<treemanifest dir=%s, node=%s, loaded=%s, dirty=%s at 0x%x>' % (
             self._dir,
             hex(self._node),
             bool(self._loadfunc is _noop),
@@ -893,7 +895,7 @@
             itertools.chain(self._dirs.items(), self._files.items())
         ):
             if p in self._files:
-                yield self._subpath(p), n, self._flags.get(p, '')
+                yield self._subpath(p), n, self._flags.get(p, b'')
             else:
                 for x in n.iterentries():
                     yield x
@@ -972,12 +974,12 @@
             self._loadlazy(dir)
 
             if dir not in self._dirs:
-                return ''
+                return b''
             return self._dirs[dir].flags(subpath)
         else:
             if f in self._lazydirs or f in self._dirs:
-                return ''
-            return self._flags.get(f, '')
+                return b''
+            return self._flags.get(f, b'')
 
     def find(self, f):
         self._load()
@@ -987,7 +989,7 @@
 
             return self._dirs[dir].find(subpath)
         else:
-            return self._files[f], self._flags.get(f, '')
+            return self._files[f], self._flags.get(f, b'')
 
     def __delitem__(self, f):
         self._load()
@@ -1109,7 +1111,7 @@
             if topdir in self._dirs:
                 return self._dirs[topdir].hasdir(subdir)
             return False
-        dirslash = dir + '/'
+        dirslash = dir + b'/'
         return dirslash in self._dirs or dirslash in self._lazydirs
 
     def walk(self, match):
@@ -1135,7 +1137,7 @@
 
         # for dirstate.walk, files=[''] means "walk the whole tree".
         # follow that here, too
-        fset.discard('')
+        fset.discard(b'')
 
         for fn in sorted(fset):
             if not self.hasdir(fn):
@@ -1172,7 +1174,7 @@
         '''
 
         visit = match.visitchildrenset(self._dir[:-1])
-        if visit == 'all':
+        if visit == b'all':
             return self.copy()
         ret = treemanifest(self._dir)
         if not visit:
@@ -1185,7 +1187,7 @@
             # If visit == 'this', we should obviously look at the files in this
             # directory; if visit is a set, and fn is in it, we should inspect
             # fn (but no need to inspect things not in the set).
-            if visit != 'this' and fn not in visit:
+            if visit != b'this' and fn not in visit:
                 continue
             fullp = self._subpath(fn)
             # visitchildrenset isn't perfect, we still need to call the regular
@@ -1248,9 +1250,9 @@
                     stack.append((emptytree, m2))
 
             for fn, n1 in t1._files.iteritems():
-                fl1 = t1._flags.get(fn, '')
+                fl1 = t1._flags.get(fn, b'')
                 n2 = t2._files.get(fn, None)
-                fl2 = t2._flags.get(fn, '')
+                fl2 = t2._flags.get(fn, b'')
                 if n1 != n2 or fl1 != fl2:
                     result[t1._subpath(fn)] = ((n1, fl1), (n2, fl2))
                 elif clean:
@@ -1258,8 +1260,8 @@
 
             for fn, n2 in t2._files.iteritems():
                 if fn not in t1._files:
-                    fl2 = t2._flags.get(fn, '')
-                    result[t2._subpath(fn)] = ((None, ''), (n2, fl2))
+                    fl2 = t2._flags.get(fn, b'')
+                    result[t2._subpath(fn)] = ((None, b''), (n2, fl2))
 
         stackls = []
         _iterativediff(self, m2, stackls)
@@ -1276,12 +1278,12 @@
         selflazy = self._lazydirs
         subpath = self._subpath
         for f, n, fl in _parse(text):
-            if fl == 't':
-                f = f + '/'
+            if fl == b't':
+                f = f + b'/'
                 # False below means "doesn't need to be copied" and can use the
                 # cached value from readsubtree directly.
                 selflazy[f] = (subpath(f), n, readsubtree, False)
-            elif '/' in f:
+            elif b'/' in f:
                 # This is a flat manifest, so use __setitem__ and setflag rather
                 # than assigning directly to _files and _flags, so we can
                 # assign a path in a subdirectory, and to mark dirty (compared
@@ -1307,8 +1309,8 @@
         """
         self._load()
         flags = self.flags
-        lazydirs = [(d[:-1], v[1], 't') for d, v in self._lazydirs.iteritems()]
-        dirs = [(d[:-1], self._dirs[d]._node, 't') for d in self._dirs]
+        lazydirs = [(d[:-1], v[1], b't') for d, v in self._lazydirs.iteritems()]
+        dirs = [(d[:-1], self._dirs[d]._node, b't') for d in self._dirs]
         files = [(f, self._files[f], flags(f)) for f in self._files]
         return _text(sorted(dirs + files + lazydirs))
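
In dirtext() above, a subdirectory serializes like a file entry but carries
the b't' flag, which parse() later uses to recurse. One such line, sketched
with a placeholder node:

    hexnode = b'0' * 40
    b'%s\x00%s%s\n' % (b'subdir', hexnode, b't')
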
 
@@ -1334,7 +1336,7 @@
         # let's skip investigating things that `match` says we do not need.
         visit = match.visitchildrenset(self._dir[:-1])
         visit = self._loadchildrensetlazy(visit)
-        if visit == 'this' or visit == 'all':
+        if visit == b'this' or visit == b'all':
             visit = None
         for d, subm in self._dirs.iteritems():
             if visit and d[:-1] not in visit:
@@ -1375,7 +1377,7 @@
 
     """
 
-    _file = 'manifestfulltextcache'
+    _file = b'manifestfulltextcache'
 
     def __init__(self, max):
         super(manifestfulltextcache, self).__init__(max)
@@ -1396,7 +1398,7 @@
                     if len(node) < 20:
                         break
                     try:
-                        size = struct.unpack('>L', fp.read(4))[0]
+                        size = struct.unpack(b'>L', fp.read(4))[0]
                     except struct.error:
                         break
                     value = bytearray(fp.read(size))
@@ -1415,13 +1417,13 @@
             return
         # rotate backwards to the first used node
         with self._opener(
-            self._file, 'w', atomictemp=True, checkambig=True
+            self._file, b'w', atomictemp=True, checkambig=True
         ) as fp:
             node = self._head.prev
             while True:
                 if node.key in self._cache:
                     fp.write(node.key)
-                    fp.write(struct.pack('>L', len(node.value)))
+                    fp.write(struct.pack(b'>L', len(node.value)))
                     fp.write(node.value)
                 if node is self._head:
                     break
@@ -1491,7 +1493,7 @@
     def __init__(
         self,
         opener,
-        tree='',
+        tree=b'',
         dirlogcache=None,
         indexfile=None,
         treemanifest=False,
@@ -1513,20 +1515,20 @@
         optiontreemanifest = False
         opts = getattr(opener, 'options', None)
         if opts is not None:
-            cachesize = opts.get('manifestcachesize', cachesize)
-            optiontreemanifest = opts.get('treemanifest', False)
+            cachesize = opts.get(b'manifestcachesize', cachesize)
+            optiontreemanifest = opts.get(b'treemanifest', False)
 
         self._treeondisk = optiontreemanifest or treemanifest
 
         self._fulltextcache = manifestfulltextcache(cachesize)
 
         if tree:
-            assert self._treeondisk, 'opts is %r' % opts
+            assert self._treeondisk, b'opts is %r' % opts
 
         if indexfile is None:
-            indexfile = '00manifest.i'
+            indexfile = b'00manifest.i'
             if tree:
-                indexfile = "meta/" + tree + indexfile
+                indexfile = b"meta/" + tree + indexfile
 
         self.tree = tree
 
@@ -1534,7 +1536,7 @@
         if tree:
             self._dirlogcache = dirlogcache
         else:
-            self._dirlogcache = {'': self}
+            self._dirlogcache = {b'': self}
 
         self._revlog = revlog.revlog(
             opener,
@@ -1551,7 +1553,7 @@
 
     def _setupmanifestcachehooks(self, repo):
         """Persist the manifestfulltextcache on lock release"""
-        if not util.safehasattr(repo, '_wlockref'):
+        if not util.safehasattr(repo, b'_wlockref'):
             return
 
         self._fulltextcache._opener = repo.wcachevfs
@@ -1604,7 +1606,7 @@
         readtree=None,
         match=None,
     ):
-        if p1 in self.fulltextcache and util.safehasattr(m, 'fastdelta'):
+        if p1 in self.fulltextcache and util.safehasattr(m, b'fastdelta'):
             # If our first parent is in the manifest cache, we can
             # compute a delta here using properties we know about the
             # manifest up-front, which may save time later for the
@@ -1629,8 +1631,8 @@
             # through to the revlog layer, and let it handle the delta
             # process.
             if self._treeondisk:
-                assert readtree, "readtree must be set for treemanifest writes"
-                assert match, "match must be specified for treemanifest writes"
+                assert readtree, b"readtree must be set for treemanifest writes"
+                assert match, b"match must be specified for treemanifest writes"
                 m1 = readtree(self.tree, p1)
                 m2 = readtree(self.tree, p2)
                 n = self._addtree(
@@ -1650,7 +1652,9 @@
     def _addtree(self, m, transaction, link, m1, m2, readtree, match):
         # If the manifest is unchanged compared to one parent,
         # don't write a new revision
-        if self.tree != '' and (m.unmodifiedsince(m1) or m.unmodifiedsince(m2)):
+        if self.tree != b'' and (
+            m.unmodifiedsince(m1) or m.unmodifiedsince(m2)
+        ):
             return m.node()
 
         def writesubtree(subm, subp1, subp2, match):
@@ -1670,7 +1674,7 @@
         m.writesubtrees(m1, m2, writesubtree, match)
         text = m.dirtext()
         n = None
-        if self.tree != '':
+        if self.tree != b'':
             # Double-check whether contents are unchanged to one parent
             if text == m1.dirtext():
                 n = m1.node()
@@ -1763,7 +1767,7 @@
 
     def clone(self, tr, destrevlog, **kwargs):
         if not isinstance(destrevlog, manifestrevlog):
-            raise error.ProgrammingError('expected manifestrevlog to clone()')
+            raise error.ProgrammingError(b'expected manifestrevlog to clone()')
 
         return self._revlog.clone(tr, destrevlog._revlog, **kwargs)
 
@@ -1816,8 +1820,8 @@
 
         opts = getattr(opener, 'options', None)
         if opts is not None:
-            usetreemanifest = opts.get('treemanifest', usetreemanifest)
-            cachesize = opts.get('manifestcachesize', cachesize)
+            usetreemanifest = opts.get(b'treemanifest', usetreemanifest)
+            cachesize = opts.get(b'manifestcachesize', cachesize)
 
         self._treemanifests = usetreemanifest
 
@@ -1827,7 +1831,7 @@
 
         # A cache of the manifestctx or treemanifestctx for each directory
         self._dirmancache = {}
-        self._dirmancache[''] = util.lrucachedict(cachesize)
+        self._dirmancache[b''] = util.lrucachedict(cachesize)
 
         self._cachesize = cachesize
 
@@ -1835,7 +1839,7 @@
         """Retrieves the manifest instance for the given node. Throws a
         LookupError if not found.
         """
-        return self.get('', node)
+        return self.get(b'', node)
 
     def get(self, tree, node, verify=True):
         """Retrieves the manifest instance for the given node. Throws a
@@ -1861,8 +1865,8 @@
             else:
                 raise error.Abort(
                     _(
-                        "cannot ask for manifest directory '%s' in a flat "
-                        "manifest"
+                        b"cannot ask for manifest directory '%s' in a flat "
+                        b"manifest"
                     )
                     % tree
                 )
@@ -1872,7 +1876,7 @@
                 self._rootstore.rev(node)
 
             if self._treemanifests:
-                m = treemanifestctx(self, '', node)
+                m = treemanifestctx(self, b'', node)
             else:
                 m = manifestctx(self, node)
 
@@ -2012,7 +2016,7 @@
 
 @interfaceutil.implementer(repository.imanifestrevisionwritable)
 class memtreemanifestctx(object):
-    def __init__(self, manifestlog, dir=''):
+    def __init__(self, manifestlog, dir=b''):
         self._manifestlog = manifestlog
         self._dir = dir
         self._treemanifest = treemanifest()
@@ -2020,7 +2024,7 @@
     def _storage(self):
         return self._manifestlog.getstorage(b'')
 
-    def new(self, dir=''):
+    def new(self, dir=b''):
         return memtreemanifestctx(self._manifestlog, dir=dir)
 
     def copy(self):
@@ -2105,7 +2109,7 @@
     def node(self):
         return self._node
 
-    def new(self, dir=''):
+    def new(self, dir=b''):
         return memtreemanifestctx(self._manifestlog, dir=dir)
 
     def copy(self):
@@ -2186,8 +2190,8 @@
         self._node = node
         # Add an empty file, which will be included by iterators and such,
         # appearing as the directory itself (i.e. something like "dir/")
-        self._files[''] = node
-        self._flags[''] = 't'
+        self._files[b''] = node
+        self._flags[b''] = b't'
 
     # Manifests outside the narrowspec should never be modified, so avoid
     # copying. This makes a noticeable difference when there are very many
@@ -2210,7 +2214,7 @@
 
     def write(self, *args):
         raise error.ProgrammingError(
-            'attempt to write manifest from excluded dir %s' % self._dir
+            b'attempt to write manifest from excluded dir %s' % self._dir
         )
 
 
@@ -2229,22 +2233,22 @@
 
     def __len__(self):
         raise error.ProgrammingError(
-            'attempt to get length of excluded dir %s' % self._dir
+            b'attempt to get length of excluded dir %s' % self._dir
         )
 
     def rev(self, node):
         raise error.ProgrammingError(
-            'attempt to get rev from excluded dir %s' % self._dir
+            b'attempt to get rev from excluded dir %s' % self._dir
         )
 
     def linkrev(self, node):
         raise error.ProgrammingError(
-            'attempt to get linkrev from excluded dir %s' % self._dir
+            b'attempt to get linkrev from excluded dir %s' % self._dir
         )
 
     def node(self, rev):
         raise error.ProgrammingError(
-            'attempt to get node from excluded dir %s' % self._dir
+            b'attempt to get node from excluded dir %s' % self._dir
         )
 
     def add(self, *args, **kwargs):
--- a/mercurial/match.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/match.py	Sun Oct 06 09:48:39 2019 -0400
@@ -26,21 +26,21 @@
 rustmod = policy.importrust(r'filepatterns')
 
 allpatternkinds = (
-    're',
-    'glob',
-    'path',
-    'relglob',
-    'relpath',
-    'relre',
-    'rootglob',
-    'listfile',
-    'listfile0',
-    'set',
-    'include',
-    'subinclude',
-    'rootfilesin',
+    b're',
+    b'glob',
+    b'path',
+    b'relglob',
+    b'relpath',
+    b'relre',
+    b'rootglob',
+    b'listfile',
+    b'listfile0',
+    b'set',
+    b'include',
+    b'subinclude',
+    b'rootfilesin',
 )
-cwdrelativepatternkinds = ('relpath', 'glob')
+cwdrelativepatternkinds = (b'relpath', b'glob')
 
 propertycache = util.propertycache
 
@@ -62,10 +62,10 @@
     other = []
 
     for kind, pat, source in kindpats:
-        if kind == 'set':
+        if kind == b'set':
             if ctx is None:
                 raise error.ProgrammingError(
-                    "fileset expression with no " "context"
+                    b"fileset expression with no " b"context"
                 )
             matchers.append(ctx.matchfileset(pat, badfn=badfn))
 
@@ -87,17 +87,17 @@
     other = []
 
     for kind, pat, source in kindpats:
-        if kind == 'subinclude':
+        if kind == b'subinclude':
             sourceroot = pathutil.dirname(util.normpath(source))
             pat = util.pconvert(pat)
             path = pathutil.join(sourceroot, pat)
 
             newroot = pathutil.dirname(path)
-            matcherargs = (newroot, '', [], ['include:%s' % path])
+            matcherargs = (newroot, b'', [], [b'include:%s' % path])
 
             prefix = pathutil.canonpath(root, root, newroot)
             if prefix:
-                prefix += '/'
+                prefix += b'/'
             relmatchers.append((prefix, matcherargs))
         else:
             other.append((kind, pat, source))
@@ -110,7 +110,7 @@
     'relpath:.' does.
     """
     for kind, pat, source in kindpats:
-        if pat != '' or kind not in ['relpath', 'glob']:
+        if pat != b'' or kind not in [b'relpath', b'glob']:
             return False
     return True
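
A sketch of the empty-pattern case _kindpatsalwaysmatch recognizes above
(kindpats tuples are (kind, pattern, source)):

    _kindpatsalwaysmatch([(b'relpath', b'', b'')])   # -> True
    _kindpatsalwaysmatch([(b'glob', b'*.py', b'')])  # -> False
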
 
@@ -140,7 +140,7 @@
     patterns=None,
     include=None,
     exclude=None,
-    default='glob',
+    default=b'glob',
     auditor=None,
     ctx=None,
     listsubrepos=False,
@@ -236,7 +236,7 @@
             kp = _donormalize(patterns, default, root, cwd, auditor, warn)
             kindpats = []
             for kind, pats, source in kp:
-                if kind not in ('re', 'relre'):  # regex can't be normalized
+                if kind not in (b're', b'relre'):  # regex can't be normalized
                     p = pats
                     pats = dsnormalize(pats)
 
@@ -266,7 +266,7 @@
         m = alwaysmatcher(badfn)
 
     if include:
-        kindpats = normalize(include, 'glob', root, cwd, auditor, warn)
+        kindpats = normalize(include, b'glob', root, cwd, auditor, warn)
         im = _buildkindpatsmatcher(
             includematcher,
             root,
@@ -277,7 +277,7 @@
         )
         m = intersectmatchers(m, im)
     if exclude:
-        kindpats = normalize(exclude, 'glob', root, cwd, auditor, warn)
+        kindpats = normalize(exclude, b'glob', root, cwd, auditor, warn)
         em = _buildkindpatsmatcher(
             includematcher,
             root,
@@ -318,24 +318,24 @@
     for kind, pat in [_patsplit(p, default) for p in patterns]:
         if kind in cwdrelativepatternkinds:
             pat = pathutil.canonpath(root, cwd, pat, auditor=auditor)
-        elif kind in ('relglob', 'path', 'rootfilesin', 'rootglob'):
+        elif kind in (b'relglob', b'path', b'rootfilesin', b'rootglob'):
             pat = util.normpath(pat)
-        elif kind in ('listfile', 'listfile0'):
+        elif kind in (b'listfile', b'listfile0'):
             try:
                 files = util.readfile(pat)
-                if kind == 'listfile0':
-                    files = files.split('\0')
+                if kind == b'listfile0':
+                    files = files.split(b'\0')
                 else:
                     files = files.splitlines()
                 files = [f for f in files if f]
             except EnvironmentError:
-                raise error.Abort(_("unable to read file list (%s)") % pat)
+                raise error.Abort(_(b"unable to read file list (%s)") % pat)
             for k, p, source in _donormalize(
                 files, default, root, cwd, auditor, warn
             ):
                 kindpats.append((k, p, pat))
             continue
-        elif kind == 'include':
+        elif kind == b'include':
             try:
                 fullpath = os.path.join(root, util.localpath(pat))
                 includepats = readpatternfile(fullpath, warn)
@@ -344,16 +344,16 @@
                 ):
                     kindpats.append((k, p, source or pat))
             except error.Abort as inst:
-                raise error.Abort('%s: %s' % (pat, inst[0]))
+                raise error.Abort(b'%s: %s' % (pat, inst[0]))
             except IOError as inst:
                 if warn:
                     warn(
-                        _("skipping unreadable pattern file '%s': %s\n")
+                        _(b"skipping unreadable pattern file '%s': %s\n")
                         % (pat, stringutil.forcebytestr(inst.strerror))
                     )
             continue
         # else: re or relre - which cannot be normalized
-        kindpats.append((kind, pat, ''))
+        kindpats.append((kind, pat, b''))
     return kindpats
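
Of the list-file kinds above, 'listfile0' entries are NUL-separated while
'listfile' entries are newline-separated; the `[f for f in files if f]`
filter above drops the empty trailing entry a terminating separator
produces. A quick illustration with hand-made data:

    >>> b'a.py\x00b.py\x00'.split(b'\x00')
    [b'a.py', b'b.py', b'']
    >>> b'a.py\nb.py\n'.splitlines()
    [b'a.py', b'b.py']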
 
 
@@ -454,7 +454,7 @@
          equivalently, that if there are files to investigate in 'dir' it
          will always return 'this').
         '''
-        return 'this'
+        return b'this'
 
     def always(self):
         '''Matcher will match everything and .files() will be empty --
@@ -490,10 +490,10 @@
         return True
 
     def visitdir(self, dir):
-        return 'all'
+        return b'all'
 
     def visitchildrenset(self, dir):
-        return 'all'
+        return b'all'
 
     def __repr__(self):
         return r'<alwaysmatcher>'
@@ -539,16 +539,16 @@
         s = stringutil.buildrepr(self._predrepr) or pycompat.byterepr(
             self.matchfn
         )
-        return '<predicatenmatcher pred=%s>' % s
+        return b'<predicatenmatcher pred=%s>' % s
 
 
 def normalizerootdir(dir, funcname):
-    if dir == '.':
+    if dir == b'.':
         util.nouideprecwarn(
-            "match.%s() no longer accepts " "'.', use '' instead." % funcname,
-            '5.1',
+            b"match.%s() no longer accepts " b"'.', use '' instead." % funcname,
+            b'5.1',
         )
-        return ''
+        return b''
     return dir
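
The two adjacent byte literals in the deprecation message are concatenated
at compile time, exactly like adjacent str literals (black kept the line
split the original str pair had). Bytes %-formatting, available since
Python 3.5, then fills in the function name:

    >>> b"match.%s() no longer accepts " b"'.', use '' instead." % b'visitdir'
    b"match.visitdir() no longer accepts '.', use '' instead."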
 
 
@@ -590,16 +590,16 @@
 
         self._files = _explicitfiles(kindpats)
         self._prefix = _prefix(kindpats)
-        self._pats, self.matchfn = _buildmatch(kindpats, '$', root)
+        self._pats, self.matchfn = _buildmatch(kindpats, b'$', root)
 
     @propertycache
     def _dirs(self):
         return set(util.dirs(self._fileset))
 
     def visitdir(self, dir):
-        dir = normalizerootdir(dir, 'visitdir')
+        dir = normalizerootdir(dir, b'visitdir')
         if self._prefix and dir in self._fileset:
-            return 'all'
+            return b'all'
         return (
             dir in self._fileset
             or dir in self._dirs
@@ -611,18 +611,18 @@
     def visitchildrenset(self, dir):
         ret = self.visitdir(dir)
         if ret is True:
-            return 'this'
+            return b'this'
         elif not ret:
             return set()
-        assert ret == 'all'
-        return 'all'
+        assert ret == b'all'
+        return b'all'
 
     def prefix(self):
         return self._prefix
 
     @encoding.strmethod
     def __repr__(self):
-        return '<patternmatcher patterns=%r>' % pycompat.bytestr(self._pats)
+        return b'<patternmatcher patterns=%r>' % pycompat.bytestr(self._pats)
 
 
 # This is basically a reimplementation of util.dirs that stores the children
@@ -637,7 +637,7 @@
             addpath(f)
 
     def addpath(self, path):
-        if path == '':
+        if path == b'':
             return
         dirs = self._dirs
         findsplitdirs = _dirchildren._findsplitdirs
@@ -654,12 +654,12 @@
         # Unlike manifest._splittopdir, this does not suffix `dirname` with a
         # slash.
         oldpos = len(path)
-        pos = path.rfind('/')
+        pos = path.rfind(b'/')
         while pos != -1:
             yield path[:pos], path[pos + 1 : oldpos]
             oldpos = pos
-            pos = path.rfind('/', 0, pos)
-        yield '', path[:oldpos]
+            pos = path.rfind(b'/', 0, pos)
+        yield b'', path[:oldpos]
 
     def get(self, path):
         return self._dirs.get(path, set())
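
Assuming the static helper above, the split walks (parent, child) pairs from
the deepest directory up to the root, which is now spelled b'' rather than
'.'. A rough transcript, with the output worked out by hand from the code:

    >>> list(_dirchildren._findsplitdirs(b'a/b/c'))
    [(b'a/b', b'c'), (b'a', b'b'), (b'', b'a')]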
@@ -669,7 +669,7 @@
     def __init__(self, root, kindpats, badfn=None):
         super(includematcher, self).__init__(badfn)
 
-        self._pats, self.matchfn = _buildmatch(kindpats, '(?:/|$)', root)
+        self._pats, self.matchfn = _buildmatch(kindpats, b'(?:/|$)', root)
         self._prefix = _prefix(kindpats)
         roots, dirs, parents = _rootsdirsandparents(kindpats)
         # roots are directories which are recursively included.
@@ -681,9 +681,9 @@
         self._parents = parents
 
     def visitdir(self, dir):
-        dir = normalizerootdir(dir, 'visitdir')
+        dir = normalizerootdir(dir, b'visitdir')
         if self._prefix and dir in self._roots:
-            return 'all'
+            return b'all'
         return (
             dir in self._roots
             or dir in self._dirs
@@ -706,16 +706,16 @@
 
     def visitchildrenset(self, dir):
         if self._prefix and dir in self._roots:
-            return 'all'
+            return b'all'
         # Note: this does *not* include the 'dir in self._parents' case from
         # visitdir, that's handled below.
         if (
-            '' in self._roots
+            b'' in self._roots
             or dir in self._roots
             or dir in self._dirs
             or any(parentdir in self._roots for parentdir in util.finddirs(dir))
         ):
-            return 'this'
+            return b'this'
 
         if dir in self._parents:
             return self._allparentschildren.get(dir) or set()
@@ -723,7 +723,7 @@
 
     @encoding.strmethod
     def __repr__(self):
-        return '<includematcher includes=%r>' % pycompat.bytestr(self._pats)
+        return b'<includematcher includes=%r>' % pycompat.bytestr(self._pats)
 
 
 class exactmatcher(basematcher):
@@ -762,25 +762,25 @@
         return set(util.dirs(self._fileset))
 
     def visitdir(self, dir):
-        dir = normalizerootdir(dir, 'visitdir')
+        dir = normalizerootdir(dir, b'visitdir')
         return dir in self._dirs
 
     def visitchildrenset(self, dir):
-        dir = normalizerootdir(dir, 'visitchildrenset')
+        dir = normalizerootdir(dir, b'visitchildrenset')
 
         if not self._fileset or dir not in self._dirs:
             return set()
 
-        candidates = self._fileset | self._dirs - {''}
-        if dir != '':
-            d = dir + '/'
+        candidates = self._fileset | self._dirs - {b''}
+        if dir != b'':
+            d = dir + b'/'
             candidates = set(c[len(d) :] for c in candidates if c.startswith(d))
         # self._dirs includes all of the directories, recursively, so if
         # we're attempting to match foo/bar/baz.txt, it'll have '', 'foo',
         # 'foo/bar' in it. Thus we can safely ignore a candidate that has a
        # '/' in it, indicating it's for a subdir-of-a-subdir; the
         # immediate subdir will be in there without a slash.
-        ret = {c for c in candidates if '/' not in c}
+        ret = {c for c in candidates if b'/' not in c}
         # We really do not expect ret to be empty, since that would imply that
         # there's something in _dirs that didn't have a file in _fileset.
         assert ret
@@ -791,7 +791,7 @@
 
     @encoding.strmethod
     def __repr__(self):
-        return '<exactmatcher files=%r>' % self._files
+        return b'<exactmatcher files=%r>' % self._files
 
 
 class differencematcher(basematcher):
@@ -825,7 +825,7 @@
         return self._m1.files()
 
     def visitdir(self, dir):
-        if self._m2.visitdir(dir) == 'all':
+        if self._m2.visitdir(dir) == b'all':
             return False
         elif not self._m2.visitdir(dir):
             # m2 does not match dir, we can return 'all' here if possible
@@ -834,7 +834,7 @@
 
     def visitchildrenset(self, dir):
         m2_set = self._m2.visitchildrenset(dir)
-        if m2_set == 'all':
+        if m2_set == b'all':
             return set()
         m1_set = self._m1.visitchildrenset(dir)
         # Possible values for m1: 'all', 'this', set(...), set()
@@ -844,11 +844,11 @@
         # return True, not 'all', for some reason.
         if not m2_set:
             return m1_set
-        if m1_set in ['all', 'this']:
+        if m1_set in [b'all', b'this']:
             # Never return 'all' here if m2_set is any kind of non-empty (either
             # 'this' or set(foo)), since m2 might return set() for a
             # subdirectory.
-            return 'this'
+            return b'this'
         # Possible values for m1:         set(...), set()
         # Possible values for m2: 'this', set(...)
         # We ignore m2's set results. They're possibly incorrect:
@@ -862,7 +862,7 @@
 
     @encoding.strmethod
     def __repr__(self):
-        return '<differencematcher m1=%r, m2=%r>' % (self._m1, self._m2)
+        return b'<differencematcher m1=%r, m2=%r>' % (self._m1, self._m2)
 
 
 def intersectmatchers(m1, m2):
@@ -914,7 +914,7 @@
 
     def visitdir(self, dir):
         visit1 = self._m1.visitdir(dir)
-        if visit1 == 'all':
+        if visit1 == b'all':
             return self._m2.visitdir(dir)
         # bool() because visit1=True + visit2='all' should not be 'all'
         return bool(visit1 and self._m2.visitdir(dir))
@@ -927,13 +927,13 @@
         if not m2_set:
             return set()
 
-        if m1_set == 'all':
+        if m1_set == b'all':
             return m2_set
-        elif m2_set == 'all':
+        elif m2_set == b'all':
             return m1_set
 
-        if m1_set == 'this' or m2_set == 'this':
-            return 'this'
+        if m1_set == b'this' or m2_set == b'this':
+            return b'this'
 
         assert isinstance(m1_set, set) and isinstance(m2_set, set)
         return m1_set.intersection(m2_set)
@@ -946,7 +946,7 @@
 
     @encoding.strmethod
     def __repr__(self):
-        return '<intersectionmatcher m1=%r, m2=%r>' % (self._m1, self._m2)
+        return b'<intersectionmatcher m1=%r, m2=%r>' % (self._m1, self._m2)
 
 
 class subdirmatcher(basematcher):
@@ -985,7 +985,7 @@
         self._files = [
             f[len(path) + 1 :]
             for f in matcher._files
-            if f.startswith(path + "/")
+            if f.startswith(path + b"/")
         ]
 
         # If the parent repo had a path to this subrepo and the matcher is
@@ -994,29 +994,29 @@
             self._always = any(f == path for f in matcher._files)
 
     def bad(self, f, msg):
-        self._matcher.bad(self._path + "/" + f, msg)
+        self._matcher.bad(self._path + b"/" + f, msg)
 
     def matchfn(self, f):
         # Some information is lost in the superclass's constructor, so we
         # can not accurately create the matching function for the subdirectory
         # from the inputs. Instead, we override matchfn() and visitdir() to
         # call the original matcher with the subdirectory path prepended.
-        return self._matcher.matchfn(self._path + "/" + f)
+        return self._matcher.matchfn(self._path + b"/" + f)
 
     def visitdir(self, dir):
-        dir = normalizerootdir(dir, 'visitdir')
-        if dir == '':
+        dir = normalizerootdir(dir, b'visitdir')
+        if dir == b'':
             dir = self._path
         else:
-            dir = self._path + "/" + dir
+            dir = self._path + b"/" + dir
         return self._matcher.visitdir(dir)
 
     def visitchildrenset(self, dir):
-        dir = normalizerootdir(dir, 'visitchildrenset')
-        if dir == '':
+        dir = normalizerootdir(dir, b'visitchildrenset')
+        if dir == b'':
             dir = self._path
         else:
-            dir = self._path + "/" + dir
+            dir = self._path + b"/" + dir
         return self._matcher.visitchildrenset(dir)
 
     def always(self):
@@ -1027,7 +1027,7 @@
 
     @encoding.strmethod
     def __repr__(self):
-        return '<subdirmatcher path=%r, matcher=%r>' % (
+        return b'<subdirmatcher path=%r, matcher=%r>' % (
             self._path,
             self._matcher,
         )
@@ -1069,9 +1069,9 @@
     def __init__(self, path, matcher, badfn=None):
         super(prefixdirmatcher, self).__init__(badfn)
         if not path:
-            raise error.ProgrammingError('prefix path must not be empty')
+            raise error.ProgrammingError(b'prefix path must not be empty')
         self._path = path
-        self._pathprefix = path + '/'
+        self._pathprefix = path + b'/'
         self._matcher = matcher
 
     @propertycache
@@ -1089,18 +1089,18 @@
 
     def visitdir(self, dir):
         if dir == self._path:
-            return self._matcher.visitdir('')
+            return self._matcher.visitdir(b'')
         if dir.startswith(self._pathprefix):
             return self._matcher.visitdir(dir[len(self._pathprefix) :])
         return dir in self._pathdirs
 
     def visitchildrenset(self, dir):
         if dir == self._path:
-            return self._matcher.visitchildrenset('')
+            return self._matcher.visitchildrenset(b'')
         if dir.startswith(self._pathprefix):
             return self._matcher.visitchildrenset(dir[len(self._pathprefix) :])
         if dir in self._pathdirs:
-            return 'this'
+            return b'this'
         return set()
 
     def isexact(self):
@@ -1111,7 +1111,7 @@
 
     @encoding.strmethod
     def __repr__(self):
-        return '<prefixdirmatcher path=%r, matcher=%r>' % (
+        return b'<prefixdirmatcher path=%r, matcher=%r>' % (
             pycompat.bytestr(self._path),
             self._matcher,
         )
@@ -1141,7 +1141,7 @@
         r = False
         for m in self._matchers:
             v = m.visitdir(dir)
-            if v == 'all':
+            if v == b'all':
                 return v
             r |= v
         return r
@@ -1153,21 +1153,21 @@
             v = m.visitchildrenset(dir)
             if not v:
                 continue
-            if v == 'all':
+            if v == b'all':
                 return v
-            if this or v == 'this':
+            if this or v == b'this':
                 this = True
                 # don't break, we might have an 'all' in here.
                 continue
             assert isinstance(v, set)
             r = r.union(v)
         if this:
-            return 'this'
+            return b'this'
         return r
 
     @encoding.strmethod
     def __repr__(self):
-        return '<unionmatcher matchers=%r>' % self._matchers
+        return b'<unionmatcher matchers=%r>' % self._matchers
 
 
 def patkind(pattern, default=None):
@@ -1189,8 +1189,8 @@
 def _patsplit(pattern, default):
     """Split a string into the optional pattern kind prefix and the actual
     pattern."""
-    if ':' in pattern:
-        kind, pat = pattern.split(':', 1)
+    if b':' in pattern:
+        kind, pat = pattern.split(b':', 1)
         if kind in allpatternkinds:
             return kind, pat
     return default, pattern
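
Given the kinds registered in allpatternkinds earlier in the file, the split
behaves roughly like this (worked out by hand):

    >>> _patsplit(b'glob:*.py', b'relpath')
    (b'glob', b'*.py')
    >>> _patsplit(b'plainname', b'relpath')
    (b'relpath', b'plainname')
    >>> _patsplit(b'nosuchkind:x', b'relpath')  # unknown prefix stays put
    (b'relpath', b'nosuchkind:x')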
@@ -1220,7 +1220,7 @@
     \.\*\?
     '''
     i, n = 0, len(pat)
-    res = ''
+    res = b''
     group = 0
     escape = util.stringutil.regexbytesescapemap.get
 
@@ -1230,45 +1230,45 @@
     while i < n:
         c = pat[i : i + 1]
         i += 1
-        if c not in '*?[{},\\':
+        if c not in b'*?[{},\\':
             res += escape(c, c)
-        elif c == '*':
-            if peek() == '*':
+        elif c == b'*':
+            if peek() == b'*':
                 i += 1
-                if peek() == '/':
+                if peek() == b'/':
                     i += 1
-                    res += '(?:.*/)?'
+                    res += b'(?:.*/)?'
                 else:
-                    res += '.*'
+                    res += b'.*'
             else:
-                res += '[^/]*'
-        elif c == '?':
-            res += '.'
-        elif c == '[':
+                res += b'[^/]*'
+        elif c == b'?':
+            res += b'.'
+        elif c == b'[':
             j = i
-            if j < n and pat[j : j + 1] in '!]':
+            if j < n and pat[j : j + 1] in b'!]':
                 j += 1
-            while j < n and pat[j : j + 1] != ']':
+            while j < n and pat[j : j + 1] != b']':
                 j += 1
             if j >= n:
-                res += '\\['
+                res += b'\\['
             else:
-                stuff = pat[i:j].replace('\\', '\\\\')
+                stuff = pat[i:j].replace(b'\\', b'\\\\')
                 i = j + 1
-                if stuff[0:1] == '!':
-                    stuff = '^' + stuff[1:]
-                elif stuff[0:1] == '^':
-                    stuff = '\\' + stuff
-                res = '%s[%s]' % (res, stuff)
-        elif c == '{':
+                if stuff[0:1] == b'!':
+                    stuff = b'^' + stuff[1:]
+                elif stuff[0:1] == b'^':
+                    stuff = b'\\' + stuff
+                res = b'%s[%s]' % (res, stuff)
+        elif c == b'{':
             group += 1
-            res += '(?:'
-        elif c == '}' and group:
-            res += ')'
+            res += b'(?:'
+        elif c == b'}' and group:
+            res += b')'
             group -= 1
-        elif c == ',' and group:
-            res += '|'
-        elif c == '\\':
+        elif c == b',' and group:
+            res += b'|'
+        elif c == b'\\':
             p = peek()
             if p:
                 i += 1
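
Note the consistent use of one-byte slices (pat[i : i + 1], stuff[0:1])
rather than plain indexing: on Python 3, indexing bytes yields an int, and
only a slice gives something comparable to a b'...' literal:

    >>> pat = b'*?'
    >>> pat[0]     # an int on Python 3
    42
    >>> pat[0:1]   # a one-byte bytes object
    b'*'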
@@ -1290,39 +1290,39 @@
             return rustmod.build_single_regex(kind, pat, globsuffix)
         except rustmod.PatternError:
             raise error.ProgrammingError(
-                'not a regex pattern: %s:%s' % (kind, pat)
+                b'not a regex pattern: %s:%s' % (kind, pat)
             )
 
-    if not pat and kind in ('glob', 'relpath'):
-        return ''
-    if kind == 're':
+    if not pat and kind in (b'glob', b'relpath'):
+        return b''
+    if kind == b're':
         return pat
-    if kind in ('path', 'relpath'):
-        if pat == '.':
-            return ''
-        return util.stringutil.reescape(pat) + '(?:/|$)'
-    if kind == 'rootfilesin':
-        if pat == '.':
-            escaped = ''
+    if kind in (b'path', b'relpath'):
+        if pat == b'.':
+            return b''
+        return util.stringutil.reescape(pat) + b'(?:/|$)'
+    if kind == b'rootfilesin':
+        if pat == b'.':
+            escaped = b''
         else:
             # Pattern is a directory name.
-            escaped = util.stringutil.reescape(pat) + '/'
+            escaped = util.stringutil.reescape(pat) + b'/'
         # Anything after the pattern must be a non-directory.
-        return escaped + '[^/]+$'
-    if kind == 'relglob':
+        return escaped + b'[^/]+$'
+    if kind == b'relglob':
         globre = _globre(pat)
-        if globre.startswith('[^/]*'):
+        if globre.startswith(b'[^/]*'):
             # When pat has the form *XYZ (common), make the returned regex more
             # legible by returning the regex for **XYZ instead of **/*XYZ.
-            return '.*' + globre[len('[^/]*') :] + globsuffix
-        return '(?:|.*/)' + globre + globsuffix
-    if kind == 'relre':
-        if pat.startswith('^'):
+            return b'.*' + globre[len(b'[^/]*') :] + globsuffix
+        return b'(?:|.*/)' + globre + globsuffix
+    if kind == b'relre':
+        if pat.startswith(b'^'):
             return pat
-        return '.*' + pat
-    if kind in ('glob', 'rootglob'):
+        return b'.*' + pat
+    if kind in (b'glob', b'rootglob'):
         return _globre(pat) + globsuffix
-    raise error.ProgrammingError('not a regex pattern: %s:%s' % (kind, pat))
+    raise error.ProgrammingError(b'not a regex pattern: %s:%s' % (kind, pat))
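
For intuition, a few translations this function produces, assuming the
pure-Python path (no Rust fast path) and worked out by hand:

    >>> _regex(b'glob', b'*.py', b'$')
    b'[^/]*\\.py$'
    >>> _regex(b'relpath', b'docs', b'$')
    b'docs(?:/|$)'
    >>> _regex(b'rootfilesin', b'docs', b'$')
    b'docs/[^/]+$'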
 
 
 def _buildmatch(kindpats, globsuffix, root):
@@ -1348,17 +1348,17 @@
 
         matchfuncs.append(matchsubinclude)
 
-    regex = ''
+    regex = b''
     if kindpats:
-        if all(k == 'rootfilesin' for k, p, s in kindpats):
+        if all(k == b'rootfilesin' for k, p, s in kindpats):
             dirs = {p for k, p, s in kindpats}
 
             def mf(f):
-                i = f.rfind('/')
+                i = f.rfind(b'/')
                 if i >= 0:
                     dir = f[:i]
                 else:
-                    dir = '.'
+                    dir = b'.'
                 return dir in dirs
 
             regex = b'rootfilesin: %s' % stringutil.pprint(list(sorted(dirs)))
@@ -1378,7 +1378,7 @@
 
 def _joinregexes(regexps):
     """gather multiple regular expressions into a single one"""
-    return '|'.join(regexps)
+    return b'|'.join(regexps)
 
 
 def _buildregexmatch(kindpats, globsuffix):
@@ -1403,7 +1403,7 @@
         for idx, r in enumerate(regexps):
             piecesize = len(r)
             if piecesize > MAX_RE_SIZE:
-                msg = _("matcher pattern is too long (%d bytes)") % piecesize
+                msg = _(b"matcher pattern is too long (%d bytes)") % piecesize
                 raise error.Abort(msg)
             elif (groupsize + piecesize) > MAX_RE_SIZE:
                 group = regexps[startidx:idx]
@@ -1428,11 +1428,11 @@
             except re.error:
                 if s:
                     raise error.Abort(
-                        _("%s: invalid pattern (%s): %s") % (s, k, p)
+                        _(b"%s: invalid pattern (%s): %s") % (s, k, p)
                     )
                 else:
-                    raise error.Abort(_("invalid pattern (%s): %s") % (k, p))
-        raise error.Abort(_("invalid pattern"))
+                    raise error.Abort(_(b"invalid pattern (%s): %s") % (k, p))
+        raise error.Abort(_(b"invalid pattern"))
 
 
 def _patternrootsanddirs(kindpats):
@@ -1446,23 +1446,23 @@
     r = []
     d = []
     for kind, pat, source in kindpats:
-        if kind in ('glob', 'rootglob'):  # find the non-glob prefix
+        if kind in (b'glob', b'rootglob'):  # find the non-glob prefix
             root = []
-            for p in pat.split('/'):
-                if '[' in p or '{' in p or '*' in p or '?' in p:
+            for p in pat.split(b'/'):
+                if b'[' in p or b'{' in p or b'*' in p or b'?' in p:
                     break
                 root.append(p)
-            r.append('/'.join(root))
-        elif kind in ('relpath', 'path'):
-            if pat == '.':
-                pat = ''
+            r.append(b'/'.join(root))
+        elif kind in (b'relpath', b'path'):
+            if pat == b'.':
+                pat = b''
             r.append(pat)
-        elif kind in ('rootfilesin',):
-            if pat == '.':
-                pat = ''
+        elif kind in (b'rootfilesin',):
+            if pat == b'.':
+                pat = b''
             d.append(pat)
         else:  # relglob, re, relre
-            r.append('')
+            r.append(b'')
     return r, d
 
 
@@ -1526,14 +1526,14 @@
     '''
     # Keep only the pattern kinds where one can specify filenames (vs only
     # directory names).
-    filable = [kp for kp in kindpats if kp[0] not in ('rootfilesin',)]
+    filable = [kp for kp in kindpats if kp[0] not in (b'rootfilesin',)]
     return _roots(filable)
 
 
 def _prefix(kindpats):
     '''Whether all the patterns match a prefix (i.e. recursively)'''
     for kind, pat, source in kindpats:
-        if kind not in ('path', 'relpath'):
+        if kind not in (b'path', b'relpath'):
             return False
     return True
 
@@ -1573,24 +1573,24 @@
         for warning_params in warnings:
             # Can't be easily emitted from Rust, because it would require
             # a mechanism for both gettext and calling the `warn` function.
-            warn(_("%s: ignoring invalid syntax '%s'\n") % warning_params)
+            warn(_(b"%s: ignoring invalid syntax '%s'\n") % warning_params)
 
         return result
 
     syntaxes = {
-        're': 'relre:',
-        'regexp': 'relre:',
-        'glob': 'relglob:',
-        'rootglob': 'rootglob:',
-        'include': 'include',
-        'subinclude': 'subinclude',
+        b're': b'relre:',
+        b'regexp': b'relre:',
+        b'glob': b'relglob:',
+        b'rootglob': b'rootglob:',
+        b'include': b'include',
+        b'subinclude': b'subinclude',
     }
-    syntax = 'relre:'
+    syntax = b'relre:'
     patterns = []
 
-    fp = open(filepath, 'rb')
+    fp = open(filepath, b'rb')
     for lineno, line in enumerate(util.iterfile(fp), start=1):
-        if "#" in line:
+        if b"#" in line:
             global _commentre
             if not _commentre:
                 _commentre = util.re.compile(br'((?:^|[^\\])(?:\\\\)*)#.*')
@@ -1599,19 +1599,19 @@
             if m:
                 line = line[: m.end(1)]
             # fixup properly escaped comments that survived the above
-            line = line.replace("\\#", "#")
+            line = line.replace(b"\\#", b"#")
         line = line.rstrip()
         if not line:
             continue
 
-        if line.startswith('syntax:'):
+        if line.startswith(b'syntax:'):
             s = line[7:].strip()
             try:
                 syntax = syntaxes[s]
             except KeyError:
                 if warn:
                     warn(
-                        _("%s: ignoring invalid syntax '%s'\n") % (filepath, s)
+                        _(b"%s: ignoring invalid syntax '%s'\n") % (filepath, s)
                     )
             continue
 
@@ -1621,7 +1621,7 @@
                 linesyntax = rels
                 line = line[len(rels) :]
                 break
-            elif line.startswith(s + ':'):
+            elif line.startswith(s + b':'):
                 linesyntax = rels
                 line = line[len(s) + 1 :]
                 break
--- a/mercurial/mdiff.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/mdiff.py	Sun Oct 06 09:48:39 2019 -0400
@@ -21,7 +21,7 @@
 )
 from .utils import dateutil
 
-_missing_newline_marker = "\\ No newline at end of file\n"
+_missing_newline_marker = b"\\ No newline at end of file\n"
 
 bdiff = policy.importmod(r'bdiff')
 mpatch = policy.importmod(r'mpatch')
@@ -49,22 +49,22 @@
     '''
 
     defaults = {
-        'context': 3,
-        'text': False,
-        'showfunc': False,
-        'git': False,
-        'nodates': False,
-        'nobinary': False,
-        'noprefix': False,
-        'index': 0,
-        'ignorews': False,
-        'ignorewsamount': False,
-        'ignorewseol': False,
-        'ignoreblanklines': False,
-        'upgrade': False,
-        'showsimilarity': False,
-        'worddiff': False,
-        'xdiff': False,
+        b'context': 3,
+        b'text': False,
+        b'showfunc': False,
+        b'git': False,
+        b'nodates': False,
+        b'nobinary': False,
+        b'noprefix': False,
+        b'index': 0,
+        b'ignorews': False,
+        b'ignorewsamount': False,
+        b'ignorewseol': False,
+        b'ignoreblanklines': False,
+        b'upgrade': False,
+        b'showsimilarity': False,
+        b'worddiff': False,
+        b'xdiff': False,
     }
 
     def __init__(self, **opts):
@@ -79,7 +79,7 @@
             self.context = int(self.context)
         except ValueError:
             raise error.Abort(
-                _('diff context lines count must be ' 'an integer, not %r')
+                _(b'diff context lines count must be ' b'an integer, not %r')
                 % pycompat.bytestr(self.context)
             )
 
@@ -99,7 +99,7 @@
     elif opts.ignorewsamount:
         text = bdiff.fixws(text, 0)
     if blank and opts.ignoreblanklines:
-        text = re.sub('\n+', '\n', text).strip('\n')
+        text = re.sub(b'\n+', b'\n', text).strip(b'\n')
     if opts.ignorewseol:
         text = re.sub(br'[ \t\r\f]+\n', br'\n', text)
     return text
@@ -113,10 +113,10 @@
     s1, e1 = 0, len(lines1)
     s2, e2 = 0, len(lines2)
     while s1 < e1 or s2 < e2:
-        i1, i2, btype = s1, s2, '='
+        i1, i2, btype = s1, s2, b'='
         if i1 >= e1 or lines1[i1] == 0 or i2 >= e2 or lines2[i2] == 0:
             # Consume the block of blank lines
-            btype = '~'
+            btype = b'~'
             while i1 < e1 and lines1[i1] == 0:
                 i1 += 1
             while i2 < e2 and lines2[i2] == 0:
@@ -174,26 +174,26 @@
     filteredblocks = []
     for block in blocks:
         (a1, a2, b1, b2), stype = block
-        if lbb >= b1 and ubb <= b2 and stype == '=':
+        if lbb >= b1 and ubb <= b2 and stype == b'=':
             # rangeb is within a single "=" hunk, restrict back linerange1
             # by offsetting rangeb
             lba = lbb - b1 + a1
             uba = ubb - b1 + a1
         else:
             if b1 <= lbb < b2:
-                if stype == '=':
+                if stype == b'=':
                     lba = a2 - (b2 - lbb)
                 else:
                     lba = a1
             if b1 < ubb <= b2:
-                if stype == '=':
+                if stype == b'=':
                     uba = a1 + (ubb - b1)
                 else:
                     uba = a2
         if hunkinrange((b1, (b2 - b1)), rangeb):
             filteredblocks.append(block)
     if lba is None or uba is None or uba < lba:
-        raise error.Abort(_('line range exceeds file size'))
+        raise error.Abort(_(b'line range exceeds file size'))
     return filteredblocks, (lba, uba)
 
 
@@ -201,7 +201,7 @@
     if (
         opts is None
         or not opts.xdiff
-        or not util.safehasattr(bdiff, 'xdiffblocks')
+        or not util.safehasattr(bdiff, b'xdiffblocks')
     ):
         return bdiff.blocks
     else:
@@ -236,18 +236,18 @@
         # bdiff sometimes gives huge matches past eof, this check eats them,
         # and deals with the special first match case described above
         if s[0] != s[1] or s[2] != s[3]:
-            type = '!'
+            type = b'!'
             if opts.ignoreblanklines:
                 if lines1 is None:
                     lines1 = splitnewlines(text1)
                 if lines2 is None:
                     lines2 = splitnewlines(text2)
-                old = wsclean(opts, "".join(lines1[s[0] : s[1]]))
-                new = wsclean(opts, "".join(lines2[s[2] : s[3]]))
+                old = wsclean(opts, b"".join(lines1[s[0] : s[1]]))
+                new = wsclean(opts, b"".join(lines2[s[2] : s[3]]))
                 if old == new:
-                    type = '~'
+                    type = b'~'
             yield s, type
-        yield s1, '='
+        yield s1, b'='
 
 
 def unidiff(a, ad, b, bd, fn1, fn2, binary, opts=defaultopts):
@@ -263,20 +263,20 @@
 
     def datetag(date, fn=None):
         if not opts.git and not opts.nodates:
-            return '\t%s' % date
-        if fn and ' ' in fn:
-            return '\t'
-        return ''
+            return b'\t%s' % date
+        if fn and b' ' in fn:
+            return b'\t'
+        return b''
 
     sentinel = [], ()
     if not a and not b:
         return sentinel
 
     if opts.noprefix:
-        aprefix = bprefix = ''
+        aprefix = bprefix = b''
     else:
-        aprefix = 'a/'
-        bprefix = 'b/'
+        aprefix = b'a/'
+        bprefix = b'b/'
 
     epoch = dateutil.datestr((0, 0))
 
@@ -287,37 +287,37 @@
         if a and b and len(a) == len(b) and a == b:
             return sentinel
         headerlines = []
-        hunks = ((None, ['Binary file %s has changed\n' % fn1]),)
+        hunks = ((None, [b'Binary file %s has changed\n' % fn1]),)
     elif not a:
-        without_newline = not b.endswith('\n')
+        without_newline = not b.endswith(b'\n')
         b = splitnewlines(b)
         if a is None:
-            l1 = '--- /dev/null%s' % datetag(epoch)
+            l1 = b'--- /dev/null%s' % datetag(epoch)
         else:
-            l1 = "--- %s%s%s" % (aprefix, fn1, datetag(ad, fn1))
-        l2 = "+++ %s%s" % (bprefix + fn2, datetag(bd, fn2))
+            l1 = b"--- %s%s%s" % (aprefix, fn1, datetag(ad, fn1))
+        l2 = b"+++ %s%s" % (bprefix + fn2, datetag(bd, fn2))
         headerlines = [l1, l2]
         size = len(b)
         hunkrange = (0, 0, 1, size)
-        hunklines = ["@@ -0,0 +1,%d @@\n" % size] + ["+" + e for e in b]
+        hunklines = [b"@@ -0,0 +1,%d @@\n" % size] + [b"+" + e for e in b]
         if without_newline:
-            hunklines[-1] += '\n'
+            hunklines[-1] += b'\n'
             hunklines.append(_missing_newline_marker)
         hunks = ((hunkrange, hunklines),)
     elif not b:
-        without_newline = not a.endswith('\n')
+        without_newline = not a.endswith(b'\n')
         a = splitnewlines(a)
-        l1 = "--- %s%s%s" % (aprefix, fn1, datetag(ad, fn1))
+        l1 = b"--- %s%s%s" % (aprefix, fn1, datetag(ad, fn1))
         if b is None:
-            l2 = '+++ /dev/null%s' % datetag(epoch)
+            l2 = b'+++ /dev/null%s' % datetag(epoch)
         else:
-            l2 = "+++ %s%s%s" % (bprefix, fn2, datetag(bd, fn2))
+            l2 = b"+++ %s%s%s" % (bprefix, fn2, datetag(bd, fn2))
         headerlines = [l1, l2]
         size = len(a)
         hunkrange = (1, size, 0, 0)
-        hunklines = ["@@ -1,%d +0,0 @@\n" % size] + ["-" + e for e in a]
+        hunklines = [b"@@ -1,%d +0,0 @@\n" % size] + [b"-" + e for e in a]
         if without_newline:
-            hunklines[-1] += '\n'
+            hunklines[-1] += b'\n'
             hunklines.append(_missing_newline_marker)
         hunks = ((hunkrange, hunklines),)
     else:
@@ -326,8 +326,8 @@
             return sentinel
 
         headerlines = [
-            "--- %s%s%s" % (aprefix, fn1, datetag(ad, fn1)),
-            "+++ %s%s%s" % (bprefix, fn2, datetag(bd, fn2)),
+            b"--- %s%s%s" % (aprefix, fn1, datetag(ad, fn1)),
+            b"+++ %s%s%s" % (bprefix, fn2, datetag(bd, fn2)),
         ]
 
     return headerlines, hunks
@@ -359,7 +359,7 @@
             return 0
         return ret
 
-    lastfunc = [0, '']
+    lastfunc = [0, b'']
 
     def yieldhunk(hunk):
         (astart, a2, bstart, b2, delta) = hunk
@@ -367,7 +367,7 @@
         alen = aend - astart
         blen = b2 - bstart + aend - a2
 
-        func = ""
+        func = b""
         if opts.showfunc:
             lastpos, func = lastfunc
             # walk backwards from the start of the context up to the start of
@@ -395,9 +395,9 @@
 
         hunkrange = astart, alen, bstart, blen
         hunklines = (
-            ["@@ -%d,%d +%d,%d @@%s\n" % (hunkrange + (func,))]
+            [b"@@ -%d,%d +%d,%d @@%s\n" % (hunkrange + (func,))]
             + delta
-            + [' ' + l1[x] for x in pycompat.xrange(a2, aend)]
+            + [b' ' + l1[x] for x in pycompat.xrange(a2, aend)]
         )
         # If either file ends without a newline and the last line of
         # that file is part of a hunk, a marker is printed. If the
@@ -405,18 +405,18 @@
         # a newline, print only one marker. That's the only case in
         # which the hunk can end in a shared line without a newline.
         skip = False
-        if not t1.endswith('\n') and astart + alen == len(l1) + 1:
+        if not t1.endswith(b'\n') and astart + alen == len(l1) + 1:
             for i in pycompat.xrange(len(hunklines) - 1, -1, -1):
-                if hunklines[i].startswith(('-', ' ')):
-                    if hunklines[i].startswith(' '):
+                if hunklines[i].startswith((b'-', b' ')):
+                    if hunklines[i].startswith(b' '):
                         skip = True
-                    hunklines[i] += '\n'
+                    hunklines[i] += b'\n'
                     hunklines.insert(i + 1, _missing_newline_marker)
                     break
-        if not skip and not t2.endswith('\n') and bstart + blen == len(l2) + 1:
+        if not skip and not t2.endswith(b'\n') and bstart + blen == len(l2) + 1:
             for i in pycompat.xrange(len(hunklines) - 1, -1, -1):
-                if hunklines[i].startswith('+'):
-                    hunklines[i] += '\n'
+                if hunklines[i].startswith(b'+'):
+                    hunklines[i] += b'\n'
                     hunklines.insert(i + 1, _missing_newline_marker)
                     break
         yield hunkrange, hunklines
@@ -430,8 +430,8 @@
     has_hunks = False
     for s, stype in allblocks(t1, t2, opts, l1, l2):
         a1, a2, b1, b2 = s
-        if stype != '!':
-            if stype == '~':
+        if stype != b'!':
+            if stype == b'~':
                 # The diff context lines are based on t1 content. When
                 # blank lines are ignored, the new lines offsets must
                 # be adjusted as if equivalent blocks ('~') had the
@@ -468,9 +468,9 @@
             # create a new hunk
             hunk = [astart, a2, bstart, b2, delta]
 
-        delta[len(delta) :] = [' ' + x for x in l1[astart:a1]]
-        delta[len(delta) :] = ['-' + x for x in old]
-        delta[len(delta) :] = ['+' + x for x in new]
+        delta[len(delta) :] = [b' ' + x for x in l1[astart:a1]]
+        delta[len(delta) :] = [b'-' + x for x in old]
+        delta[len(delta) :] = [b'+' + x for x in new]
 
     if hunk:
         if not has_hunks:
@@ -488,10 +488,10 @@
     def fmtline(line):
         l = len(line)
         if l <= 26:
-            l = pycompat.bytechr(ord('A') + l - 1)
+            l = pycompat.bytechr(ord(b'A') + l - 1)
         else:
-            l = pycompat.bytechr(l - 26 + ord('a') - 1)
-        return '%c%s\n' % (l, util.b85encode(line, True))
+            l = pycompat.bytechr(l - 26 + ord(b'a') - 1)
+        return b'%c%s\n' % (l, util.b85encode(line, True))
 
     def chunk(text, csize=52):
         l = len(text)
@@ -501,33 +501,33 @@
             i += csize
 
     if to is None:
-        to = ''
+        to = b''
     if tn is None:
-        tn = ''
+        tn = b''
 
     if to == tn:
-        return ''
+        return b''
 
     # TODO: deltas
     ret = []
-    ret.append('GIT binary patch\n')
-    ret.append('literal %d\n' % len(tn))
+    ret.append(b'GIT binary patch\n')
+    ret.append(b'literal %d\n' % len(tn))
     for l in chunk(zlib.compress(tn)):
         ret.append(fmtline(l))
-    ret.append('\n')
+    ret.append(b'\n')
 
-    return ''.join(ret)
+    return b''.join(ret)
 
 
 def patchtext(bin):
     pos = 0
     t = []
     while pos < len(bin):
-        p1, p2, l = struct.unpack(">lll", bin[pos : pos + 12])
+        p1, p2, l = struct.unpack(b">lll", bin[pos : pos + 12])
         pos += 12
         t.append(bin[pos : pos + l])
         pos += l
-    return "".join(t)
+    return b"".join(t)
 
 
 def patch(a, bin):
@@ -543,8 +543,8 @@
 
 
 def trivialdiffheader(length):
-    return struct.pack(">lll", 0, 0, length) if length else ''
+    return struct.pack(b">lll", 0, 0, length) if length else b''
 
 
 def replacediffheader(oldlen, newlen):
-    return struct.pack(">lll", 0, oldlen, newlen)
+    return struct.pack(b">lll", 0, oldlen, newlen)
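
The struct module accepts bytes as well as str format strings on Python 3,
so the byteified b">lll" headers keep working; each describes three
big-endian 32-bit integers:

    >>> import struct
    >>> struct.pack(b'>lll', 0, 0, 12)
    b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0c'
    >>> struct.unpack(b'>lll', struct.pack(b'>lll', 0, 0, 12))
    (0, 0, 12)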
--- a/mercurial/merge.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/merge.py	Sun Oct 06 09:48:39 2019 -0400
@@ -44,9 +44,9 @@
 
 def _droponode(data):
     # used for compatibility for v1
-    bits = data.split('\0')
+    bits = data.split(b'\0')
     bits = bits[:-2] + bits[-1:]
-    return '\0'.join(bits)
+    return b'\0'.join(bits)
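
The v1 merge-state format stores one fewer field than v2; _droponode removes
the second-to-last NUL-separated field (the "other node"). With placeholder
field values invented for illustration:

    >>> _droponode(b'path\x00local\x00afile\x00anode\x00ofile\x00onode\x00flags')
    b'path\x00local\x00afile\x00anode\x00ofile\x00flags'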
 
 
 # Merge state record types. See ``mergestate`` docs for more.
@@ -138,8 +138,8 @@
     'pu' and 'pr' for path conflicts.
     '''
 
-    statepathv1 = 'merge/state'
-    statepathv2 = 'merge/state2'
+    statepathv1 = b'merge/state'
+    statepathv2 = b'merge/state2'
 
     @staticmethod
     def clean(repo, node=None, other=None, labels=None):
@@ -170,7 +170,7 @@
         self._local = None
         self._other = None
         self._labels = labels
-        for var in ('localctx', 'otherctx'):
+        for var in (b'localctx', b'otherctx'):
             if var in vars(self):
                 delattr(self, var)
         if node:
@@ -181,7 +181,7 @@
             self._mdstate = MERGE_DRIVER_STATE_SUCCESS
         else:
             self._mdstate = MERGE_DRIVER_STATE_UNMARKED
-        shutil.rmtree(self._repo.vfs.join('merge'), True)
+        shutil.rmtree(self._repo.vfs.join(b'merge'), True)
         self._results = {}
         self._dirty = False
 
@@ -195,7 +195,7 @@
         self._stateextras = {}
         self._local = None
         self._other = None
-        for var in ('localctx', 'otherctx'):
+        for var in (b'localctx', b'otherctx'):
             if var in vars(self):
                 delattr(self, var)
         self._readmergedriver = None
@@ -208,7 +208,7 @@
             elif rtype == RECORD_OTHER:
                 self._other = bin(record)
             elif rtype == RECORD_MERGE_DRIVER_STATE:
-                bits = record.split('\0', 1)
+                bits = record.split(b'\0', 1)
                 mdstate = bits[1]
                 if len(mdstate) != 1 or mdstate not in (
                     MERGE_DRIVER_STATE_UNMARKED,
@@ -226,11 +226,11 @@
                 RECORD_PATH_CONFLICT,
                 RECORD_MERGE_DRIVER_MERGE,
             ):
-                bits = record.split('\0')
+                bits = record.split(b'\0')
                 self._state[bits[0]] = bits[1:]
             elif rtype == RECORD_FILE_VALUES:
-                filename, rawextras = record.split('\0', 1)
-                extraparts = rawextras.split('\0')
+                filename, rawextras = record.split(b'\0', 1)
+                extraparts = rawextras.split(b'\0')
                 extras = {}
                 i = 0
                 while i < len(extraparts):
@@ -239,7 +239,7 @@
 
                 self._stateextras[filename] = extras
             elif rtype == RECORD_LABELS:
-                labels = record.split('\0', 2)
+                labels = record.split(b'\0', 2)
                 self._labels = [l for l in labels if len(l) > 0]
             elif not rtype.islower():
                 unsupported.add(rtype)
@@ -278,9 +278,9 @@
             # if mctx was wrong `mctx[bits[-2]]` may fails.
             for idx, r in enumerate(v1records):
                 if r[0] == RECORD_MERGED:
-                    bits = r[1].split('\0')
-                    bits.insert(-2, '')
-                    v1records[idx] = (r[0], '\0'.join(bits))
+                    bits = r[1].split(b'\0')
+                    bits.insert(-2, b'')
+                    v1records[idx] = (r[0], b'\0'.join(bits))
             return v1records
 
     def _v1v2match(self, v1records, v2records):
@@ -346,7 +346,7 @@
             while off < end:
                 rtype = data[off : off + 1]
                 off += 1
-                length = _unpack('>I', data[off : (off + 4)])[0]
+                length = _unpack(b'>I', data[off : (off + 4)])[0]
                 off += 4
                 record = data[off : (off + length)]
                 off += length
@@ -369,14 +369,16 @@
         # - B inspects .hgrc and finds it to be clean
         # - B then continues the merge and the malicious merge driver
         #  gets invoked
-        configmergedriver = self._repo.ui.config('experimental', 'mergedriver')
+        configmergedriver = self._repo.ui.config(
+            b'experimental', b'mergedriver'
+        )
         if (
             self._readmergedriver is not None
             and self._readmergedriver != configmergedriver
         ):
             raise error.ConfigError(
-                _("merge driver changed since merge started"),
-                hint=_("revert merge driver change or abort merge"),
+                _(b"merge driver changed since merge started"),
+                hint=_(b"revert merge driver change or abort merge"),
             )
 
         return configmergedriver
@@ -384,14 +386,14 @@
     @util.propertycache
     def localctx(self):
         if self._local is None:
-            msg = "localctx accessed but self._local isn't set"
+            msg = b"localctx accessed but self._local isn't set"
             raise error.ProgrammingError(msg)
         return self._repo[self._local]
 
     @util.propertycache
     def otherctx(self):
         if self._other is None:
-            msg = "otherctx accessed but self._other isn't set"
+            msg = b"otherctx accessed but self._other isn't set"
             raise error.ProgrammingError(msg)
         return self._repo[self._other]
 
@@ -425,7 +427,7 @@
             records.append(
                 (
                     RECORD_MERGE_DRIVER_STATE,
-                    '\0'.join([self.mergedriver, self._mdstate]),
+                    b'\0'.join([self.mergedriver, self._mdstate]),
                 )
             )
         # Write out state items. In all cases, the value of the state map entry
@@ -437,7 +439,7 @@
             if v[0] == MERGE_RECORD_DRIVER_RESOLVED:
                 # Driver-resolved merge. These are stored in 'D' records.
                 records.append(
-                    (RECORD_MERGE_DRIVER_MERGE, '\0'.join([filename] + v))
+                    (RECORD_MERGE_DRIVER_MERGE, b'\0'.join([filename] + v))
                 )
             elif v[0] in (
                 MERGE_RECORD_UNRESOLVED_PATH,
@@ -446,7 +448,7 @@
                 # Path conflicts. These are stored in 'P' records.  The current
                 # resolution state ('pu' or 'pr') is stored within the record.
                 records.append(
-                    (RECORD_PATH_CONFLICT, '\0'.join([filename] + v))
+                    (RECORD_PATH_CONFLICT, b'\0'.join([filename] + v))
                 )
             elif v[1] == nullhex or v[6] == nullhex:
                 # Change/Delete or Delete/Change conflicts. These are stored in
@@ -454,20 +456,20 @@
                 # file is deleted locally ('dc'). v[6] is the remote file, and
                 # is nullhex when the file is deleted remotely ('cd').
                 records.append(
-                    (RECORD_CHANGEDELETE_CONFLICT, '\0'.join([filename] + v))
+                    (RECORD_CHANGEDELETE_CONFLICT, b'\0'.join([filename] + v))
                 )
             else:
                 # Normal files.  These are stored in 'F' records.
-                records.append((RECORD_MERGED, '\0'.join([filename] + v)))
+                records.append((RECORD_MERGED, b'\0'.join([filename] + v)))
         for filename, extras in sorted(self._stateextras.iteritems()):
-            rawextras = '\0'.join(
-                '%s\0%s' % (k, v) for k, v in extras.iteritems()
+            rawextras = b'\0'.join(
+                b'%s\0%s' % (k, v) for k, v in extras.iteritems()
             )
             records.append(
-                (RECORD_FILE_VALUES, '%s\0%s' % (filename, rawextras))
+                (RECORD_FILE_VALUES, b'%s\0%s' % (filename, rawextras))
             )
         if self._labels is not None:
-            labels = '\0'.join(self._labels)
+            labels = b'\0'.join(self._labels)
             records.append((RECORD_LABELS, labels))
         return records
 
@@ -478,14 +480,14 @@
 
     def _writerecordsv1(self, records):
         """Write current state on disk in a version 1 file"""
-        f = self._repo.vfs(self.statepathv1, 'wb')
+        f = self._repo.vfs(self.statepathv1, b'wb')
         irecords = iter(records)
         lrecords = next(irecords)
         assert lrecords[0] == RECORD_LOCAL
-        f.write(hex(self._local) + '\n')
+        f.write(hex(self._local) + b'\n')
         for rtype, data in irecords:
             if rtype == RECORD_MERGED:
-                f.write('%s\n' % _droponode(data))
+                f.write(b'%s\n' % _droponode(data))
         f.close()
 
     def _writerecordsv2(self, records):
@@ -494,12 +496,12 @@
         See the docstring for _readrecordsv2 for why we use 't'."""
         # these are the records that all version 2 clients can read
         allowlist = (RECORD_LOCAL, RECORD_OTHER, RECORD_MERGED)
-        f = self._repo.vfs(self.statepathv2, 'wb')
+        f = self._repo.vfs(self.statepathv2, b'wb')
         for key, data in records:
             assert len(key) == 1
             if key not in allowlist:
-                key, data = RECORD_OVERRIDE, '%s%s' % (key, data)
-            format = '>sI%is' % len(data)
+                key, data = RECORD_OVERRIDE, b'%s%s' % (key, data)
+            format = b'>sI%is' % len(data)
             f.write(_pack(format, key, len(data), data))
         f.close()
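
Each v2 record is packed as b'>sI%is': a one-byte record type, a big-endian
32-bit payload length, then the payload itself. A hand-made record, assuming
_pack is an alias for struct.pack as the call sites suggest, with the key
and data invented for illustration:

    >>> import struct
    >>> data = b'0123456789abcdef0123'
    >>> struct.pack(b'>sI%is' % len(data), b'L', len(data), data)
    b'L\x00\x00\x00\x140123456789abcdef0123'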
 
@@ -523,7 +525,7 @@
             localkey = nullhex
         else:
             localkey = mergestate.getlocalkey(fcl.path())
-            self._repo.vfs.write('merge/' + localkey, fcl.data())
+            self._repo.vfs.write(b'merge/' + localkey, fcl.data())
         self._state[fd] = [
             MERGE_RECORD_UNRESOLVED,
             localkey,
@@ -534,7 +536,7 @@
             hex(fco.filenode()),
             fcl.flags(),
         ]
-        self._stateextras[fd] = {'ancestorlinknode': hex(fca.node())}
+        self._stateextras[fd] = {b'ancestorlinknode': hex(fca.node())}
         self._dirty = True
 
     def addpath(self, path, frename, forigin):
@@ -593,7 +595,7 @@
         state, localkey, lfile, afile, anode, ofile, onode, flags = stateentry
         octx = self._repo[self._other]
         extras = self.extras(dfile)
-        anccommitnode = extras.get('ancestorlinknode')
+        anccommitnode = extras.get(b'ancestorlinknode')
         if anccommitnode:
             actx = self._repo[anccommitnode]
         else:
@@ -605,13 +607,13 @@
         # "premerge" x flags
         flo = fco.flags()
         fla = fca.flags()
-        if 'x' in flags + flo + fla and 'l' not in flags + flo + fla:
+        if b'x' in flags + flo + fla and b'l' not in flags + flo + fla:
             if fca.node() == nullid and flags != flo:
                 if preresolve:
                     self._repo.ui.warn(
                         _(
-                            'warning: cannot merge flags for %s '
-                            'without common ancestor - keeping local flags\n'
+                            b'warning: cannot merge flags for %s '
+                            b'without common ancestor - keeping local flags\n'
                         )
                         % afile
                     )
@@ -620,7 +622,7 @@
         if preresolve:
             # restore local
             if localkey != nullhex:
-                f = self._repo.vfs('merge/' + localkey)
+                f = self._repo.vfs(b'merge/' + localkey)
                 wctx[dfile].write(f.read(), flags)
                 f.close()
             else:
@@ -724,7 +726,7 @@
         }
         for f, (r, action) in self._results.iteritems():
             if action is not None:
-                actions[action].append((f, None, "merge result"))
+                actions[action].append((f, None, b"merge result"))
         return actions
 
     def recordactions(self):
@@ -753,11 +755,11 @@
 
 def _getcheckunknownconfig(repo, section, name):
     config = repo.ui.config(section, name)
-    valid = ['abort', 'ignore', 'warn']
+    valid = [b'abort', b'ignore', b'warn']
     if config not in valid:
-        validstr = ', '.join(["'" + v + "'" for v in valid])
+        validstr = b', '.join([b"'" + v + b"'" for v in valid])
         raise error.ConfigError(
-            _("%s.%s not valid " "('%s' is none of %s)")
+            _(b"%s.%s not valid " b"('%s' is none of %s)")
             % (section, name, config, validstr)
         )
     return config
@@ -848,15 +850,17 @@
     pathconflicts = set()
     warnconflicts = set()
     abortconflicts = set()
-    unknownconfig = _getcheckunknownconfig(repo, 'merge', 'checkunknown')
-    ignoredconfig = _getcheckunknownconfig(repo, 'merge', 'checkignored')
-    pathconfig = repo.ui.configbool('experimental', 'merge.checkpathconflicts')
+    unknownconfig = _getcheckunknownconfig(repo, b'merge', b'checkunknown')
+    ignoredconfig = _getcheckunknownconfig(repo, b'merge', b'checkignored')
+    pathconfig = repo.ui.configbool(
+        b'experimental', b'merge.checkpathconflicts'
+    )
     if not force:
 
         def collectconflicts(conflicts, config):
-            if config == 'abort':
+            if config == b'abort':
                 abortconflicts.update(conflicts)
-            elif config == 'warn':
+            elif config == b'warn':
                 warnconflicts.update(conflicts)
 
         checkunknowndirs = _unknowndirschecker()
@@ -900,42 +904,42 @@
                 #     don't like an abort happening in the middle of
                 #     merge.update.
                 if not different:
-                    actions[f] = (ACTION_GET, (fl2, False), 'remote created')
-                elif mergeforce or config == 'abort':
+                    actions[f] = (ACTION_GET, (fl2, False), b'remote created')
+                elif mergeforce or config == b'abort':
                     actions[f] = (
                         ACTION_MERGE,
                         (f, f, None, False, anc),
-                        'remote differs from untracked local',
+                        b'remote differs from untracked local',
                     )
-                elif config == 'abort':
+                elif config == b'abort':
                     abortconflicts.add(f)
                 else:
-                    if config == 'warn':
+                    if config == b'warn':
                         warnconflicts.add(f)
-                    actions[f] = (ACTION_GET, (fl2, True), 'remote created')
+                    actions[f] = (ACTION_GET, (fl2, True), b'remote created')
 
     for f in sorted(abortconflicts):
         warn = repo.ui.warn
         if f in pathconflicts:
             if repo.wvfs.isfileorlink(f):
-                warn(_("%s: untracked file conflicts with directory\n") % f)
+                warn(_(b"%s: untracked file conflicts with directory\n") % f)
             else:
-                warn(_("%s: untracked directory conflicts with file\n") % f)
+                warn(_(b"%s: untracked directory conflicts with file\n") % f)
         else:
-            warn(_("%s: untracked file differs\n") % f)
+            warn(_(b"%s: untracked file differs\n") % f)
     if abortconflicts:
         raise error.Abort(
             _(
-                "untracked files in working directory "
-                "differ from files in requested revision"
+                b"untracked files in working directory "
+                b"differ from files in requested revision"
             )
         )
 
     for f in sorted(warnconflicts):
         if repo.wvfs.isfileorlink(f):
-            repo.ui.warn(_("%s: replacing untracked file\n") % f)
+            repo.ui.warn(_(b"%s: replacing untracked file\n") % f)
         else:
-            repo.ui.warn(_("%s: replacing untracked files in directory\n") % f)
+            repo.ui.warn(_(b"%s: replacing untracked files in directory\n") % f)
 
     for f, (m, args, msg) in actions.iteritems():
         if m == ACTION_CREATED:
@@ -969,12 +973,12 @@
         m = ACTION_REMOVE
     for f in wctx.deleted():
         if f not in mctx:
-            actions[f] = m, None, "forget deleted"
+            actions[f] = m, None, b"forget deleted"
 
     if not branchmerge:
         for f in wctx.removed():
             if f not in mctx:
-                actions[f] = ACTION_FORGET, None, "forget removed"
+                actions[f] = ACTION_FORGET, None, b"forget removed"
 
     return actions
 
@@ -1032,22 +1036,22 @@
         fold = util.normcase(f)
         if fold in foldmap:
             raise error.Abort(
-                _("case-folding collision between %s and %s")
+                _(b"case-folding collision between %s and %s")
                 % (f, foldmap[fold])
             )
         foldmap[fold] = f
 
     # check case-folding of directories
-    foldprefix = unfoldprefix = lastfull = ''
+    foldprefix = unfoldprefix = lastfull = b''
     for fold, f in sorted(foldmap.items()):
         if fold.startswith(foldprefix) and not f.startswith(unfoldprefix):
             # the folded prefix matches but actual casing is different
             raise error.Abort(
-                _("case-folding collision between " "%s and directory of %s")
+                _(b"case-folding collision between " b"%s and directory of %s")
                 % (lastfull, f)
             )
-        foldprefix = fold + '/'
-        unfoldprefix = f + '/'
+        foldprefix = fold + b'/'
+        unfoldprefix = f + b'/'
         lastfull = f
 
 
@@ -1154,18 +1158,18 @@
     # Rename all local conflicting files that have not been deleted.
     for p in localconflicts:
         if p not in deletedfiles:
-            ctxname = bytes(wctx).rstrip('+')
+            ctxname = bytes(wctx).rstrip(b'+')
             pnew = util.safename(p, ctxname, wctx, set(actions.keys()))
             actions[pnew] = (
                 ACTION_PATH_CONFLICT_RESOLVE,
                 (p,),
-                'local path conflict',
+                b'local path conflict',
             )
-            actions[p] = (ACTION_PATH_CONFLICT, (pnew, 'l'), 'path conflict')
+            actions[p] = (ACTION_PATH_CONFLICT, (pnew, b'l'), b'path conflict')
 
     if remoteconflicts:
         # Check if all files in the conflicting directories have been removed.
-        ctxname = bytes(mctx).rstrip('+')
+        ctxname = bytes(mctx).rstrip(b'+')
         for f, p in _filesindirs(repo, mf, remoteconflicts):
             if f not in deletedfiles:
                 m, args, msg = actions[p]
@@ -1179,20 +1183,20 @@
                     actions[pnew] = (
                         ACTION_LOCAL_DIR_RENAME_GET,
                         (p, fl),
-                        'remote path conflict',
+                        b'remote path conflict',
                     )
                 actions[p] = (
                     ACTION_PATH_CONFLICT,
                     (pnew, ACTION_REMOVE),
-                    'path conflict',
+                    b'path conflict',
                 )
                 remoteconflicts.remove(p)
                 break
 
     if invalidconflicts:
         for p in invalidconflicts:
-            repo.ui.warn(_("%s: is both a file and a directory\n") % p)
-        raise error.Abort(_("destination manifest contains path conflicts"))
+            repo.ui.warn(_(b"%s: is both a file and a directory\n") % p)
+        raise error.Abort(_(b"destination manifest contains path conflicts"))
 
 
 def _filternarrowactions(narrowmatch, branchmerge, actions):
@@ -1202,8 +1206,8 @@
     Raise an exception if the merge cannot be completed because the repo is
     narrowed.
     """
-    nooptypes = {'k'}  # TODO: handle with nonconflicttypes
-    nonconflicttypes = set('a am c cm f g r e'.split())
+    nooptypes = {b'k'}  # TODO: handle with nonconflicttypes
+    nonconflicttypes = set(b'a am c cm f g r e'.split())
     # We mutate the items in the dict during iteration, so iterate
     # over a copy.
     for f, action in list(actions.items()):
@@ -1216,15 +1220,15 @@
         elif action[0] in nonconflicttypes:
             raise error.Abort(
                 _(
-                    'merge affects file \'%s\' outside narrow, '
-                    'which is not yet supported'
+                    b'merge affects file \'%s\' outside narrow, '
+                    b'which is not yet supported'
                 )
                 % f,
-                hint=_('merging in the other direction ' 'may work'),
+                hint=_(b'merging in the other direction ' b'may work'),
             )
         else:
             raise error.Abort(
-                _('conflict in file \'%s\' is outside ' 'narrow clone') % f
+                _(b'conflict in file \'%s\' is outside ' b'narrow clone') % f
             )
 
 
@@ -1265,22 +1269,22 @@
     boolbm = pycompat.bytestr(bool(branchmerge))
     boolf = pycompat.bytestr(bool(force))
     boolm = pycompat.bytestr(bool(matcher))
-    repo.ui.note(_("resolving manifests\n"))
+    repo.ui.note(_(b"resolving manifests\n"))
     repo.ui.debug(
-        " branchmerge: %s, force: %s, partial: %s\n" % (boolbm, boolf, boolm)
+        b" branchmerge: %s, force: %s, partial: %s\n" % (boolbm, boolf, boolm)
     )
-    repo.ui.debug(" ancestor: %s, local: %s, remote: %s\n" % (pa, wctx, p2))
+    repo.ui.debug(b" ancestor: %s, local: %s, remote: %s\n" % (pa, wctx, p2))
 
     m1, m2, ma = wctx.manifest(), p2.manifest(), pa.manifest()
     copied = set(copy.values())
     copied.update(movewithdir.values())
 
-    if '.hgsubstate' in m1 and wctx.rev() is None:
+    if b'.hgsubstate' in m1 and wctx.rev() is None:
         # Check whether sub state is modified, and overwrite the manifest
         # to flag the change. If wctx is a committed revision, we shouldn't
         # care for the dirty state of the working directory.
         if any(wctx.sub(s).dirty() for s in wctx.substate):
-            m1['.hgsubstate'] = modifiednodeid
+            m1[b'.hgsubstate'] = modifiednodeid
 
     # Don't use m2-vs-ma optimization if:
     # - ma is the same as m1 or m2, which we're just going to diff again later
@@ -1312,38 +1316,42 @@
                     actions[f] = (
                         ACTION_MERGE,
                         (f, f, fa, False, pa.node()),
-                        'both renamed from %s' % fa,
+                        b'both renamed from %s' % fa,
                     )
                 else:
                     actions[f] = (
                         ACTION_MERGE,
                         (f, f, None, False, pa.node()),
-                        'both created',
+                        b'both created',
                     )
             else:
                 a = ma[f]
                 fla = ma.flags(f)
-                nol = 'l' not in fl1 + fl2 + fla
+                nol = b'l' not in fl1 + fl2 + fla
                 if n2 == a and fl2 == fla:
-                    actions[f] = (ACTION_KEEP, (), 'remote unchanged')
+                    actions[f] = (ACTION_KEEP, (), b'remote unchanged')
                 elif n1 == a and fl1 == fla:  # local unchanged - use remote
                     if n1 == n2:  # optimization: keep local content
-                        actions[f] = (ACTION_EXEC, (fl2,), 'update permissions')
+                        actions[f] = (
+                            ACTION_EXEC,
+                            (fl2,),
+                            b'update permissions',
+                        )
                     else:
                         actions[f] = (
                             ACTION_GET,
                             (fl2, False),
-                            'remote is newer',
+                            b'remote is newer',
                         )
                 elif nol and n2 == a:  # remote only changed 'x'
-                    actions[f] = (ACTION_EXEC, (fl2,), 'update permissions')
+                    actions[f] = (ACTION_EXEC, (fl2,), b'update permissions')
                 elif nol and n1 == a:  # local only changed 'x'
-                    actions[f] = (ACTION_GET, (fl1, False), 'remote is newer')
+                    actions[f] = (ACTION_GET, (fl1, False), b'remote is newer')
                 else:  # both changed something
                     actions[f] = (
                         ACTION_MERGE,
                         (f, f, f, False, pa.node()),
-                        'versions differ',
+                        b'versions differ',
                     )
         elif n1:  # file exists only on local side
             if f in copied:
@@ -1354,38 +1362,38 @@
                     actions[f2] = (
                         ACTION_MERGE,
                         (f, f2, None, True, pa.node()),
-                        'remote directory rename, both created',
+                        b'remote directory rename, both created',
                     )
                 else:
                     actions[f2] = (
                         ACTION_DIR_RENAME_MOVE_LOCAL,
                         (f, fl1),
-                        'remote directory rename - move from %s' % f,
+                        b'remote directory rename - move from %s' % f,
                     )
             elif f in copy:
                 f2 = copy[f]
                 actions[f] = (
                     ACTION_MERGE,
                     (f, f2, f2, False, pa.node()),
-                    'local copied/moved from %s' % f2,
+                    b'local copied/moved from %s' % f2,
                 )
             elif f in ma:  # clean, a different, no remote
                 if n1 != ma[f]:
                     if acceptremote:
-                        actions[f] = (ACTION_REMOVE, None, 'remote delete')
+                        actions[f] = (ACTION_REMOVE, None, b'remote delete')
                     else:
                         actions[f] = (
                             ACTION_CHANGED_DELETED,
                             (f, None, f, False, pa.node()),
-                            'prompt changed/deleted',
+                            b'prompt changed/deleted',
                         )
                 elif n1 == addednodeid:
                     # This extra 'a' is added by working copy manifest to mark
                     # the file as locally added. We should forget it instead of
                     # deleting it.
-                    actions[f] = (ACTION_FORGET, None, 'remote deleted')
+                    actions[f] = (ACTION_FORGET, None, b'remote deleted')
                 else:
-                    actions[f] = (ACTION_REMOVE, None, 'other deleted')
+                    actions[f] = (ACTION_REMOVE, None, b'other deleted')
         elif n2:  # file exists only on remote side
             if f in copied:
                 pass  # we'll deal with it on m1 side
@@ -1395,13 +1403,13 @@
                     actions[f2] = (
                         ACTION_MERGE,
                         (f2, f, None, False, pa.node()),
-                        'local directory rename, both created',
+                        b'local directory rename, both created',
                     )
                 else:
                     actions[f2] = (
                         ACTION_LOCAL_DIR_RENAME_GET,
                         (f, fl2),
-                        'local directory rename - get from %s' % f,
+                        b'local directory rename - get from %s' % f,
                     )
             elif f in copy:
                 f2 = copy[f]
@@ -1409,13 +1417,13 @@
                     actions[f] = (
                         ACTION_MERGE,
                         (f2, f, f2, False, pa.node()),
-                        'remote copied from %s' % f2,
+                        b'remote copied from %s' % f2,
                     )
                 else:
                     actions[f] = (
                         ACTION_MERGE,
                         (f2, f, f2, True, pa.node()),
-                        'remote moved from %s' % f2,
+                        b'remote moved from %s' % f2,
                     )
             elif f not in ma:
                 # local unknown, remote created: the logic is described by the
@@ -1430,14 +1438,14 @@
                 # Checking whether the files are different is expensive, so we
                 # don't do that when we can avoid it.
                 if not force:
-                    actions[f] = (ACTION_CREATED, (fl2,), 'remote created')
+                    actions[f] = (ACTION_CREATED, (fl2,), b'remote created')
                 elif not branchmerge:
-                    actions[f] = (ACTION_CREATED, (fl2,), 'remote created')
+                    actions[f] = (ACTION_CREATED, (fl2,), b'remote created')
                 else:
                     actions[f] = (
                         ACTION_CREATED_MERGE,
                         (fl2, pa.node()),
-                        'remote created, get or merge',
+                        b'remote created, get or merge',
                     )
             elif n2 != ma[f]:
                 df = None
@@ -1450,18 +1458,19 @@
                     actions[df] = (
                         ACTION_MERGE,
                         (df, f, f, False, pa.node()),
-                        'local directory rename - respect move ' 'from %s' % f,
+                        b'local directory rename - respect move '
+                        b'from %s' % f,
                     )
                 elif acceptremote:
-                    actions[f] = (ACTION_CREATED, (fl2,), 'remote recreating')
+                    actions[f] = (ACTION_CREATED, (fl2,), b'remote recreating')
                 else:
                     actions[f] = (
                         ACTION_DELETED_CHANGED,
                         (None, f, f, False, pa.node()),
-                        'prompt deleted/changed',
+                        b'prompt deleted/changed',
                     )
 
-    if repo.ui.configbool('experimental', 'merge.checkpathconflicts'):
+    if repo.ui.configbool(b'experimental', b'merge.checkpathconflicts'):
         # If we are merging, look for path conflicts.
         checkpathconflicts(repo, wctx, p2, actions)
 
@@ -1485,7 +1494,7 @@
             and not wctx[f].cmp(ancestor[f])
         ):
             # local did change but ended up with same content
-            actions[f] = ACTION_REMOVE, None, 'prompt same'
+            actions[f] = ACTION_REMOVE, None, b'prompt same'
         elif (
             m == ACTION_DELETED_CHANGED
             and f in ancestor
@@ -1527,11 +1536,11 @@
 
     else:  # only when merge.preferancestor=* - the default
         repo.ui.note(
-            _("note: merging %s and %s using bids from ancestors %s\n")
+            _(b"note: merging %s and %s using bids from ancestors %s\n")
             % (
                 wctx,
                 mctx,
-                _(' and ').join(pycompat.bytestr(anc) for anc in ancestors),
+                _(b' and ').join(pycompat.bytestr(anc) for anc in ancestors),
             )
         )
 
@@ -1541,7 +1550,7 @@
         )  # mapping filename to bids (action method to list of actions)
         diverge, renamedelete = None, None
         for ancestor in ancestors:
-            repo.ui.note(_('\ncalculating bids for ancestor %s\n') % ancestor)
+            repo.ui.note(_(b'\ncalculating bids for ancestor %s\n') % ancestor)
             actions, diverge1, renamedelete1 = manifestmerge(
                 repo,
                 wctx,
@@ -1565,7 +1574,7 @@
 
             for f, a in sorted(actions.iteritems()):
                 m, args, msg = a
-                repo.ui.debug(' %s: %s -> %s\n' % (f, msg, m))
+                repo.ui.debug(b' %s: %s -> %s\n' % (f, msg, m))
                 if f in fbids:
                     d = fbids[f]
                     if m in d:
@@ -1576,7 +1585,7 @@
                     fbids[f] = {m: [a]}
 
         # Pick the best bid for each file
-        repo.ui.note(_('\nauction for merging merge bids\n'))
+        repo.ui.note(_(b'\nauction for merging merge bids\n'))
         actions = {}
         for f, bids in sorted(fbids.items()):
             # bids is a mapping from action method to list of actions
@@ -1584,35 +1593,35 @@
             if len(bids) == 1:  # all bids are the same kind of method
                 m, l = list(bids.items())[0]
                 if all(a == l[0] for a in l[1:]):  # len(bids) is > 1
-                    repo.ui.note(_(" %s: consensus for %s\n") % (f, m))
+                    repo.ui.note(_(b" %s: consensus for %s\n") % (f, m))
                     actions[f] = l[0]
                     continue
             # If keep is an option, just do it.
             if ACTION_KEEP in bids:
-                repo.ui.note(_(" %s: picking 'keep' action\n") % f)
+                repo.ui.note(_(b" %s: picking 'keep' action\n") % f)
                 actions[f] = bids[ACTION_KEEP][0]
                 continue
             # If there are gets and they all agree [how could they not?], do it.
             if ACTION_GET in bids:
                 ga0 = bids[ACTION_GET][0]
                 if all(a == ga0 for a in bids[ACTION_GET][1:]):
-                    repo.ui.note(_(" %s: picking 'get' action\n") % f)
+                    repo.ui.note(_(b" %s: picking 'get' action\n") % f)
                     actions[f] = ga0
                     continue
             # TODO: Consider other simple actions such as mode changes
             # Handle inefficient democrazy.
-            repo.ui.note(_(' %s: multiple bids for merge action:\n') % f)
+            repo.ui.note(_(b' %s: multiple bids for merge action:\n') % f)
             for m, l in sorted(bids.items()):
                 for _f, args, msg in l:
-                    repo.ui.note('  %s -> %s\n' % (msg, m))
+                    repo.ui.note(b'  %s -> %s\n' % (msg, m))
             # Pick random action. TODO: Instead, prompt user when resolving
             m, l = list(bids.items())[0]
             repo.ui.warn(
-                _(' %s: ambiguous merge - picked %s action\n') % (f, m)
+                _(b' %s: ambiguous merge - picked %s action\n') % (f, m)
             )
             actions[f] = l[0]
             continue
-        repo.ui.note(_('end of auction\n\n'))
+        repo.ui.note(_(b'end of auction\n\n'))
 
     if wctx.rev() is None:
         fractions = _forgetremoved(wctx, mctx, branchmerge)
@@ -1644,15 +1653,15 @@
     cwd = _getcwd()
     i = 0
     for f, args, msg in actions:
-        repo.ui.debug(" %s: %s -> r\n" % (f, msg))
+        repo.ui.debug(b" %s: %s -> r\n" % (f, msg))
         if verbose:
-            repo.ui.note(_("removing %s\n") % f)
+            repo.ui.note(_(b"removing %s\n") % f)
         wctx[f].audit()
         try:
             wctx[f].remove(ignoremissing=True)
         except OSError as inst:
             repo.ui.warn(
-                _("update failed to remove %s: %s!\n") % (f, inst.strerror)
+                _(b"update failed to remove %s: %s!\n") % (f, inst.strerror)
             )
         if i == 100:
             yield i, f
@@ -1666,8 +1675,8 @@
         # warning.
         repo.ui.warn(
             _(
-                "current directory was removed\n"
-                "(consider changing to repo root: %s)\n"
+                b"current directory was removed\n"
+                b"(consider changing to repo root: %s)\n"
             )
             % repo.root
         )
@@ -1690,9 +1699,9 @@
     i = 0
     with repo.wvfs.backgroundclosing(ui, expectedcount=len(actions)):
         for f, (flags, backup), msg in actions:
-            repo.ui.debug(" %s: %s -> g\n" % (f, msg))
+            repo.ui.debug(b" %s: %s -> g\n" % (f, msg))
             if verbose:
-                repo.ui.note(_("getting %s\n") % f)
+                repo.ui.note(_(b"getting %s\n") % f)
 
             if backup:
                 # If a file or directory exists with the same name, back that
@@ -1709,7 +1718,7 @@
                     util.rename(repo.wjoin(conflicting), orig)
             wfctx = wctx[f]
             wfctx.clearunknown()
-            atomictemp = ui.configbool("experimental", "update.atomic-file")
+            atomictemp = ui.configbool(b"experimental", b"update.atomic-file")
             size = wfctx.write(
                 fctx(f).data(),
                 flags,
@@ -1822,12 +1831,12 @@
     mergeactions.extend(actions[ACTION_MERGE])
     for f, args, msg in mergeactions:
         f1, f2, fa, move, anc = args
-        if f == '.hgsubstate':  # merged internally
+        if f == b'.hgsubstate':  # merged internally
             continue
         if f1 is None:
             fcl = filemerge.absentfilectx(wctx, fa)
         else:
-            repo.ui.debug(" preserving %s for resolve of %s\n" % (f1, f))
+            repo.ui.debug(b" preserving %s for resolve of %s\n" % (f1, f))
             fcl = wctx[f1]
         if f2 is None:
             fco = filemerge.absentfilectx(mctx, fa)
@@ -1846,16 +1855,16 @@
     # remove renamed files after safely stored
     for f in moves:
         if wctx[f].lexists():
-            repo.ui.debug("removing %s\n" % f)
+            repo.ui.debug(b"removing %s\n" % f)
             wctx[f].audit()
             wctx[f].remove()
 
     numupdates = sum(len(l) for m, l in actions.items() if m != ACTION_KEEP)
     progress = repo.ui.makeprogress(
-        _('updating'), unit=_('files'), total=numupdates
+        _(b'updating'), unit=_(b'files'), total=numupdates
     )
 
-    if [a for a in actions[ACTION_REMOVE] if a[0] == '.hgsubstate']:
+    if [a for a in actions[ACTION_REMOVE] if a[0] == b'.hgsubstate']:
         subrepoutil.submerge(repo, wctx, mctx, wctx, overwrite, labels)
 
     # record path conflicts
@@ -1864,16 +1873,16 @@
         s = repo.ui.status
         s(
             _(
-                "%s: path conflict - a file or link has the same name as a "
-                "directory\n"
+                b"%s: path conflict - a file or link has the same name as a "
+                b"directory\n"
             )
             % f
         )
-        if fo == 'l':
-            s(_("the local file has been renamed to %s\n") % f1)
+        if fo == b'l':
+            s(_(b"the local file has been renamed to %s\n") % f1)
         else:
-            s(_("the remote file has been renamed to %s\n") % f1)
-        s(_("resolve manually then use 'hg resolve --mark %s'\n") % f)
+            s(_(b"the remote file has been renamed to %s\n") % f1)
+        s(_(b"resolve manually then use 'hg resolve --mark %s'\n") % f)
         ms.addpath(f, f1, fo)
         progress.increment(item=f)
 
@@ -1891,10 +1900,10 @@
 
     # resolve path conflicts (must come before getting)
     for f, args, msg in actions[ACTION_PATH_CONFLICT_RESOLVE]:
-        repo.ui.debug(" %s: %s -> pr\n" % (f, msg))
+        repo.ui.debug(b" %s: %s -> pr\n" % (f, msg))
         (f0,) = args
         if wctx[f0].lexists():
-            repo.ui.note(_("moving %s to %s\n") % (f0, f))
+            repo.ui.note(_(b"moving %s to %s\n") % (f0, f))
             wctx[f].audit()
             wctx[f].write(wctx.filectx(f0).data(), wctx.filectx(f0).flags())
             wctx[f0].remove()
@@ -1902,7 +1911,7 @@
 
     # get in parallel.
     threadsafe = repo.ui.configbool(
-        'experimental', 'worker.wdir-get-thread-safe'
+        b'experimental', b'worker.wdir-get-thread-safe'
     )
     prog = worker.worker(
         repo.ui,
@@ -1922,35 +1931,35 @@
             progress.increment(step=i, item=item)
     updated = len(actions[ACTION_GET])
 
-    if [a for a in actions[ACTION_GET] if a[0] == '.hgsubstate']:
+    if [a for a in actions[ACTION_GET] if a[0] == b'.hgsubstate']:
         subrepoutil.submerge(repo, wctx, mctx, wctx, overwrite, labels)
 
     # forget (manifest only, just log it) (must come first)
     for f, args, msg in actions[ACTION_FORGET]:
-        repo.ui.debug(" %s: %s -> f\n" % (f, msg))
+        repo.ui.debug(b" %s: %s -> f\n" % (f, msg))
         progress.increment(item=f)
 
     # re-add (manifest only, just log it)
     for f, args, msg in actions[ACTION_ADD]:
-        repo.ui.debug(" %s: %s -> a\n" % (f, msg))
+        repo.ui.debug(b" %s: %s -> a\n" % (f, msg))
         progress.increment(item=f)
 
     # re-add/mark as modified (manifest only, just log it)
     for f, args, msg in actions[ACTION_ADD_MODIFIED]:
-        repo.ui.debug(" %s: %s -> am\n" % (f, msg))
+        repo.ui.debug(b" %s: %s -> am\n" % (f, msg))
         progress.increment(item=f)
 
     # keep (noop, just log it)
     for f, args, msg in actions[ACTION_KEEP]:
-        repo.ui.debug(" %s: %s -> k\n" % (f, msg))
+        repo.ui.debug(b" %s: %s -> k\n" % (f, msg))
         # no progress
 
     # directory rename, move local
     for f, args, msg in actions[ACTION_DIR_RENAME_MOVE_LOCAL]:
-        repo.ui.debug(" %s: %s -> dm\n" % (f, msg))
+        repo.ui.debug(b" %s: %s -> dm\n" % (f, msg))
         progress.increment(item=f)
         f0, flags = args
-        repo.ui.note(_("moving %s to %s\n") % (f0, f))
+        repo.ui.note(_(b"moving %s to %s\n") % (f0, f))
         wctx[f].audit()
         wctx[f].write(wctx.filectx(f0).data(), flags)
         wctx[f0].remove()
@@ -1958,20 +1967,20 @@
 
     # local directory rename, get
     for f, args, msg in actions[ACTION_LOCAL_DIR_RENAME_GET]:
-        repo.ui.debug(" %s: %s -> dg\n" % (f, msg))
+        repo.ui.debug(b" %s: %s -> dg\n" % (f, msg))
         progress.increment(item=f)
         f0, flags = args
-        repo.ui.note(_("getting %s to %s\n") % (f0, f))
+        repo.ui.note(_(b"getting %s to %s\n") % (f0, f))
         wctx[f].write(mctx.filectx(f0).data(), flags)
         updated += 1
 
     # exec
     for f, args, msg in actions[ACTION_EXEC]:
-        repo.ui.debug(" %s: %s -> e\n" % (f, msg))
+        repo.ui.debug(b" %s: %s -> e\n" % (f, msg))
         progress.increment(item=f)
         (flags,) = args
         wctx[f].audit()
-        wctx[f].setflags('l' in flags, 'x' in flags)
+        wctx[f].setflags(b'l' in flags, b'x' in flags)
         updated += 1
 
     # the ordering is important here -- ms.mergedriver will raise if the merge
@@ -1982,7 +1991,7 @@
     if usemergedriver:
         if wctx.isinmemory():
             raise error.InMemoryMergeConflictsError(
-                "in-memory merge does not " "support mergedriver"
+                b"in-memory merge does not " b"support mergedriver"
             )
         ms.commit()
         proceed = driverpreprocess(repo, ms, wctx, labels=labels)
@@ -2004,9 +2013,9 @@
         # premerge
         tocomplete = []
         for f, args, msg in mergeactions:
-            repo.ui.debug(" %s: %s -> m (premerge)\n" % (f, msg))
+            repo.ui.debug(b" %s: %s -> m (premerge)\n" % (f, msg))
             progress.increment(item=f)
-            if f == '.hgsubstate':  # subrepo states need updating
+            if f == b'.hgsubstate':  # subrepo states need updating
                 subrepoutil.submerge(
                     repo, wctx, mctx, wctx.ancestor(mctx), overwrite, labels
                 )
@@ -2019,7 +2028,7 @@
 
         # merge
         for f, args, msg in tocomplete:
-            repo.ui.debug(" %s: %s -> m (merge)\n" % (f, msg))
+            repo.ui.debug(b" %s: %s -> m (merge)\n" % (f, msg))
             progress.increment(item=f, total=numupdates)
             ms.resolve(f, wctx)
 
@@ -2082,7 +2091,7 @@
 
 
 def recordupdates(repo, actions, branchmerge, getfiledata):
-    "record merge actions to the dirstate"
+    """record merge actions to the dirstate"""
     # remove (must come first)
     for f, args, msg in actions.get(ACTION_REMOVE, []):
         if branchmerge:
@@ -2178,10 +2187,10 @@
             repo.dirstate.normal(f)
 
 
-UPDATECHECK_ABORT = 'abort'  # handled at higher layers
-UPDATECHECK_NONE = 'none'
-UPDATECHECK_LINEAR = 'linear'
-UPDATECHECK_NO_CONFLICT = 'noconflict'
+UPDATECHECK_ABORT = b'abort'  # handled at higher layers
+UPDATECHECK_NONE = b'none'
+UPDATECHECK_LINEAR = b'linear'
+UPDATECHECK_NO_CONFLICT = b'noconflict'
 
 
 def update(
@@ -2293,7 +2302,7 @@
         if ancestor is not None:
             pas = [repo[ancestor]]
         else:
-            if repo.ui.configlist('merge', 'preferancestor') == ['*']:
+            if repo.ui.configlist(b'merge', b'preferancestor') == [b'*']:
                 cahs = repo.changelog.commonancestorsheads(p1.node(), p2.node())
                 pas = [repo[anc] for anc in (sorted(cahs) or [nullid])]
             else:
@@ -2305,31 +2314,31 @@
         ### check phase
         if not overwrite:
             if len(pl) > 1:
-                raise error.Abort(_("outstanding uncommitted merge"))
+                raise error.Abort(_(b"outstanding uncommitted merge"))
             ms = mergestate.read(repo)
             if list(ms.unresolved()):
                 raise error.Abort(
-                    _("outstanding merge conflicts"),
-                    hint=_("use 'hg resolve' to resolve"),
+                    _(b"outstanding merge conflicts"),
+                    hint=_(b"use 'hg resolve' to resolve"),
                 )
         if branchmerge:
             if pas == [p2]:
                 raise error.Abort(
                     _(
-                        "merging with a working directory ancestor"
-                        " has no effect"
+                        b"merging with a working directory ancestor"
+                        b" has no effect"
                     )
                 )
             elif pas == [p1]:
                 if not mergeancestor and wc.branch() == p2.branch():
                     raise error.Abort(
-                        _("nothing to merge"),
-                        hint=_("use 'hg update' " "or check 'hg heads'"),
+                        _(b"nothing to merge"),
+                        hint=_(b"use 'hg update' " b"or check 'hg heads'"),
                     )
             if not force and (wc.files() or wc.deleted()):
                 raise error.Abort(
-                    _("uncommitted changes"),
-                    hint=_("use 'hg status' to list changes"),
+                    _(b"uncommitted changes"),
+                    hint=_(b"use 'hg status' to list changes"),
                 )
             if not wc.isinmemory():
                 for s in sorted(wc.substate):
@@ -2338,8 +2347,8 @@
         elif not overwrite:
             if p1 == p2:  # no-op update
                 # call the hooks and exit early
-                repo.hook('preupdate', throw=True, parent1=xp2, parent2='')
-                repo.hook('update', parent1=xp2, parent2='', error=0)
+                repo.hook(b'preupdate', throw=True, parent1=xp2, parent2=b'')
+                repo.hook(b'update', parent1=xp2, parent2=b'', error=0)
                 return updateresult(0, 0, 0, 0)
 
             if updatecheck == UPDATECHECK_LINEAR and pas not in (
@@ -2355,8 +2364,8 @@
                     if repo[node].node() in foreground:
                         pass  # allow updating to successors
                     else:
-                        msg = _("uncommitted changes")
-                        hint = _("commit or update --clean to discard changes")
+                        msg = _(b"uncommitted changes")
+                        hint = _(b"commit or update --clean to discard changes")
                         raise error.UpdateAbort(msg, hint=hint)
                 else:
                     # Allow jumping branches if clean and specific rev given
@@ -2368,7 +2377,7 @@
             pas = [p1]
 
         # deprecated config: merge.followcopies
-        followcopies = repo.ui.configbool('merge', 'followcopies')
+        followcopies = repo.ui.configbool(b'merge', b'followcopies')
         if overwrite:
             followcopies = False
         elif not pas[0]:
@@ -2399,42 +2408,46 @@
                     ACTION_REMOVE,
                     ACTION_PATH_CONFLICT_RESOLVE,
                 ):
-                    msg = _("conflicting changes")
-                    hint = _("commit or update --clean to discard changes")
+                    msg = _(b"conflicting changes")
+                    hint = _(b"commit or update --clean to discard changes")
                     raise error.Abort(msg, hint=hint)
 
         # Prompt and create actions. Most of this is in the resolve phase
         # already, but we can't handle .hgsubstate in filemerge or
         # subrepoutil.submerge yet so we have to keep prompting for it.
-        if '.hgsubstate' in actionbyfile:
-            f = '.hgsubstate'
+        if b'.hgsubstate' in actionbyfile:
+            f = b'.hgsubstate'
             m, args, msg = actionbyfile[f]
             prompts = filemerge.partextras(labels)
-            prompts['f'] = f
+            prompts[b'f'] = f
             if m == ACTION_CHANGED_DELETED:
                 if repo.ui.promptchoice(
                     _(
-                        "local%(l)s changed %(f)s which other%(o)s deleted\n"
-                        "use (c)hanged version or (d)elete?"
-                        "$$ &Changed $$ &Delete"
+                        b"local%(l)s changed %(f)s which other%(o)s deleted\n"
+                        b"use (c)hanged version or (d)elete?"
+                        b"$$ &Changed $$ &Delete"
                     )
                     % prompts,
                     0,
                 ):
-                    actionbyfile[f] = (ACTION_REMOVE, None, 'prompt delete')
+                    actionbyfile[f] = (ACTION_REMOVE, None, b'prompt delete')
                 elif f in p1:
-                    actionbyfile[f] = (ACTION_ADD_MODIFIED, None, 'prompt keep')
+                    actionbyfile[f] = (
+                        ACTION_ADD_MODIFIED,
+                        None,
+                        b'prompt keep',
+                    )
                 else:
-                    actionbyfile[f] = (ACTION_ADD, None, 'prompt keep')
+                    actionbyfile[f] = (ACTION_ADD, None, b'prompt keep')
             elif m == ACTION_DELETED_CHANGED:
                 f1, f2, fa, move, anc = args
                 flags = p2[f2].flags()
                 if (
                     repo.ui.promptchoice(
                         _(
-                            "other%(o)s changed %(f)s which local%(l)s deleted\n"
-                            "use (c)hanged version or leave (d)eleted?"
-                            "$$ &Changed $$ &Deleted"
+                            b"other%(o)s changed %(f)s which local%(l)s deleted\n"
+                            b"use (c)hanged version or leave (d)eleted?"
+                            b"$$ &Changed $$ &Deleted"
                         )
                         % prompts,
                         0,
@@ -2444,7 +2457,7 @@
                     actionbyfile[f] = (
                         ACTION_GET,
                         (flags, False),
-                        'prompt recreating',
+                        b'prompt recreating',
                     )
                 else:
                     del actionbyfile[f]
@@ -2469,33 +2482,33 @@
         for f, fl in sorted(diverge.iteritems()):
             repo.ui.warn(
                 _(
-                    "note: possible conflict - %s was renamed "
-                    "multiple times to:\n"
+                    b"note: possible conflict - %s was renamed "
+                    b"multiple times to:\n"
                 )
                 % f
             )
             for nf in sorted(fl):
-                repo.ui.warn(" %s\n" % nf)
+                repo.ui.warn(b" %s\n" % nf)
 
         # rename and delete
         for f, fl in sorted(renamedelete.iteritems()):
             repo.ui.warn(
                 _(
-                    "note: possible conflict - %s was deleted "
-                    "and renamed to:\n"
+                    b"note: possible conflict - %s was deleted "
+                    b"and renamed to:\n"
                 )
                 % f
             )
             for nf in sorted(fl):
-                repo.ui.warn(" %s\n" % nf)
+                repo.ui.warn(b" %s\n" % nf)
 
         ### apply phase
         if not branchmerge:  # just jump to the new rev
-            fp1, fp2, xp1, xp2 = fp2, nullid, xp2, ''
+            fp1, fp2, xp1, xp2 = fp2, nullid, xp2, b''
         if not partial and not wc.isinmemory():
-            repo.hook('preupdate', throw=True, parent1=xp1, parent2=xp2)
+            repo.hook(b'preupdate', throw=True, parent1=xp1, parent2=xp2)
             # note that we're in the middle of an update
-            repo.vfs.write('updatestate', p2.hex())
+            repo.vfs.write(b'updatestate', p2.hex())
 
         # Advertise fsmonitor when its presence could be useful.
         #
@@ -2507,16 +2520,16 @@
         #
         # We only allow on Linux and MacOS because that's where fsmonitor is
         # considered stable.
-        fsmonitorwarning = repo.ui.configbool('fsmonitor', 'warn_when_unused')
+        fsmonitorwarning = repo.ui.configbool(b'fsmonitor', b'warn_when_unused')
         fsmonitorthreshold = repo.ui.configint(
-            'fsmonitor', 'warn_update_file_count'
+            b'fsmonitor', b'warn_update_file_count'
         )
         try:
             # avoid cycle: extensions -> cmdutil -> merge
             from . import extensions
 
-            extensions.find('fsmonitor')
-            fsmonitorenabled = repo.ui.config('fsmonitor', 'mode') != 'off'
+            extensions.find(b'fsmonitor')
+            fsmonitorenabled = repo.ui.config(b'fsmonitor', b'mode') != b'off'
             # We intentionally don't look at whether fsmonitor has disabled
             # itself because a) fsmonitor may have already printed a warning
             # b) we only care about the config state here.
@@ -2528,13 +2541,13 @@
             and not fsmonitorenabled
             and p1.node() == nullid
             and len(actions[ACTION_GET]) >= fsmonitorthreshold
-            and pycompat.sysplatform.startswith(('linux', 'darwin'))
+            and pycompat.sysplatform.startswith((b'linux', b'darwin'))
         ):
             repo.ui.warn(
                 _(
-                    '(warning: large working directory being used without '
-                    'fsmonitor enabled; enable fsmonitor to improve performance; '
-                    'see "hg help -e fsmonitor")\n'
+                    b'(warning: large working directory being used without '
+                    b'fsmonitor enabled; enable fsmonitor to improve performance; '
+                    b'see "hg help -e fsmonitor")\n'
                 )
             )
 
@@ -2549,7 +2562,7 @@
                 repo.setparents(fp1, fp2)
                 recordupdates(repo, actions, branchmerge, getfiledata)
                 # update completed, clear state
-                util.unlink(repo.vfs.join('updatestate'))
+                util.unlink(repo.vfs.join(b'updatestate'))
 
                 if not branchmerge:
                     repo.dirstate.setbranch(p2.branch())
@@ -2561,7 +2574,7 @@
 
     if not partial:
         repo.hook(
-            'update', parent1=xp1, parent2=xp2, error=stats.unresolvedcount
+            b'update', parent1=xp1, parent2=xp2, error=stats.unresolvedcount
         )
     return stats
 
@@ -2590,7 +2603,7 @@
     # to copy commits), and 2) informs update that the incoming changes are
     # newer than the destination so it doesn't prompt about "remote changed foo
     # which local deleted".
-    mergeancestor = repo.changelog.isancestor(repo['.'].node(), ctx.node())
+    mergeancestor = repo.changelog.isancestor(repo[b'.'].node(), ctx.node())
 
     stats = update(
         repo,
@@ -2612,7 +2625,7 @@
             pother = parents[0].node()
 
     with repo.dirstate.parentchange():
-        repo.setparents(repo['.'].node(), pother)
+        repo.setparents(repo[b'.'].node(), pother)
         repo.dirstate.write(repo.currenttransaction())
         # fix up dirstate for copies and renames
         copies.duplicatecopies(repo, repo[None], ctx.rev(), pctx.rev())
@@ -2653,11 +2666,11 @@
         try:
             removefn(path)
         except OSError:
-            m = _('%s cannot be removed') % path
+            m = _(b'%s cannot be removed') % path
             if abortonerror:
                 raise error.Abort(m)
             else:
-                repo.ui.warn(_('warning: %s\n') % m)
+                repo.ui.warn(_(b'warning: %s\n') % m)
 
     # There's no API to copy a matcher. So mutate the passed matcher and
     # restore it when we're done.
@@ -2676,7 +2689,7 @@
         if removefiles:
             for f in sorted(status.unknown + status.ignored):
                 if not noop:
-                    repo.ui.note(_('removing file %s\n') % f)
+                    repo.ui.note(_(b'removing file %s\n') % f)
                     remove(repo.wvfs.unlink, f)
                 res.append(f)
 
@@ -2684,7 +2697,7 @@
             for f in sorted(directories, reverse=True):
                 if matcher(f) and not repo.wvfs.listdir(f):
                     if not noop:
-                        repo.ui.note(_('removing directory %s\n') % f)
+                        repo.ui.note(_(b'removing directory %s\n') % f)
                         remove(repo.wvfs.rmdir, f)
                     res.append(f)
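
A side note on the pattern in the merge.py hunks above: the converted
messages are still %-formatted after gaining their b'' prefixes (for
example repo.ui.debug(b" %s: %s -> %s\n" % (f, msg, m))). That only works
because PEP 461 (Python 3.5) restored the % operator for bytes. A minimal
standalone sketch, plain Python rather than Mercurial code:

    # PEP 461: %-formatting works on bytes as long as every %s argument is
    # itself bytes-like; a str argument raises TypeError.
    f, msg, m = b'a/b.txt', b'remote created', b'g'
    line = b' %s: %s -> %s\n' % (f, msg, m)
    assert line == b' a/b.txt: remote created -> g\n'
    try:
        b'%s' % 'not bytes'
    except TypeError:
        pass  # bytes %s rejects str arguments in Python 3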
 
--- a/mercurial/mergeutil.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/mergeutil.py	Sun Oct 06 09:48:39 2019 -0400
@@ -15,10 +15,10 @@
 def checkunresolved(ms):
     if list(ms.unresolved()):
         raise error.Abort(
-            _("unresolved merge conflicts " "(see 'hg help resolve')")
+            _(b"unresolved merge conflicts " b"(see 'hg help resolve')")
         )
-    if ms.mdstate() != 's' or list(ms.driverresolved()):
+    if ms.mdstate() != b's' or list(ms.driverresolved()):
         raise error.Abort(
-            _('driver-resolved merge conflicts'),
-            hint=_('run "hg resolve --all" to resolve'),
+            _(b'driver-resolved merge conflicts'),
+            hint=_(b'run "hg resolve --all" to resolve'),
         )
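
The mergeutil.py hunk above shows the other recurring pattern in this
patch: implicitly concatenated literal fragments must each carry the b''
prefix, because Python 3 refuses to mix bytes and str literals at compile
time. A small illustration, assuming nothing beyond the language itself:

    # Adjacent literals are joined at compile time; once one fragment is
    # bytes, all of them must be.
    ok = b"unresolved merge conflicts " b"(see 'hg help resolve')"
    assert ok == b"unresolved merge conflicts (see 'hg help resolve')"
    # Mixing prefixes does not even compile:
    #   b"unresolved merge conflicts " "(see 'hg help resolve')"
    #   -> SyntaxError: cannot mix bytes and nonbytes literals
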
--- a/mercurial/minifileset.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/minifileset.py	Sun Oct 06 09:48:39 2019 -0400
@@ -18,49 +18,51 @@
 
 def _sizep(x):
     # i18n: "size" is a keyword
-    expr = filesetlang.getstring(x, _("size requires an expression"))
+    expr = filesetlang.getstring(x, _(b"size requires an expression"))
     return fileset.sizematcher(expr)
 
 
 def _compile(tree):
     if not tree:
-        raise error.ParseError(_("missing argument"))
+        raise error.ParseError(_(b"missing argument"))
     op = tree[0]
-    if op == 'withstatus':
+    if op == b'withstatus':
         return _compile(tree[1])
-    elif op in {'symbol', 'string', 'kindpat'}:
-        name = filesetlang.getpattern(tree, {'path'}, _('invalid file pattern'))
-        if name.startswith('**'):  # file extension test, ex. "**.tar.gz"
+    elif op in {b'symbol', b'string', b'kindpat'}:
+        name = filesetlang.getpattern(
+            tree, {b'path'}, _(b'invalid file pattern')
+        )
+        if name.startswith(b'**'):  # file extension test, ex. "**.tar.gz"
             ext = name[2:]
             for c in pycompat.bytestr(ext):
-                if c in '*{}[]?/\\':
-                    raise error.ParseError(_('reserved character: %s') % c)
+                if c in b'*{}[]?/\\':
+                    raise error.ParseError(_(b'reserved character: %s') % c)
             return lambda n, s: n.endswith(ext)
-        elif name.startswith('path:'):  # directory or full path test
+        elif name.startswith(b'path:'):  # directory or full path test
             p = name[5:]  # prefix
             pl = len(p)
             f = lambda n, s: n.startswith(p) and (
-                len(n) == pl or n[pl : pl + 1] == '/'
+                len(n) == pl or n[pl : pl + 1] == b'/'
             )
             return f
         raise error.ParseError(
-            _("unsupported file pattern: %s") % name,
-            hint=_('paths must be prefixed with "path:"'),
+            _(b"unsupported file pattern: %s") % name,
+            hint=_(b'paths must be prefixed with "path:"'),
         )
-    elif op in {'or', 'patterns'}:
+    elif op in {b'or', b'patterns'}:
         funcs = [_compile(x) for x in tree[1:]]
         return lambda n, s: any(f(n, s) for f in funcs)
-    elif op == 'and':
+    elif op == b'and':
         func1 = _compile(tree[1])
         func2 = _compile(tree[2])
         return lambda n, s: func1(n, s) and func2(n, s)
-    elif op == 'not':
+    elif op == b'not':
         return lambda n, s: not _compile(tree[1])(n, s)
-    elif op == 'func':
+    elif op == b'func':
         symbols = {
-            'all': lambda n, s: True,
-            'none': lambda n, s: False,
-            'size': lambda n, s: _sizep(tree[2])(s),
+            b'all': lambda n, s: True,
+            b'none': lambda n, s: False,
+            b'size': lambda n, s: _sizep(tree[2])(s),
         }
 
         name = filesetlang.getsymbol(tree[1])
@@ -68,16 +70,16 @@
             return symbols[name]
 
         raise error.UnknownIdentifier(name, symbols.keys())
-    elif op == 'minus':  # equivalent to 'x and not y'
+    elif op == b'minus':  # equivalent to 'x and not y'
         func1 = _compile(tree[1])
         func2 = _compile(tree[2])
         return lambda n, s: func1(n, s) and not func2(n, s)
-    elif op == 'list':
+    elif op == b'list':
         raise error.ParseError(
-            _("can't use a list in this context"),
-            hint=_('see \'hg help "filesets.x or y"\''),
+            _(b"can't use a list in this context"),
+            hint=_(b'see \'hg help "filesets.x or y"\''),
         )
-    raise error.ProgrammingError('illegal tree: %r' % (tree,))
+    raise error.ProgrammingError(b'illegal tree: %r' % (tree,))
 
 
 def compile(text):
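
As the commit message notes, all of these hunks were generated mechanically
by contrib/byteify-strings.py. A rough sketch of the core idea follows;
byteify() is a hypothetical toy, not the script's actual API, and it skips
the many special cases (string prefixes, docstrings, opt-out comments) the
real script has to handle:

    # Toy token-based byteifier: prefix bare string literals with b''.
    import io
    import tokenize

    def byteify(source):
        out = []
        for tok in tokenize.generate_tokens(io.StringIO(source).readline):
            if tok.type == tokenize.STRING and tok.string[0] in "\"'":
                out.append((tok.type, 'b' + tok.string))
            else:
                out.append((tok.type, tok.string))
        return tokenize.untokenize(out)

    # byteify("if op == 'withstatus':\n    pass\n") turns 'withstatus' into
    # b'withstatus'; untokenize() may regenerate whitespace slightly
    # differently from the input.
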
--- a/mercurial/minirst.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/minirst.py	Sun Oct 06 09:48:39 2019 -0400
@@ -32,23 +32,23 @@
 
 
 def section(s):
-    return "%s\n%s\n\n" % (s, "\"" * encoding.colwidth(s))
+    return b"%s\n%s\n\n" % (s, b"\"" * encoding.colwidth(s))
 
 
 def subsection(s):
-    return "%s\n%s\n\n" % (s, '=' * encoding.colwidth(s))
+    return b"%s\n%s\n\n" % (s, b'=' * encoding.colwidth(s))
 
 
 def subsubsection(s):
-    return "%s\n%s\n\n" % (s, "-" * encoding.colwidth(s))
+    return b"%s\n%s\n\n" % (s, b"-" * encoding.colwidth(s))
 
 
 def subsubsubsection(s):
-    return "%s\n%s\n\n" % (s, "." * encoding.colwidth(s))
+    return b"%s\n%s\n\n" % (s, b"." * encoding.colwidth(s))
 
 
 def subsubsubsubsection(s):
-    return "%s\n%s\n\n" % (s, "'" * encoding.colwidth(s))
+    return b"%s\n%s\n\n" % (s, b"'" * encoding.colwidth(s))
 
 
 def replace(text, substs):
@@ -85,12 +85,12 @@
     has an 'indent' field and a 'lines' field.
     """
     blocks = []
-    for b in _blockre.split(text.lstrip('\n').rstrip()):
+    for b in _blockre.split(text.lstrip(b'\n').rstrip()):
         lines = b.splitlines()
         if lines:
             indent = min((len(l) - len(l.lstrip())) for l in lines)
             lines = [l[indent:] for l in lines]
-            blocks.append({'indent': indent, 'lines': lines})
+            blocks.append({b'indent': indent, b'lines': lines})
     return blocks
 
 
@@ -111,44 +111,44 @@
         #    +---------------------------+
         #    | indented literal block    |
         #    +---------------------------+
-        blocks[i]['type'] = 'paragraph'
-        if blocks[i]['lines'][-1].endswith('::') and i + 1 < len(blocks):
-            indent = blocks[i]['indent']
-            adjustment = blocks[i + 1]['indent'] - indent
+        blocks[i][b'type'] = b'paragraph'
+        if blocks[i][b'lines'][-1].endswith(b'::') and i + 1 < len(blocks):
+            indent = blocks[i][b'indent']
+            adjustment = blocks[i + 1][b'indent'] - indent
 
-            if blocks[i]['lines'] == ['::']:
+            if blocks[i][b'lines'] == [b'::']:
                 # Expanded form: remove block
                 del blocks[i]
                 i -= 1
-            elif blocks[i]['lines'][-1].endswith(' ::'):
+            elif blocks[i][b'lines'][-1].endswith(b' ::'):
                 # Partially minimized form: remove space and both
                 # colons.
-                blocks[i]['lines'][-1] = blocks[i]['lines'][-1][:-3]
+                blocks[i][b'lines'][-1] = blocks[i][b'lines'][-1][:-3]
             elif (
-                len(blocks[i]['lines']) == 1
-                and blocks[i]['lines'][0].lstrip(' ').startswith('.. ')
-                and blocks[i]['lines'][0].find(' ', 3) == -1
+                len(blocks[i][b'lines']) == 1
+                and blocks[i][b'lines'][0].lstrip(b' ').startswith(b'.. ')
+                and blocks[i][b'lines'][0].find(b' ', 3) == -1
             ):
                 # directive on its own line, not a literal block
                 i += 1
                 continue
             else:
                 # Fully minimized form: remove just one colon.
-                blocks[i]['lines'][-1] = blocks[i]['lines'][-1][:-1]
+                blocks[i][b'lines'][-1] = blocks[i][b'lines'][-1][:-1]
 
             # List items are formatted with a hanging indent. We must
             # correct for this here while we still have the original
             # information on the indentation of the subsequent literal
             # blocks available.
-            m = _bulletre.match(blocks[i]['lines'][0])
+            m = _bulletre.match(blocks[i][b'lines'][0])
             if m:
                 indent += m.end()
                 adjustment -= m.end()
 
             # Mark the following indented blocks.
-            while i + 1 < len(blocks) and blocks[i + 1]['indent'] > indent:
-                blocks[i + 1]['type'] = 'literal'
-                blocks[i + 1]['indent'] -= adjustment
+            while i + 1 < len(blocks) and blocks[i + 1][b'indent'] > indent:
+                blocks[i + 1][b'type'] = b'literal'
+                blocks[i + 1][b'indent'] -= adjustment
                 i += 1
         i += 1
     return blocks
@@ -169,10 +169,10 @@
     # matters: definition lists has the least specific regexp and must
     # come last.
     listtypes = [
-        ('bullet', _bulletre, True),
-        ('option', _optionre, True),
-        ('field', _fieldre, True),
-        ('definition', _definitionre, False),
+        (b'bullet', _bulletre, True),
+        (b'option', _optionre, True),
+        (b'field', _fieldre, True),
+        (b'definition', _definitionre, False),
     ]
 
     def match(lines, i, itemre, singleline):
@@ -182,18 +182,18 @@
         item (but only if singleline is True).
         """
         line1 = lines[i]
-        line2 = i + 1 < len(lines) and lines[i + 1] or ''
+        line2 = i + 1 < len(lines) and lines[i + 1] or b''
         if not itemre.match(line1):
             return False
         if singleline:
-            return line2 == '' or line2[0:1] == ' ' or itemre.match(line2)
+            return line2 == b'' or line2[0:1] == b' ' or itemre.match(line2)
         else:
-            return line2.startswith(' ')
+            return line2.startswith(b' ')
 
     i = 0
     while i < len(blocks):
-        if blocks[i]['type'] == 'paragraph':
-            lines = blocks[i]['lines']
+        if blocks[i][b'type'] == b'paragraph':
+            lines = blocks[i][b'lines']
             for type, itemre, singleline in listtypes:
                 if match(lines, 0, itemre, singleline):
                     items = []
@@ -201,12 +201,12 @@
                         if match(lines, j, itemre, singleline):
                             items.append(
                                 {
-                                    'type': type,
-                                    'lines': [],
-                                    'indent': blocks[i]['indent'],
+                                    b'type': type,
+                                    b'lines': [],
+                                    b'indent': blocks[i][b'indent'],
                                 }
                             )
-                        items[-1]['lines'].append(line)
+                        items[-1][b'lines'].append(line)
                     blocks[i : i + 1] = items
                     break
         i += 1
@@ -220,16 +220,16 @@
     """Find key for field lists."""
     i = 0
     while i < len(blocks):
-        if blocks[i]['type'] != 'field':
+        if blocks[i][b'type'] != b'field':
             i += 1
             continue
 
         j = i
-        while j < len(blocks) and blocks[j]['type'] == 'field':
-            m = _fieldre.match(blocks[j]['lines'][0])
+        while j < len(blocks) and blocks[j][b'type'] == b'field':
+            m = _fieldre.match(blocks[j][b'lines'][0])
             key, rest = m.groups()
-            blocks[j]['lines'][0] = rest
-            blocks[j]['key'] = key
+            blocks[j][b'lines'][0] = rest
+            blocks[j][b'key'] = key
             j += 1
 
         i = j + 1
@@ -240,37 +240,37 @@
 def updateoptionlists(blocks):
     i = 0
     while i < len(blocks):
-        if blocks[i]['type'] != 'option':
+        if blocks[i][b'type'] != b'option':
             i += 1
             continue
 
         optstrwidth = 0
         j = i
-        while j < len(blocks) and blocks[j]['type'] == 'option':
-            m = _optionre.match(blocks[j]['lines'][0])
+        while j < len(blocks) and blocks[j][b'type'] == b'option':
+            m = _optionre.match(blocks[j][b'lines'][0])
 
             shortoption = m.group(2)
             group3 = m.group(3)
             longoption = group3[2:].strip()
             desc = m.group(6).strip()
             longoptionarg = m.group(5).strip()
-            blocks[j]['lines'][0] = desc
+            blocks[j][b'lines'][0] = desc
 
-            noshortop = ''
+            noshortop = b''
             if not shortoption:
-                noshortop = '   '
+                noshortop = b'   '
 
-            opt = "%s%s" % (
-                shortoption and "-%s " % shortoption or '',
-                "%s--%s %s" % (noshortop, longoption, longoptionarg),
+            opt = b"%s%s" % (
+                shortoption and b"-%s " % shortoption or b'',
+                b"%s--%s %s" % (noshortop, longoption, longoptionarg),
             )
             opt = opt.rstrip()
-            blocks[j]['optstr'] = opt
+            blocks[j][b'optstr'] = opt
             optstrwidth = max(optstrwidth, encoding.colwidth(opt))
             j += 1
 
         for block in blocks[i:j]:
-            block['optstrwidth'] = optstrwidth
+            block[b'optstrwidth'] = optstrwidth
         i = j + 1
     return blocks
 
@@ -291,15 +291,15 @@
         # +---+                               |
         #     | blocks                        |
         #     +-------------------------------+
-        if blocks[i]['type'] == 'paragraph' and blocks[i]['lines'][
+        if blocks[i][b'type'] == b'paragraph' and blocks[i][b'lines'][
             0
-        ].startswith('.. container::'):
-            indent = blocks[i]['indent']
-            adjustment = blocks[i + 1]['indent'] - indent
-            containertype = blocks[i]['lines'][0][15:]
+        ].startswith(b'.. container::'):
+            indent = blocks[i][b'indent']
+            adjustment = blocks[i + 1][b'indent'] - indent
+            containertype = blocks[i][b'lines'][0][15:]
             prune = True
             for c in keep:
-                if c in containertype.split('.'):
+                if c in containertype.split(b'.'):
                     prune = False
             if prune:
                 pruned.append(containertype)
@@ -308,11 +308,11 @@
             del blocks[i]
             j = i
             i -= 1
-            while j < len(blocks) and blocks[j]['indent'] > indent:
+            while j < len(blocks) and blocks[j][b'indent'] > indent:
                 if prune:
                     del blocks[j]
                 else:
-                    blocks[j]['indent'] -= adjustment
+                    blocks[j][b'indent'] -= adjustment
                     j += 1
         i += 1
     return blocks, pruned
@@ -337,26 +337,26 @@
         #  x    y   z
         # === ==== ===
         if (
-            block['type'] == 'paragraph'
-            and len(block['lines']) > 2
-            and _tablere.match(block['lines'][0])
-            and block['lines'][0] == block['lines'][-1]
+            block[b'type'] == b'paragraph'
+            and len(block[b'lines']) > 2
+            and _tablere.match(block[b'lines'][0])
+            and block[b'lines'][0] == block[b'lines'][-1]
         ):
-            block['type'] = 'table'
-            block['header'] = False
-            div = block['lines'][0]
+            block[b'type'] = b'table'
+            block[b'header'] = False
+            div = block[b'lines'][0]
 
             # column markers are ASCII so we can calculate column
             # position in bytes
             columns = [
                 x
                 for x in pycompat.xrange(len(div))
-                if div[x : x + 1] == '=' and (x == 0 or div[x - 1 : x] == ' ')
+                if div[x : x + 1] == b'=' and (x == 0 or div[x - 1 : x] == b' ')
             ]
             rows = []
-            for l in block['lines'][1:-1]:
+            for l in block[b'lines'][1:-1]:
                 if l == div:
-                    block['header'] = True
+                    block[b'header'] = True
                     continue
                 row = []
                 # we measure columns not in bytes or characters but in
@@ -372,7 +372,7 @@
                         row.append(l[pos:].strip())
                 rows.append(row)
 
-            block['table'] = rows
+            block[b'table'] = rows
 
     return blocks
 
@@ -391,34 +391,34 @@
         # | -------------                |
         # +------------------------------+
         if (
-            block['type'] == 'paragraph'
-            and len(block['lines']) == 2
-            and encoding.colwidth(block['lines'][0]) == len(block['lines'][1])
-            and _sectionre.match(block['lines'][1])
+            block[b'type'] == b'paragraph'
+            and len(block[b'lines']) == 2
+            and encoding.colwidth(block[b'lines'][0]) == len(block[b'lines'][1])
+            and _sectionre.match(block[b'lines'][1])
         ):
-            block['underline'] = block['lines'][1][0:1]
-            block['type'] = 'section'
-            del block['lines'][1]
+            block[b'underline'] = block[b'lines'][1][0:1]
+            block[b'type'] = b'section'
+            del block[b'lines'][1]
     return blocks
 
 
 def inlineliterals(blocks):
-    substs = [('``', '"')]
+    substs = [(b'``', b'"')]
     for b in blocks:
-        if b['type'] in ('paragraph', 'section'):
-            b['lines'] = [replace(l, substs) for l in b['lines']]
+        if b[b'type'] in (b'paragraph', b'section'):
+            b[b'lines'] = [replace(l, substs) for l in b[b'lines']]
     return blocks
 
 
 def hgrole(blocks):
-    substs = [(':hg:`', "'hg "), ('`', "'")]
+    substs = [(b':hg:`', b"'hg "), (b'`', b"'")]
     for b in blocks:
-        if b['type'] in ('paragraph', 'section'):
+        if b[b'type'] in (b'paragraph', b'section'):
             # Turn :hg:`command` into "hg command". This also works
             # when there is a line break in the command and relies on
             # the fact that we have no stray back-quotes in the input
             # (run the blocks through inlineliterals first).
-            b['lines'] = [replace(l, substs) for l in b['lines']]
+            b[b'lines'] = [replace(l, substs) for l in b[b'lines']]
     return blocks
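
Both substitution passes are plain bytes-for-bytes replacements, so only
the literals need byteifying. A toy equivalent of the hgrole pass using
bytes.replace (the real code goes through the module's replace() helper):

    line = b'See :hg:`commit` for details'
    for old, new in [(b':hg:`', b"'hg "), (b'`', b"'")]:
        line = line.replace(old, new)
    assert line == b"See 'hg commit' for details"
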
 
 
@@ -430,17 +430,17 @@
     """
     i = 1
     while i < len(blocks):
-        if blocks[i]['type'] == blocks[i - 1]['type'] and blocks[i]['type'] in (
-            'bullet',
-            'option',
-            'field',
-        ):
+        if blocks[i][b'type'] == blocks[i - 1][b'type'] and blocks[i][
+            b'type'
+        ] in (b'bullet', b'option', b'field',):
             i += 1
-        elif not blocks[i - 1]['lines']:
+        elif not blocks[i - 1][b'lines']:
             # no lines in previous block, do not separate
             i += 1
         else:
-            blocks.insert(i, {'lines': [''], 'indent': 0, 'type': 'margin'})
+            blocks.insert(
+                i, {b'lines': [b''], b'indent': 0, b'type': b'margin'}
+            )
             i += 2
     return blocks
 
@@ -450,11 +450,11 @@
     i = 0
     while i < len(blocks):
         b = blocks[i]
-        if b['type'] == 'paragraph' and (
-            b['lines'][0].startswith('.. ') or b['lines'] == ['..']
+        if b[b'type'] == b'paragraph' and (
+            b[b'lines'][0].startswith(b'.. ') or b[b'lines'] == [b'..']
         ):
             del blocks[i]
-            if i < len(blocks) and blocks[i]['type'] == 'margin':
+            if i < len(blocks) and blocks[i][b'type'] == b'margin':
                 del blocks[i]
         else:
             i += 1
@@ -469,47 +469,47 @@
     admonitions = admonitions or _admonitiontitles.keys()
 
     admonitionre = re.compile(
-        br'\.\. (%s)::' % '|'.join(sorted(admonitions)), flags=re.IGNORECASE
+        br'\.\. (%s)::' % b'|'.join(sorted(admonitions)), flags=re.IGNORECASE
     )
 
     i = 0
     while i < len(blocks):
-        m = admonitionre.match(blocks[i]['lines'][0])
+        m = admonitionre.match(blocks[i][b'lines'][0])
         if m:
-            blocks[i]['type'] = 'admonition'
-            admonitiontitle = blocks[i]['lines'][0][3 : m.end() - 2].lower()
+            blocks[i][b'type'] = b'admonition'
+            admonitiontitle = blocks[i][b'lines'][0][3 : m.end() - 2].lower()
 
-            firstline = blocks[i]['lines'][0][m.end() + 1 :]
+            firstline = blocks[i][b'lines'][0][m.end() + 1 :]
             if firstline:
-                blocks[i]['lines'].insert(1, '   ' + firstline)
+                blocks[i][b'lines'].insert(1, b'   ' + firstline)
 
-            blocks[i]['admonitiontitle'] = admonitiontitle
-            del blocks[i]['lines'][0]
+            blocks[i][b'admonitiontitle'] = admonitiontitle
+            del blocks[i][b'lines'][0]
         i = i + 1
     return blocks
 
 
 _admonitiontitles = {
-    'attention': _('Attention:'),
-    'caution': _('Caution:'),
-    'danger': _('!Danger!'),
-    'error': _('Error:'),
-    'hint': _('Hint:'),
-    'important': _('Important:'),
-    'note': _('Note:'),
-    'tip': _('Tip:'),
-    'warning': _('Warning!'),
+    b'attention': _(b'Attention:'),
+    b'caution': _(b'Caution:'),
+    b'danger': _(b'!Danger!'),
+    b'error': _(b'Error:'),
+    b'hint': _(b'Hint:'),
+    b'important': _(b'Important:'),
+    b'note': _(b'Note:'),
+    b'tip': _(b'Tip:'),
+    b'warning': _(b'Warning!'),
 }
 
 
 def formatoption(block, width):
-    desc = ' '.join(map(bytes.strip, block['lines']))
-    colwidth = encoding.colwidth(block['optstr'])
+    desc = b' '.join(map(bytes.strip, block[b'lines']))
+    colwidth = encoding.colwidth(block[b'optstr'])
     usablewidth = width - 1
-    hanging = block['optstrwidth']
-    initindent = '%s%s  ' % (block['optstr'], ' ' * ((hanging - colwidth)))
-    hangindent = ' ' * (encoding.colwidth(initindent) + 1)
-    return ' %s\n' % (
+    hanging = block[b'optstrwidth']
+    initindent = b'%s%s  ' % (block[b'optstr'], b' ' * ((hanging - colwidth)))
+    hangindent = b' ' * (encoding.colwidth(initindent) + 1)
+    return b' %s\n' % (
         stringutil.wrap(
             desc, usablewidth, initindent=initindent, hangindent=hangindent
         )
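
formatoption builds its indents with bytes repetition and bytes
%-formatting, both of which behave on Python 3 exactly as str did on
Python 2. A sketch with a made-up option string, using len() in place of
encoding.colwidth (valid for ASCII only):

    optstr = b'-v --verbose'
    hanging = 14                              # hypothetical optstrwidth
    initindent = b'%s%s  ' % (optstr, b' ' * (hanging - len(optstr)))
    assert initindent == b'-v --verbose    '  # option padded to a column
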
@@ -520,91 +520,91 @@
     """Format a block according to width."""
     if width <= 0:
         width = 78
-    indent = ' ' * block['indent']
-    if block['type'] == 'admonition':
-        admonition = _admonitiontitles[block['admonitiontitle']]
-        if not block['lines']:
-            return indent + admonition + '\n'
-        hang = len(block['lines'][-1]) - len(block['lines'][-1].lstrip())
+    indent = b' ' * block[b'indent']
+    if block[b'type'] == b'admonition':
+        admonition = _admonitiontitles[block[b'admonitiontitle']]
+        if not block[b'lines']:
+            return indent + admonition + b'\n'
+        hang = len(block[b'lines'][-1]) - len(block[b'lines'][-1].lstrip())
 
-        defindent = indent + hang * ' '
-        text = ' '.join(map(bytes.strip, block['lines']))
-        return '%s\n%s\n' % (
+        defindent = indent + hang * b' '
+        text = b' '.join(map(bytes.strip, block[b'lines']))
+        return b'%s\n%s\n' % (
             indent + admonition,
             stringutil.wrap(
                 text, width=width, initindent=defindent, hangindent=defindent
             ),
         )
-    if block['type'] == 'margin':
-        return '\n'
-    if block['type'] == 'literal':
-        indent += '  '
-        return indent + ('\n' + indent).join(block['lines']) + '\n'
-    if block['type'] == 'section':
-        underline = encoding.colwidth(block['lines'][0]) * block['underline']
-        return "%s%s\n%s%s\n" % (indent, block['lines'][0], indent, underline)
-    if block['type'] == 'table':
-        table = block['table']
+    if block[b'type'] == b'margin':
+        return b'\n'
+    if block[b'type'] == b'literal':
+        indent += b'  '
+        return indent + (b'\n' + indent).join(block[b'lines']) + b'\n'
+    if block[b'type'] == b'section':
+        underline = encoding.colwidth(block[b'lines'][0]) * block[b'underline']
+        return b"%s%s\n%s%s\n" % (indent, block[b'lines'][0], indent, underline)
+    if block[b'type'] == b'table':
+        table = block[b'table']
         # compute column widths
         widths = [max([encoding.colwidth(e) for e in c]) for c in zip(*table)]
-        text = ''
+        text = b''
         span = sum(widths) + len(widths) - 1
-        indent = ' ' * block['indent']
-        hang = ' ' * (len(indent) + span - widths[-1])
+        indent = b' ' * block[b'indent']
+        hang = b' ' * (len(indent) + span - widths[-1])
 
         for row in table:
             l = []
             for w, v in zip(widths, row):
-                pad = ' ' * (w - encoding.colwidth(v))
+                pad = b' ' * (w - encoding.colwidth(v))
                 l.append(v + pad)
-            l = ' '.join(l)
+            l = b' '.join(l)
             l = stringutil.wrap(
                 l, width=width, initindent=indent, hangindent=hang
             )
-            if not text and block['header']:
-                text = l + '\n' + indent + '-' * (min(width, span)) + '\n'
+            if not text and block[b'header']:
+                text = l + b'\n' + indent + b'-' * (min(width, span)) + b'\n'
             else:
-                text += l + "\n"
+                text += l + b"\n"
         return text
-    if block['type'] == 'definition':
-        term = indent + block['lines'][0]
-        hang = len(block['lines'][-1]) - len(block['lines'][-1].lstrip())
-        defindent = indent + hang * ' '
-        text = ' '.join(map(bytes.strip, block['lines'][1:]))
-        return '%s\n%s\n' % (
+    if block[b'type'] == b'definition':
+        term = indent + block[b'lines'][0]
+        hang = len(block[b'lines'][-1]) - len(block[b'lines'][-1].lstrip())
+        defindent = indent + hang * b' '
+        text = b' '.join(map(bytes.strip, block[b'lines'][1:]))
+        return b'%s\n%s\n' % (
             term,
             stringutil.wrap(
                 text, width=width, initindent=defindent, hangindent=defindent
             ),
         )
     subindent = indent
-    if block['type'] == 'bullet':
-        if block['lines'][0].startswith('| '):
+    if block[b'type'] == b'bullet':
+        if block[b'lines'][0].startswith(b'| '):
             # Remove bullet for line blocks and add no extra
             # indentation.
-            block['lines'][0] = block['lines'][0][2:]
+            block[b'lines'][0] = block[b'lines'][0][2:]
         else:
-            m = _bulletre.match(block['lines'][0])
-            subindent = indent + m.end() * ' '
-    elif block['type'] == 'field':
-        key = block['key']
-        subindent = indent + _fieldwidth * ' '
+            m = _bulletre.match(block[b'lines'][0])
+            subindent = indent + m.end() * b' '
+    elif block[b'type'] == b'field':
+        key = block[b'key']
+        subindent = indent + _fieldwidth * b' '
         if len(key) + 2 > _fieldwidth:
             # key too large, use full line width
             key = key.ljust(width)
         else:
             # key fits within field width
             key = key.ljust(_fieldwidth)
-        block['lines'][0] = key + block['lines'][0]
-    elif block['type'] == 'option':
+        block[b'lines'][0] = key + block[b'lines'][0]
+    elif block[b'type'] == b'option':
         return formatoption(block, width)
 
-    text = ' '.join(map(bytes.strip, block['lines']))
+    text = b' '.join(map(bytes.strip, block[b'lines']))
     return (
         stringutil.wrap(
             text, width=width, initindent=indent, hangindent=subindent
         )
-        + '\n'
+        + b'\n'
     )
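
The unbound bytes.strip passed to map() is the same trick used throughout
this file: the lines are bytes on Python 3, so the bytes method applies
directly. For example:

    lines = [b'  first line ', b'   second line  ']
    assert b' '.join(map(bytes.strip, lines)) == b'first line second line'
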
 
 
@@ -612,7 +612,7 @@
     """Format RST blocks as HTML"""
 
     out = []
-    headernest = ''
+    headernest = b''
     listnest = []
 
     def escape(s):
@@ -621,91 +621,91 @@
     def openlist(start, level):
         if not listnest or listnest[-1][0] != start:
             listnest.append((start, level))
-            out.append('<%s>\n' % start)
+            out.append(b'<%s>\n' % start)
 
-    blocks = [b for b in blocks if b['type'] != 'margin']
+    blocks = [b for b in blocks if b[b'type'] != b'margin']
 
     for pos, b in enumerate(blocks):
-        btype = b['type']
-        level = b['indent']
-        lines = b['lines']
+        btype = b[b'type']
+        level = b[b'indent']
+        lines = b[b'lines']
 
-        if btype == 'admonition':
-            admonition = escape(_admonitiontitles[b['admonitiontitle']])
-            text = escape(' '.join(map(bytes.strip, lines)))
-            out.append('<p>\n<b>%s</b> %s\n</p>\n' % (admonition, text))
-        elif btype == 'paragraph':
-            out.append('<p>\n%s\n</p>\n' % escape('\n'.join(lines)))
-        elif btype == 'margin':
+        if btype == b'admonition':
+            admonition = escape(_admonitiontitles[b[b'admonitiontitle']])
+            text = escape(b' '.join(map(bytes.strip, lines)))
+            out.append(b'<p>\n<b>%s</b> %s\n</p>\n' % (admonition, text))
+        elif btype == b'paragraph':
+            out.append(b'<p>\n%s\n</p>\n' % escape(b'\n'.join(lines)))
+        elif btype == b'margin':
             pass
-        elif btype == 'literal':
-            out.append('<pre>\n%s\n</pre>\n' % escape('\n'.join(lines)))
-        elif btype == 'section':
-            i = b['underline']
+        elif btype == b'literal':
+            out.append(b'<pre>\n%s\n</pre>\n' % escape(b'\n'.join(lines)))
+        elif btype == b'section':
+            i = b[b'underline']
             if i not in headernest:
                 headernest += i
             level = headernest.index(i) + 1
-            out.append('<h%d>%s</h%d>\n' % (level, escape(lines[0]), level))
-        elif btype == 'table':
-            table = b['table']
-            out.append('<table>\n')
+            out.append(b'<h%d>%s</h%d>\n' % (level, escape(lines[0]), level))
+        elif btype == b'table':
+            table = b[b'table']
+            out.append(b'<table>\n')
             for row in table:
-                out.append('<tr>')
+                out.append(b'<tr>')
                 for v in row:
-                    out.append('<td>')
+                    out.append(b'<td>')
                     out.append(escape(v))
-                    out.append('</td>')
-                    out.append('\n')
+                    out.append(b'</td>')
+                    out.append(b'\n')
                 out.pop()
-                out.append('</tr>\n')
-            out.append('</table>\n')
-        elif btype == 'definition':
-            openlist('dl', level)
+                out.append(b'</tr>\n')
+            out.append(b'</table>\n')
+        elif btype == b'definition':
+            openlist(b'dl', level)
             term = escape(lines[0])
-            text = escape(' '.join(map(bytes.strip, lines[1:])))
-            out.append(' <dt>%s\n <dd>%s\n' % (term, text))
-        elif btype == 'bullet':
-            bullet, head = lines[0].split(' ', 1)
-            if bullet in ('*', '-'):
-                openlist('ul', level)
+            text = escape(b' '.join(map(bytes.strip, lines[1:])))
+            out.append(b' <dt>%s\n <dd>%s\n' % (term, text))
+        elif btype == b'bullet':
+            bullet, head = lines[0].split(b' ', 1)
+            if bullet in (b'*', b'-'):
+                openlist(b'ul', level)
             else:
-                openlist('ol', level)
-            out.append(' <li> %s\n' % escape(' '.join([head] + lines[1:])))
-        elif btype == 'field':
-            openlist('dl', level)
-            key = escape(b['key'])
-            text = escape(' '.join(map(bytes.strip, lines)))
-            out.append(' <dt>%s\n <dd>%s\n' % (key, text))
-        elif btype == 'option':
-            openlist('dl', level)
-            opt = escape(b['optstr'])
-            desc = escape(' '.join(map(bytes.strip, lines)))
-            out.append(' <dt>%s\n <dd>%s\n' % (opt, desc))
+                openlist(b'ol', level)
+            out.append(b' <li> %s\n' % escape(b' '.join([head] + lines[1:])))
+        elif btype == b'field':
+            openlist(b'dl', level)
+            key = escape(b[b'key'])
+            text = escape(b' '.join(map(bytes.strip, lines)))
+            out.append(b' <dt>%s\n <dd>%s\n' % (key, text))
+        elif btype == b'option':
+            openlist(b'dl', level)
+            opt = escape(b[b'optstr'])
+            desc = escape(b' '.join(map(bytes.strip, lines)))
+            out.append(b' <dt>%s\n <dd>%s\n' % (opt, desc))
 
         # close lists if indent level of next block is lower
         if listnest:
             start, level = listnest[-1]
             if pos == len(blocks) - 1:
-                out.append('</%s>\n' % start)
+                out.append(b'</%s>\n' % start)
                 listnest.pop()
             else:
                 nb = blocks[pos + 1]
-                ni = nb['indent']
+                ni = nb[b'indent']
                 if ni < level or (
                     ni == level
-                    and nb['type'] not in 'definition bullet field option'
+                    and nb[b'type'] not in b'definition bullet field option'
                 ):
-                    out.append('</%s>\n' % start)
+                    out.append(b'</%s>\n' % start)
                     listnest.pop()
 
-    return ''.join(out)
+    return b''.join(out)
 
 
 def parse(text, indent=0, keep=None, admonitions=None):
     """Parse text into a list of blocks"""
     blocks = findblocks(text)
     for b in blocks:
-        b['indent'] += indent
+        b[b'indent'] += indent
     blocks = findliteralblocks(blocks)
     blocks = findtables(blocks)
     blocks, pruned = prunecontainers(blocks, keep or [])
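
The HTML formatter above leans on %-interpolation of bytes, for example
b'<h%d>%s</h%d>\n', which PEP 461 added in Python 3.5; that is the floor
this byteification assumes. A standalone check:

    level, title = 2, b'Commands'
    assert b'<h%d>%s</h%d>\n' % (level, title, level) == b'<h2>Commands</h2>\n'
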
@@ -722,21 +722,21 @@
 
 
 def formatblocks(blocks, width):
-    text = ''.join(formatblock(b, width) for b in blocks)
+    text = b''.join(formatblock(b, width) for b in blocks)
     return text
 
 
 def formatplain(blocks, width):
     """Format parsed blocks as plain text"""
-    return ''.join(formatblock(b, width) for b in blocks)
+    return b''.join(formatblock(b, width) for b in blocks)
 
 
-def format(text, width=80, indent=0, keep=None, style='plain', section=None):
+def format(text, width=80, indent=0, keep=None, style=b'plain', section=None):
     """Parse and format the text according to width."""
     blocks, pruned = parse(text, indent, keep or [])
     if section:
         blocks = filtersections(blocks, section)
-    if style == 'html':
+    if style == b'html':
         return formathtml(blocks)
     else:
         return formatplain(blocks, width=width)
@@ -759,7 +759,7 @@
         path, nest, b = sections[i]
         del parents[nest:]
         parents.append(i)
-        if path == section or path.endswith('.' + section):
+        if path == section or path.endswith(b'.' + section):
             if lastparents != parents:
                 llen = len(lastparents)
                 plen = len(parents)
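
filtersections matches a requested section either exactly or as a dotted
suffix, now with bytes on both sides. With a made-up section path:

    path, section = b'hooks.pretxncommit', b'pretxncommit'
    assert path == section or path.endswith(b'.' + section)
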
@@ -787,11 +787,11 @@
     if collapse:
         synthetic.reverse()
         for s in synthetic:
-            path = [blocks[syn]['lines'][0] for syn in s]
+            path = [blocks[syn][b'lines'][0] for syn in s]
             real = s[-1] + 2
-            realline = blocks[real]['lines']
-            realline[0] = '"%s"' % '.'.join(path + [realline[0]]).replace(
-                '"', ''
+            realline = blocks[real][b'lines']
+            realline[0] = b'"%s"' % b'.'.join(path + [realline[0]]).replace(
+                b'"', b''
             )
             del blocks[s[0] : real]
 
@@ -800,31 +800,31 @@
 
 def _getsections(blocks):
     '''return a list of (section path, nesting level, blocks) tuples'''
-    nest = ""
+    nest = b""
     names = ()
     secs = []
 
     def getname(b):
-        if b['type'] == 'field':
-            x = b['key']
+        if b[b'type'] == b'field':
+            x = b[b'key']
         else:
-            x = b['lines'][0]
-        x = encoding.lower(x).strip('"')
-        if '(' in x:
-            x = x.split('(')[0]
+            x = b[b'lines'][0]
+        x = encoding.lower(x).strip(b'"')
+        if b'(' in x:
+            x = x.split(b'(')[0]
         return x
 
     for b in blocks:
-        if b['type'] == 'section':
-            i = b['underline']
+        if b[b'type'] == b'section':
+            i = b[b'underline']
             if i not in nest:
                 nest += i
             level = nest.index(i) + 1
             nest = nest[:level]
             names = names[:level] + (getname(b),)
-            secs.append(('.'.join(names), level, [b]))
-        elif b['type'] in ('definition', 'field'):
-            i = ' '
+            secs.append((b'.'.join(names), level, [b]))
+        elif b[b'type'] in (b'definition', b'field'):
+            i = b' '
             if i not in nest:
                 nest += i
             level = nest.index(i) + 1
@@ -833,10 +833,10 @@
                 sec = secs[-i]
                 if sec[1] < level:
                     break
-                siblings = [a for a in sec[2] if a['type'] == 'definition']
+                siblings = [a for a in sec[2] if a[b'type'] == b'definition']
                 if siblings:
-                    siblingindent = siblings[-1]['indent']
-                    indent = b['indent']
+                    siblingindent = siblings[-1][b'indent']
+                    indent = b[b'indent']
                     if siblingindent < indent:
                         level += 1
                         break
@@ -844,30 +844,30 @@
                         level = sec[1]
                         break
             names = names[:level] + (getname(b),)
-            secs.append(('.'.join(names), level, [b]))
+            secs.append((b'.'.join(names), level, [b]))
         else:
             if not secs:
                 # add an initial empty section
-                secs = [('', 0, [])]
-            if b['type'] != 'margin':
+                secs = [(b'', 0, [])]
+            if b[b'type'] != b'margin':
                 pointer = 1
-                bindent = b['indent']
+                bindent = b[b'indent']
                 while pointer < len(secs):
                     section = secs[-pointer][2][0]
-                    if section['type'] != 'margin':
-                        sindent = section['indent']
-                        if len(section['lines']) > 1:
-                            sindent += len(section['lines'][1]) - len(
-                                section['lines'][1].lstrip(' ')
+                    if section[b'type'] != b'margin':
+                        sindent = section[b'indent']
+                        if len(section[b'lines']) > 1:
+                            sindent += len(section[b'lines'][1]) - len(
+                                section[b'lines'][1].lstrip(b' ')
                             )
                         if bindent >= sindent:
                             break
                     pointer += 1
                 if pointer > 1:
                     blevel = secs[-pointer][1]
-                    if section['type'] != b['type']:
+                    if section[b'type'] != b[b'type']:
                         blevel += 1
-                    secs.append(('', blevel, []))
+                    secs.append((b'', blevel, []))
             secs[-1][2].append(b)
     return secs
 
@@ -876,20 +876,20 @@
     '''Generate an RST table for the given table data as a list of lines'''
 
     widths = [max(encoding.colwidth(e) for e in c) for c in zip(*data)]
-    indent = ' ' * indent
-    div = indent + ' '.join('=' * w for w in widths) + '\n'
+    indent = b' ' * indent
+    div = indent + b' '.join(b'=' * w for w in widths) + b'\n'
 
     out = [div]
     for row in data:
         l = []
         for w, v in zip(widths, row):
-            if '\n' in v:
+            if b'\n' in v:
                 # only remove line breaks and indentation, long lines are
                 # handled by the next tool
-                v = ' '.join(e.lstrip() for e in v.split('\n'))
-            pad = ' ' * (w - encoding.colwidth(v))
+                v = b' '.join(e.lstrip() for e in v.split(b'\n'))
+            pad = b' ' * (w - encoding.colwidth(v))
             l.append(v + pad)
-        out.append(indent + ' '.join(l) + "\n")
+        out.append(indent + b' '.join(l) + b"\n")
     if header and len(data) > 1:
         out.insert(2, div)
     out.append(div)
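
maketable is easy to exercise on its own. A condensed bytes-only
equivalent with made-up rows, substituting len() for encoding.colwidth
(ASCII data only) and bytes.ljust for the manual padding:

    data = [[b'name', b'size'], [b'spam', b'42']]
    widths = [max(len(e) for e in c) for c in zip(*data)]
    div = b' '.join(b'=' * w for w in widths) + b'\n'
    out = [div]
    for row in data:
        out.append(b' '.join(v.ljust(w) for w, v in zip(widths, row)) + b'\n')
    out.insert(2, div)            # header divider, as when header=True
    out.append(div)
    # b''.join(out) is a valid five-line RST simple table, bytes throughout
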
--- a/mercurial/namespaces.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/namespaces.py	Sun Oct 06 09:48:39 2019 -0400
@@ -36,9 +36,9 @@
         bmknamemap = lambda repo, name: tolist(repo._bookmarks.get(name))
         bmknodemap = lambda repo, node: repo.nodebookmarks(node)
         n = namespace(
-            "bookmarks",
-            templatename="bookmark",
-            logfmt=columns['bookmark'],
+            b"bookmarks",
+            templatename=b"bookmark",
+            logfmt=columns[b'bookmark'],
             listnames=bmknames,
             namemap=bmknamemap,
             nodemap=bmknodemap,
@@ -50,13 +50,13 @@
         tagnamemap = lambda repo, name: tolist(repo._tagscache.tags.get(name))
         tagnodemap = lambda repo, node: repo.nodetags(node)
         n = namespace(
-            "tags",
-            templatename="tag",
-            logfmt=columns['tag'],
+            b"tags",
+            templatename=b"tag",
+            logfmt=columns[b'tag'],
             listnames=tagnames,
             namemap=tagnamemap,
             nodemap=tagnodemap,
-            deprecated={'tip'},
+            deprecated={b'tip'},
             builtin=True,
         )
         self.addnamespace(n)
@@ -65,9 +65,9 @@
         bnamemap = lambda repo, name: tolist(repo.branchtip(name, True))
         bnodemap = lambda repo, node: [repo[node].branch()]
         n = namespace(
-            "branches",
-            templatename="branch",
-            logfmt=columns['branch'],
+            b"branches",
+            templatename=b"branch",
+            logfmt=columns[b'branch'],
             listnames=bnames,
             namemap=bnamemap,
             nodemap=bnodemap,
@@ -104,7 +104,7 @@
         if namespace.name not in templatekw.keywords:
             templatekeyword = registrar.templatekeyword(templatekw.keywords)
 
-            @templatekeyword(namespace.name, requires={'repo', 'ctx'})
+            @templatekeyword(namespace.name, requires={b'repo', b'ctx'})
             def generatekw(context, mapping):
                 return templatekw.shownames(context, mapping, namespace.name)
 
@@ -120,7 +120,7 @@
             n = v.singlenode(repo, name)
             if n:
                 return n
-        raise KeyError(_('no such name: %s') % name)
+        raise KeyError(_(b'no such name: %s') % name)
 
 
 class namespace(object):
@@ -204,7 +204,7 @@
         # if logfmt is not specified, compose it from logname as backup
         if self.logfmt is None:
             # i18n: column positioning for "hg log"
-            self.logfmt = ("%s:" % self.logname).ljust(13) + "%s\n"
+            self.logfmt = (b"%s:" % self.logname).ljust(13) + b"%s\n"
 
         if deprecated is None:
             self.deprecated = set()
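
The composed logfmt works unchanged with bytes because ljust is defined
on bytes as well as str:

    logname = b'bookmark'
    logfmt = (b"%s:" % logname).ljust(13) + b"%s\n"
    assert logfmt == b'bookmark:    %s\n'
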
--- a/mercurial/narrowspec.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/narrowspec.py	Sun Oct 06 09:48:39 2019 -0400
@@ -19,9 +19,9 @@
 )
 
 # The file in .hg/store/ that indicates which paths exist in the store
-FILENAME = 'narrowspec'
+FILENAME = b'narrowspec'
 # The file in .hg/ that indicates which paths exist in the dirstate
-DIRSTATE_FILENAME = 'narrowspec.dirstate'
+DIRSTATE_FILENAME = b'narrowspec.dirstate'
 
 # Pattern prefixes that are allowed in narrow patterns. This list MUST
 # only contain patterns that are fast and safe to evaluate. Keep in mind
@@ -40,7 +40,7 @@
 
     Returns a tuple with the normalized kind and normalized pattern.
     """
-    pat = pat.rstrip('/')
+    pat = pat.rstrip(b'/')
     _validatepattern(pat)
     return kind, pat
 
@@ -50,7 +50,7 @@
     # We use splitlines because it is Unicode-friendly and thus Python 3
     # compatible. However, it does not count empty lines at the end, so trick
     # it by adding a character at the end.
-    return len((s + 'x').splitlines())
+    return len((s + b'x').splitlines())
 
 
 def _validatepattern(pat):
@@ -63,20 +63,22 @@
     # We use newlines as separators in the narrowspec file, so don't allow them
     # in patterns.
     if _numlines(pat) > 1:
-        raise error.Abort(_('newlines are not allowed in narrowspec paths'))
+        raise error.Abort(_(b'newlines are not allowed in narrowspec paths'))
 
-    components = pat.split('/')
-    if '.' in components or '..' in components:
-        raise error.Abort(_('"." and ".." are not allowed in narrowspec paths'))
+    components = pat.split(b'/')
+    if b'.' in components or b'..' in components:
+        raise error.Abort(
+            _(b'"." and ".." are not allowed in narrowspec paths')
+        )
 
 
-def normalizepattern(pattern, defaultkind='path'):
+def normalizepattern(pattern, defaultkind=b'path'):
     """Returns the normalized version of a text-format pattern.
 
     If the pattern has no kind, the default will be added.
     """
     kind, pat = matchmod._patsplit(pattern, defaultkind)
-    return '%s:%s' % normalizesplitpattern(kind, pat)
+    return b'%s:%s' % normalizesplitpattern(kind, pat)
 
 
 def parsepatterns(pats):
@@ -107,7 +109,7 @@
     """
     if not isinstance(pats, set):
         raise error.ProgrammingError(
-            'narrow patterns should be a set; ' 'got %r' % pats
+            b'narrow patterns should be a set; ' b'got %r' % pats
         )
 
     for pat in pats:
@@ -115,22 +117,22 @@
             # Use a Mercurial exception because this can happen due to user
             # bugs (e.g. manually updating spec file).
             raise error.Abort(
-                _('invalid prefix on narrow pattern: %s') % pat,
+                _(b'invalid prefix on narrow pattern: %s') % pat,
                 hint=_(
-                    'narrow patterns must begin with one of '
-                    'the following: %s'
+                    b'narrow patterns must begin with one of '
+                    b'the following: %s'
                 )
-                % ', '.join(VALID_PREFIXES),
+                % b', '.join(VALID_PREFIXES),
             )
 
 
 def format(includes, excludes):
-    output = '[include]\n'
+    output = b'[include]\n'
     for i in sorted(includes - excludes):
-        output += i + '\n'
-    output += '[exclude]\n'
+        output += i + b'\n'
+    output += b'[exclude]\n'
     for e in sorted(excludes):
-        output += e + '\n'
+        output += e + b'\n'
     return output
 
 
@@ -141,18 +143,18 @@
         # the nevermatcher.
         return matchmod.never()
     return matchmod.match(
-        root, '', [], include=include or [], exclude=exclude or []
+        root, b'', [], include=include or [], exclude=exclude or []
     )
 
 
 def parseconfig(ui, spec):
     # maybe we should care about the profiles returned too
-    includepats, excludepats, profiles = sparse.parseconfig(ui, spec, 'narrow')
+    includepats, excludepats, profiles = sparse.parseconfig(ui, spec, b'narrow')
     if profiles:
         raise error.Abort(
             _(
-                "including other spec files using '%include' is not"
-                " supported in narrowspec"
+                b"including other spec files using '%include' is not"
+                b" supported in narrowspec"
             )
         )
 
@@ -251,7 +253,7 @@
     invalid_includes = []
     if not req_includes:
         res_includes = set(repo_includes)
-    elif 'path:.' not in repo_includes:
+    elif b'path:.' not in repo_includes:
         res_includes = []
         for req_include in req_includes:
             req_include = util.expandpath(util.normpath(req_include))
@@ -260,14 +262,14 @@
                 continue
             valid = False
             for repo_include in repo_includes:
-                if req_include.startswith(repo_include + '/'):
+                if req_include.startswith(repo_include + b'/'):
                     valid = True
                     res_includes.append(req_include)
                     break
             if not valid:
                 invalid_includes.append(req_include)
         if len(res_includes) == 0:
-            res_excludes = {'path:.'}
+            res_excludes = {b'path:.'}
         else:
             res_includes = set(res_includes)
     else:
@@ -285,15 +287,15 @@
 def _writeaddedfiles(repo, pctx, files):
     actions = merge.emptyactions()
     addgaction = actions[merge.ACTION_GET].append
-    mf = repo['.'].manifest()
+    mf = repo[b'.'].manifest()
     for f in files:
         if not repo.wvfs.exists(f):
-            addgaction((f, (mf.flags(f), False), "narrowspec updated"))
+            addgaction((f, (mf.flags(f), False), b"narrowspec updated"))
     merge.applyupdates(
         repo,
         actions,
         wctx=repo[None],
-        mctx=repo['.'],
+        mctx=repo[b'.'],
         overwrite=False,
         wantfiledata=False,
     )
@@ -307,8 +309,8 @@
     wcspec = repo.vfs.tryread(DIRSTATE_FILENAME)
     if wcspec != storespec:
         raise error.Abort(
-            _("working copy's narrowspec is stale"),
-            hint=_("run 'hg tracked --update-working-copy'"),
+            _(b"working copy's narrowspec is stale"),
+            hint=_(b"run 'hg tracked --update-working-copy'"),
         )
 
 
@@ -343,15 +345,17 @@
     _deletecleanfiles(repo, clean)
     uipathfn = scmutil.getuipathfn(repo)
     for f in sorted(trackeddirty):
-        repo.ui.status(_('not deleting possibly dirty file %s\n') % uipathfn(f))
+        repo.ui.status(
+            _(b'not deleting possibly dirty file %s\n') % uipathfn(f)
+        )
     for f in sorted(status.unknown):
-        repo.ui.status(_('not deleting unknown file %s\n') % uipathfn(f))
+        repo.ui.status(_(b'not deleting unknown file %s\n') % uipathfn(f))
     for f in sorted(status.ignored):
-        repo.ui.status(_('not deleting ignored file %s\n') % uipathfn(f))
+        repo.ui.status(_(b'not deleting ignored file %s\n') % uipathfn(f))
     for f in clean + trackeddirty:
         ds.drop(f)
 
-    pctx = repo['.']
+    pctx = repo[b'.']
     newfiles = [f for f in pctx.manifest().walk(addedmatch) if f not in ds]
     for f in newfiles:
         ds.normallookup(f)
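
The _numlines helper above survives byteification because splitlines and
concatenation exist on bytes; the appended byte makes trailing newlines
count. A quick check of the property the pattern validator relies on:

    def numlines(s):
        return len((s + b'x').splitlines())

    assert numlines(b'path:foo') == 1       # single line: accepted
    assert numlines(b'path:foo\n') == 2     # embedded newline: rejected
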
--- a/mercurial/node.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/node.py	Sun Oct 06 09:48:39 2019 -0400
@@ -29,11 +29,11 @@
 # Phony node value to stand-in for new files in some uses of
 # manifests.
 # In hex, this is '2121212121212121212121212121212121212121'
-newnodeid = '!!!!!!!!!!!!!!!!!!!!'
+newnodeid = b'!!!!!!!!!!!!!!!!!!!!'
 # In hex, this is '3030303030303030303030303030306164646564'
-addednodeid = '000000000000000added'
+addednodeid = b'000000000000000added'
 # In hex, this is '3030303030303030303030306d6f646966696564'
-modifiednodeid = '000000000000modified'
+modifiednodeid = b'000000000000modified'
 
 wdirfilenodeids = {newnodeid, addednodeid, modifiednodeid}
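
The hex values quoted in the comments can be verified directly; the
sentinels are exactly twenty bytes, like real binary SHA-1 nodes:

    from binascii import hexlify
    newnodeid = b'!!!!!!!!!!!!!!!!!!!!'
    assert len(newnodeid) == 20
    assert hexlify(newnodeid) == b'2121212121212121212121212121212121212121'
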
 
--- a/mercurial/obsolete.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/obsolete.py	Sun Oct 06 09:48:39 2019 -0400
@@ -94,17 +94,17 @@
 propertycache = util.propertycache
 
 # Options for obsolescence
-createmarkersopt = 'createmarkers'
-allowunstableopt = 'allowunstable'
-exchangeopt = 'exchange'
+createmarkersopt = b'createmarkers'
+allowunstableopt = b'allowunstable'
+exchangeopt = b'exchange'
 
 
 def _getoptionvalue(repo, option):
     """Returns True if the given repository has the given obsolete option
     enabled.
     """
-    configkey = 'evolution.%s' % option
-    newconfig = repo.ui.configbool('experimental', configkey)
+    configkey = b'evolution.%s' % option
+    newconfig = repo.ui.configbool(b'experimental', configkey)
 
     # Return the value only if defined
     if newconfig is not None:
@@ -112,19 +112,19 @@
 
     # Fallback on generic option
     try:
-        return repo.ui.configbool('experimental', 'evolution')
+        return repo.ui.configbool(b'experimental', b'evolution')
     except (error.ConfigError, AttributeError):
         # Fallback on old-fashion config
         # inconsistent config: experimental.evolution
-        result = set(repo.ui.configlist('experimental', 'evolution'))
+        result = set(repo.ui.configlist(b'experimental', b'evolution'))
 
-        if 'all' in result:
+        if b'all' in result:
             return True
 
         # Temporary hack for next check
-        newconfig = repo.ui.config('experimental', 'evolution.createmarkers')
+        newconfig = repo.ui.config(b'experimental', b'evolution.createmarkers')
         if newconfig:
-            result.add('createmarkers')
+            result.add(b'createmarkers')
 
         return option in result
 
@@ -140,8 +140,8 @@
     if (unstablevalue or exchangevalue) and not createmarkersvalue:
         raise error.Abort(
             _(
-                "'createmarkers' obsolete option must be enabled "
-                "if other obsolete options are enabled"
+                b"'createmarkers' obsolete option must be enabled "
+                b"if other obsolete options are enabled"
             )
         )
 
@@ -185,8 +185,8 @@
 #   additional encoding. Keys cannot contain '\0' or ':' and values
 #   cannot contain '\0'.
 _fm0version = 0
-_fm0fixed = '>BIB20s'
-_fm0node = '20s'
+_fm0fixed = b'>BIB20s'
+_fm0node = b'20s'
 _fm0fsize = _calcsize(_fm0fixed)
 _fm0fnodesize = _calcsize(_fm0node)
 
@@ -211,24 +211,24 @@
         if len(metadata) != mdsize:
             raise error.Abort(
                 _(
-                    'parsing obsolete marker: metadata is too '
-                    'short, %d bytes expected, got %d'
+                    b'parsing obsolete marker: metadata is too '
+                    b'short, %d bytes expected, got %d'
                 )
                 % (mdsize, len(metadata))
             )
         off += mdsize
         metadata = _fm0decodemeta(metadata)
         try:
-            when, offset = metadata.pop('date', '0 0').split(' ')
+            when, offset = metadata.pop(b'date', b'0 0').split(b' ')
             date = float(when), int(offset)
         except ValueError:
             date = (0.0, 0)
         parents = None
-        if 'p2' in metadata:
-            parents = (metadata.pop('p1', None), metadata.pop('p2', None))
-        elif 'p1' in metadata:
-            parents = (metadata.pop('p1', None),)
-        elif 'p0' in metadata:
+        if b'p2' in metadata:
+            parents = (metadata.pop(b'p1', None), metadata.pop(b'p2', None))
+        elif b'p1' in metadata:
+            parents = (metadata.pop(b'p1', None),)
+        elif b'p0' in metadata:
             parents = ()
         if parents is not None:
             try:
@@ -250,16 +250,16 @@
 def _fm0encodeonemarker(marker):
     pre, sucs, flags, metadata, date, parents = marker
     if flags & usingsha256:
-        raise error.Abort(_('cannot handle sha256 with old obsstore format'))
+        raise error.Abort(_(b'cannot handle sha256 with old obsstore format'))
     metadata = dict(metadata)
     time, tz = date
-    metadata['date'] = '%r %i' % (time, tz)
+    metadata[b'date'] = b'%r %i' % (time, tz)
     if parents is not None:
         if not parents:
             # mark that we explicitly recorded no parents
-            metadata['p0'] = ''
+            metadata[b'p0'] = b''
         for i, p in enumerate(parents, 1):
-            metadata['p%i' % i] = node.hex(p)
+            metadata[b'p%i' % i] = node.hex(p)
     metadata = _fm0encodemeta(metadata)
     numsuc = len(sucs)
     format = _fm0fixed + (_fm0node * numsuc)
@@ -273,19 +273,19 @@
 
     Assume no ':' in key and no '\0' in either key or value."""
     for key, value in meta.iteritems():
-        if ':' in key or '\0' in key:
-            raise ValueError("':' and '\0' are forbidden in metadata key'")
-        if '\0' in value:
-            raise ValueError("':' is forbidden in metadata value'")
-    return '\0'.join(['%s:%s' % (k, meta[k]) for k in sorted(meta)])
+        if b':' in key or b'\0' in key:
+            raise ValueError(b"':' and '\0' are forbidden in metadata key'")
+        if b'\0' in value:
+            raise ValueError(b"':' is forbidden in metadata value'")
+    return b'\0'.join([b'%s:%s' % (k, meta[k]) for k in sorted(meta)])
 
 
 def _fm0decodemeta(data):
     """Return string to string dictionary from encoded version."""
     d = {}
-    for l in data.split('\0'):
+    for l in data.split(b'\0'):
         if l:
-            key, value = l.split(':', 1)
+            key, value = l.split(b':', 1)
             d[key] = value
     return d
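
The two helpers above round-trip a bytes dict through the '\0'- and
':'-delimited wire form. A self-contained copy shows the invariant:

    def encodemeta(meta):
        return b'\0'.join([b'%s:%s' % (k, meta[k]) for k in sorted(meta)])

    def decodemeta(data):
        d = {}
        for l in data.split(b'\0'):
            if l:
                key, value = l.split(b':', 1)
                d[key] = value
        return d

    meta = {b'user': b'alice', b'date': b'0 0'}
    assert decodemeta(encodemeta(meta)) == meta
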
 
@@ -325,16 +325,16 @@
 #
 # - remaining bytes: the metadata, each (key, value) pair after the other.
 _fm1version = 1
-_fm1fixed = '>IdhHBBB20s'
-_fm1nodesha1 = '20s'
-_fm1nodesha256 = '32s'
+_fm1fixed = b'>IdhHBBB20s'
+_fm1nodesha1 = b'20s'
+_fm1nodesha256 = b'32s'
 _fm1nodesha1size = _calcsize(_fm1nodesha1)
 _fm1nodesha256size = _calcsize(_fm1nodesha256)
 _fm1fsize = _calcsize(_fm1fixed)
 _fm1parentnone = 3
 _fm1parentshift = 14
 _fm1parentmask = _fm1parentnone << _fm1parentshift
-_fm1metapair = 'BB'
+_fm1metapair = b'BB'
 _fm1metapairsize = _calcsize(_fm1metapair)
 
 
@@ -402,7 +402,7 @@
 
         # read metadata
         off = o3 + metasize * nummeta
-        metapairsize = unpack('>' + (metafmt * nummeta), data[o3:off])
+        metapairsize = unpack(b'>' + (metafmt * nummeta), data[o3:off])
         metadata = []
         for idx in pycompat.xrange(0, len(metapairsize), 2):
             o1 = off + metapairsize[idx]
@@ -441,14 +441,14 @@
         lv = len(value)
         if lk > 255:
             msg = (
-                'obsstore metadata key cannot be longer than 255 bytes'
-                ' (key "%s" is %u bytes)'
+                b'obsstore metadata key cannot be longer than 255 bytes'
+                b' (key "%s" is %u bytes)'
             ) % (key, lk)
             raise error.ProgrammingError(msg)
         if lv > 255:
             msg = (
-                'obsstore metadata value cannot be longer than 255 bytes'
-                ' (value "%s" for key "%s" is %u bytes)'
+                b'obsstore metadata value cannot be longer than 255 bytes'
+                b' (value "%s" for key "%s" is %u bytes)'
             ) % (value, key, lv)
             raise error.ProgrammingError(msg)
         data.append(lk)
@@ -459,7 +459,7 @@
     for key, value in metadata:
         data.append(key)
         data.append(value)
-    return ''.join(data)
+    return b''.join(data)
 
 
 def _fm1readmarkers(data, off, stop):
@@ -478,7 +478,7 @@
 
 
 def _readmarkerversion(data):
-    return _unpack('>B', data[0:1])[0]
+    return _unpack(b'>B', data[0:1])[0]
 
 
 @util.nogc
@@ -490,13 +490,13 @@
     if stop is None:
         stop = len(data)
     if diskversion not in formats:
-        msg = _('parsing obsolete marker: unknown version %r') % diskversion
+        msg = _(b'parsing obsolete marker: unknown version %r') % diskversion
         raise error.UnknownVersion(msg, version=diskversion)
     return diskversion, formats[diskversion][0](data, off, stop)
 
 
 def encodeheader(version=_fm0version):
-    return _pack('>B', version)
+    return _pack(b'>B', version)
 
 
 def encodemarkers(markers, addheader=False, version=_fm0version):
@@ -541,8 +541,8 @@
         if node.nullid in mark[1]:
             raise error.Abort(
                 _(
-                    'bad obsolescence marker detected: '
-                    'invalid successors nullid'
+                    b'bad obsolescence marker detected: '
+                    b'invalid successors nullid'
                 )
             )
 
@@ -556,7 +556,7 @@
     - children[x]   -> set(markers on predecessor edges of children(x))
     """
 
-    fields = ('prec', 'succs', 'flag', 'meta', 'date', 'parents')
+    fields = (b'prec', b'succs', b'flag', b'meta', b'date', b'parents')
     # prec:    nodeid, predecessors changesets
     # succs:   tuple of nodeid, successor changesets (0-N length)
     # flag:    integer, flag field carrying modifier for the markers (see doc)
@@ -581,7 +581,7 @@
     def __nonzero__(self):
         if not self._cached(r'_all'):
             try:
-                return self.svfs.stat('obsstore').st_size > 1
+                return self.svfs.stat(b'obsstore').st_size > 1
             except OSError as inst:
                 if inst.errno != errno.ENOENT:
                     raise
@@ -624,11 +624,11 @@
         if metadata is None:
             metadata = {}
         if date is None:
-            if 'date' in metadata:
+            if b'date' in metadata:
                 # as a courtesy for out-of-tree extensions
-                date = dateutil.parsedate(metadata.pop('date'))
+                date = dateutil.parsedate(metadata.pop(b'date'))
             elif ui is not None:
-                date = ui.configdate('devel', 'default-date')
+                date = ui.configdate(b'devel', b'default-date')
                 if date is None:
                     date = dateutil.makedate()
             else:
@@ -651,8 +651,8 @@
                 v.decode('utf-8')
             except UnicodeDecodeError:
                 raise error.ProgrammingError(
-                    'obsstore metadata must be valid UTF-8 sequence '
-                    '(key = %r, value = %r)'
+                    b'obsstore metadata must be valid UTF-8 sequence '
+                    b'(key = %r, value = %r)'
                     % (pycompat.bytestr(k), pycompat.bytestr(v))
                 )
 
@@ -666,7 +666,7 @@
         Return the number of new markers."""
         if self._readonly:
             raise error.Abort(
-                _('creating obsolete markers is not enabled on ' 'this repo')
+                _(b'creating obsolete markers is not enabled on ' b'this repo')
             )
         known = set()
         getsuccessors = self.successors.get
@@ -676,10 +676,10 @@
                 known.add(m)
                 new.append(m)
         if new:
-            f = self.svfs('obsstore', 'ab')
+            f = self.svfs(b'obsstore', b'ab')
             try:
                 offset = f.tell()
-                transaction.add('obsstore', offset)
+                transaction.add(b'obsstore', offset)
                 # offset == 0: new file - add the version header
                 data = b''.join(encodemarkers(new, offset == 0, self._version))
                 f.write(data)
@@ -687,15 +687,15 @@
                 # XXX: f.close() == filecache invalidation == obsstore rebuilt.
                 # call 'filecacheentry.refresh()'  here
                 f.close()
-            addedmarkers = transaction.changes.get('obsmarkers')
+            addedmarkers = transaction.changes.get(b'obsmarkers')
             if addedmarkers is not None:
                 addedmarkers.update(new)
             self._addmarkers(new, data)
             # new marker *may* have changed several set. invalidate the cache.
             self.caches.clear()
         # records the number of new markers for the transaction hooks
-        previous = int(transaction.hookargs.get('new_obsmarkers', '0'))
-        transaction.hookargs['new_obsmarkers'] = '%d' % (previous + len(new))
+        previous = int(transaction.hookargs.get(b'new_obsmarkers', b'0'))
+        transaction.hookargs[b'new_obsmarkers'] = b'%d' % (previous + len(new))
         return len(new)
 
     def mergemarkers(self, transaction, data):
@@ -707,7 +707,7 @@
 
     @propertycache
     def _data(self):
-        return self.svfs.tryread('obsstore')
+        return self.svfs.tryread(b'obsstore')
 
     @propertycache
     def _version(self):
@@ -797,7 +797,7 @@
     """Create an obsstore instance from a repo."""
     # read default format for new obsstore.
     # developer config: format.obsstore-version
-    defaultformat = ui.configint('format', 'obsstore-version')
+    defaultformat = ui.configint(b'format', b'obsstore-version')
     # rely on obsstore class default when possible.
     kwargs = {}
     if defaultformat is not None:
@@ -806,7 +806,7 @@
     store = obsstore(repo.svfs, readonly=readonly, **kwargs)
     if store and readonly:
         ui.warn(
-            _('obsolete feature not enabled but %i markers found!\n')
+            _(b'obsolete feature not enabled but %i markers found!\n')
             % len(list(store))
         )
     return store
@@ -849,8 +849,8 @@
         currentpart.append(nextdata)
         currentlen += len(nextdata)
     for idx, part in enumerate(reversed(parts)):
-        data = ''.join([_pack('>B', _fm0version)] + part)
-        keys['dump%i' % idx] = util.b85encode(data)
+        data = b''.join([_pack(b'>B', _fm0version)] + part)
+        keys[b'dump%i' % idx] = util.b85encode(data)
     return keys
 
 
@@ -863,14 +863,14 @@
 
 def pushmarker(repo, key, old, new):
     """Push markers over pushkey"""
-    if not key.startswith('dump'):
-        repo.ui.warn(_('unknown key: %r') % key)
+    if not key.startswith(b'dump'):
+        repo.ui.warn(_(b'unknown key: %r') % key)
         return False
     if old:
-        repo.ui.warn(_('unexpected old value for %r') % key)
+        repo.ui.warn(_(b'unexpected old value for %r') % key)
         return False
     data = util.b85decode(new)
-    with repo.lock(), repo.transaction('pushkey: obsolete markers') as tr:
+    with repo.lock(), repo.transaction(b'pushkey: obsolete markers') as tr:
         repo.obsstore.mergemarkers(tr, data)
         repo.invalidatevolatilesets()
         return True
@@ -885,7 +885,7 @@
 
     def decorator(func):
         if name in cachefuncs:
-            msg = "duplicated registration for volatileset '%s' (existing: %r)"
+            msg = b"duplicated registration for volatileset '%s' (existing: %r)"
             raise error.ProgrammingError(msg % (name, cachefuncs[name]))
         cachefuncs[name] = func
         return func
@@ -920,7 +920,7 @@
     (We could be smarter here given the exact event that triggers the cache
     clearing)"""
     # only clear cache if there is obsstore data in this repo
-    if 'obsstore' in repo._filecache:
+    if b'obsstore' in repo._filecache:
         repo.obsstore.caches.clear()
 
 
@@ -929,7 +929,7 @@
     return repo._phasecache.getrevset(repo, phases.mutablephases)
 
 
-@cachefor('obsolete')
+@cachefor(b'obsolete')
 def _computeobsoleteset(repo):
     """the set of obsolete revisions"""
     getnode = repo.changelog.node
@@ -939,12 +939,12 @@
     return obs
 
 
-@cachefor('orphan')
+@cachefor(b'orphan')
 def _computeorphanset(repo):
     """the set of non obsolete revisions with obsolete parents"""
     pfunc = repo.changelog.parentrevs
     mutable = _mutablerevs(repo)
-    obsolete = getrevs(repo, 'obsolete')
+    obsolete = getrevs(repo, b'obsolete')
     others = mutable - obsolete
     unstable = set()
     for r in sorted(others):
@@ -957,20 +957,20 @@
     return unstable
 
 
-@cachefor('suspended')
+@cachefor(b'suspended')
 def _computesuspendedset(repo):
     """the set of obsolete parents with non obsolete descendants"""
-    suspended = repo.changelog.ancestors(getrevs(repo, 'orphan'))
-    return set(r for r in getrevs(repo, 'obsolete') if r in suspended)
+    suspended = repo.changelog.ancestors(getrevs(repo, b'orphan'))
+    return set(r for r in getrevs(repo, b'obsolete') if r in suspended)
 
 
-@cachefor('extinct')
+@cachefor(b'extinct')
 def _computeextinctset(repo):
     """the set of obsolete parents without non obsolete descendants"""
-    return getrevs(repo, 'obsolete') - getrevs(repo, 'suspended')
+    return getrevs(repo, b'obsolete') - getrevs(repo, b'suspended')
 
 
-@cachefor('phasedivergent')
+@cachefor(b'phasedivergent')
 def _computephasedivergentset(repo):
     """the set of revs trying to obsolete public revisions"""
     bumped = set()
@@ -981,7 +981,7 @@
     torev = cl.nodemap.get
     tonode = cl.node
     obsstore = repo.obsstore
-    for rev in repo.revs('(not public()) and (not obsolete())'):
+    for rev in repo.revs(b'(not public()) and (not obsolete())'):
         # We only evaluate mutable, non-obsolete revision
         node = tonode(rev)
         # (future) A cache of predecessors may worth if split is very common
@@ -996,7 +996,7 @@
     return bumped
 
 
-@cachefor('contentdivergent')
+@cachefor(b'contentdivergent')
 def _computecontentdivergentset(repo):
     """the set of rev that compete to be the final successors of some revision.
     """
@@ -1004,7 +1004,7 @@
     obsstore = repo.obsstore
     newermap = {}
     tonode = repo.changelog.node
-    for rev in repo.revs('(not public()) - obsolete()'):
+    for rev in repo.revs(b'(not public()) - obsolete()'):
         node = tonode(rev)
         mark = obsstore.predecessors.get(node, ())
         toprocess = set(mark)
@@ -1028,7 +1028,7 @@
 
     folddigest = hashlib.sha1(user)
     for p in relation[0] + relation[1]:
-        folddigest.update('%d' % p.rev())
+        folddigest.update(b'%d' % p.rev())
         folddigest.update(p.node())
     # Since fold only has to compete against fold for the same successors, it
     # seems fine to use a small ID. Smaller IDs save space.
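
hashlib digests only accept bytes on Python 3, hence b'%d' % p.rev()
rather than a str. A toy version of the fold-id computation, with a
made-up user and hypothetical revision numbers:

    import hashlib
    digest = hashlib.sha1(b'alice')           # seeded with the user
    for rev in (3, 5):                        # stand-ins for p.rev()
        digest.update(b'%d' % rev)
    assert len(digest.digest()[:4]) == 4      # a small ID, per the comment
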
@@ -1057,23 +1057,25 @@
     # prepare metadata
     if metadata is None:
         metadata = {}
-    if 'user' not in metadata:
-        luser = repo.ui.config('devel', 'user.obsmarker') or repo.ui.username()
-        metadata['user'] = encoding.fromlocal(luser)
+    if b'user' not in metadata:
+        luser = (
+            repo.ui.config(b'devel', b'user.obsmarker') or repo.ui.username()
+        )
+        metadata[b'user'] = encoding.fromlocal(luser)
 
     # Operation metadata handling
     useoperation = repo.ui.configbool(
-        'experimental', 'evolution.track-operation'
+        b'experimental', b'evolution.track-operation'
     )
     if useoperation and operation:
-        metadata['operation'] = operation
+        metadata[b'operation'] = operation
 
     # Effect flag metadata handling
     saveeffectflag = repo.ui.configbool(
-        'experimental', 'evolution.effect-flags'
+        b'experimental', b'evolution.effect-flags'
     )
 
-    with repo.transaction('add-obsolescence-marker') as tr:
+    with repo.transaction(b'add-obsolescence-marker') as tr:
         markerargs = []
         for rel in relations:
             predecessors = rel[0]
@@ -1081,26 +1083,26 @@
                 # preserve compat with old API until all caller are migrated
                 predecessors = (predecessors,)
             if len(predecessors) > 1 and len(rel[1]) != 1:
-                msg = 'Fold markers can only have 1 successors, not %d'
+                msg = b'Fold markers can only have 1 successors, not %d'
                 raise error.ProgrammingError(msg % len(rel[1]))
             foldid = None
             foldsize = len(predecessors)
             if 1 < foldsize:
-                foldid = makefoldid(rel, metadata['user'])
+                foldid = makefoldid(rel, metadata[b'user'])
             for foldidx, prec in enumerate(predecessors, 1):
                 sucs = rel[1]
                 localmetadata = metadata.copy()
                 if len(rel) > 2:
                     localmetadata.update(rel[2])
                 if foldid is not None:
-                    localmetadata['fold-id'] = foldid
-                    localmetadata['fold-idx'] = '%d' % foldidx
-                    localmetadata['fold-size'] = '%d' % foldsize
+                    localmetadata[b'fold-id'] = foldid
+                    localmetadata[b'fold-idx'] = b'%d' % foldidx
+                    localmetadata[b'fold-size'] = b'%d' % foldsize
 
                 if not prec.mutable():
                     raise error.Abort(
-                        _("cannot obsolete public changeset: %s") % prec,
-                        hint="see 'hg help phases' for details",
+                        _(b"cannot obsolete public changeset: %s") % prec,
+                        hint=b"see 'hg help phases' for details",
                     )
                 nprec = prec.node()
                 nsucs = tuple(s.node() for s in sucs)
@@ -1109,7 +1111,7 @@
                     npare = tuple(p.node() for p in prec.parents())
                 if nprec in nsucs:
                     raise error.Abort(
-                        _("changeset %s cannot obsolete itself") % prec
+                        _(b"changeset %s cannot obsolete itself") % prec
                     )
 
                 # Effect flag can be different by relation
@@ -1117,7 +1119,7 @@
                     # The effect flag is saved in a versioned field name for
                     # future evolution
                     effectflag = obsutil.geteffectflag(prec, sucs)
-                    localmetadata[obsutil.EFFECTFLAGFIELD] = "%d" % effectflag
+                    localmetadata[obsutil.EFFECTFLAGFIELD] = b"%d" % effectflag
 
                 # Creating the marker causes the hidden cache to become
                 # invalid, which causes recomputation when we ask for
--- a/mercurial/obsutil.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/obsutil.py	Sun Oct 06 09:48:39 2019 -0400
@@ -323,7 +323,7 @@
     Beware that obsolescence cycles may result in complex situations.
     """
     repo = repo.unfiltered()
-    foreground = set(repo.set('%ln::', nodes))
+    foreground = set(repo.set(b'%ln::', nodes))
     if repo.obsstore:
         # We only need this complicated logic if there is obsolescence
         # XXX will probably deserve an optimised revset.
@@ -336,7 +336,7 @@
             mutable = [c.node() for c in foreground if c.mutable()]
             succs.update(allsuccessors(repo.obsstore, mutable))
             known = (n for n in succs if n in nm)
-            foreground = set(repo.set('%ln::', known))
+            foreground = set(repo.set(b'%ln::', known))
     return set(c.node() for c in foreground)
 
 
@@ -355,7 +355,7 @@
 # `effect-flags` set to off by default.
 #
 
-EFFECTFLAGFIELD = "ef1"
+EFFECTFLAGFIELD = b"ef1"
 
 DESCCHANGED = 1 << 0  # action changed the description
 METACHANGED = 1 << 1  # action change the meta
@@ -366,10 +366,10 @@
 BRANCHCHANGED = 1 << 6  # the branch changed
 
 METABLACKLIST = [
-    re.compile('^branch$'),
-    re.compile('^.*-source$'),
-    re.compile('^.*_source$'),
-    re.compile('^source$'),
+    re.compile(b'^branch$'),
+    re.compile(b'^.*-source$'),
+    re.compile(b'^.*_source$'),
+    re.compile(b'^source$'),
 ]
 
 
@@ -408,7 +408,7 @@
 
     This is a first and basic implementation, with many shortcoming.
     """
-    diffopts = diffutil.diffallopts(leftctx.repo().ui, {'git': True})
+    diffopts = diffutil.diffallopts(leftctx.repo().ui, {b'git': True})
 
     # Leftctx or right ctx might be filtered, so we need to use the contexts
     # with an unfiltered repository to safely compute the diff
@@ -481,8 +481,8 @@
     phase = repo._phasecache.phase
     succsmarkers = repo.obsstore.successors.get
     public = phases.public
-    addedmarkers = tr.changes['obsmarkers']
-    origrepolen = tr.changes['origrepolen']
+    addedmarkers = tr.changes[b'obsmarkers']
+    origrepolen = tr.changes[b'origrepolen']
     seenrevs = set()
     obsoleted = set()
     for mark in addedmarkers:
@@ -794,7 +794,7 @@
 
     values = []
     for sset in fullsuccessorsets:
-        values.append({'successors': sset, 'markers': sset.markers})
+        values.append({b'successors': sset, b'markers': sset.markers})
 
     return values
 
@@ -813,17 +813,17 @@
 
     if len(successorssets) == 0:
         # The commit has been pruned
-        return 'pruned'
+        return b'pruned'
     elif len(successorssets) > 1:
-        return 'diverged'
+        return b'diverged'
     else:
         # No divergence, only one set of successors
         successors = successorssets[0]
 
         if len(successors) == 1:
-            return 'superseded'
+            return b'superseded'
         else:
-            return 'superseded_split'
+            return b'superseded_split'
 
 
 def obsfateverb(successorset, markers):
@@ -831,11 +831,11 @@
     information from the markers
     """
     if not successorset:
-        verb = 'pruned'
+        verb = b'pruned'
     elif len(successorset) == 1:
-        verb = 'rewritten'
+        verb = b'rewritten'
     else:
-        verb = 'split'
+        verb = b'split'
     return verb
 
 
@@ -850,9 +850,9 @@
     """
     markersmeta = [dict(m[3]) for m in markers]
     users = set(
-        encoding.tolocal(meta['user'])
+        encoding.tolocal(meta[b'user'])
         for meta in markersmeta
-        if meta.get('user')
+        if meta.get(b'user')
     )
 
     return sorted(users)
@@ -863,7 +863,7 @@
     """
     markersmeta = [dict(m[3]) for m in markers]
     operations = set(
-        meta.get('operation') for meta in markersmeta if meta.get('operation')
+        meta.get(b'operation') for meta in markersmeta if meta.get(b'operation')
     )
 
     return sorted(operations)
@@ -885,12 +885,12 @@
     # Operations
     operations = markersoperations(markers)
     if operations:
-        line.append(" using %s" % ", ".join(operations))
+        line.append(b" using %s" % b", ".join(operations))
 
     # Successors
     if successors:
         fmtsuccessors = [formatctx(repo[succ]) for succ in successors]
-        line.append(" as %s" % ", ".join(fmtsuccessors))
+        line.append(b" as %s" % b", ".join(fmtsuccessors))
 
     # Users
     users = markersusers(markers)
@@ -902,7 +902,7 @@
             users = None
 
     if (verbose or normal) and users:
-        line.append(" by %s" % ", ".join(users))
+        line.append(b" by %s" % b", ".join(users))
 
     # Date
     dates = markersdates(markers)
@@ -912,23 +912,23 @@
         max_date = max(dates)
 
         if min_date == max_date:
-            fmtmin_date = dateutil.datestr(min_date, '%Y-%m-%d %H:%M %1%2')
-            line.append(" (at %s)" % fmtmin_date)
+            fmtmin_date = dateutil.datestr(min_date, b'%Y-%m-%d %H:%M %1%2')
+            line.append(b" (at %s)" % fmtmin_date)
         else:
-            fmtmin_date = dateutil.datestr(min_date, '%Y-%m-%d %H:%M %1%2')
-            fmtmax_date = dateutil.datestr(max_date, '%Y-%m-%d %H:%M %1%2')
-            line.append(" (between %s and %s)" % (fmtmin_date, fmtmax_date))
+            fmtmin_date = dateutil.datestr(min_date, b'%Y-%m-%d %H:%M %1%2')
+            fmtmax_date = dateutil.datestr(max_date, b'%Y-%m-%d %H:%M %1%2')
+            line.append(b" (between %s and %s)" % (fmtmin_date, fmtmax_date))
 
-    return "".join(line)
+    return b"".join(line)
 
 
 filteredmsgtable = {
-    "pruned": _("hidden revision '%s' is pruned"),
-    "diverged": _("hidden revision '%s' has diverged"),
-    "superseded": _("hidden revision '%s' was rewritten as: %s"),
-    "superseded_split": _("hidden revision '%s' was split as: %s"),
-    "superseded_split_several": _(
-        "hidden revision '%s' was split as: %s and " "%d more"
+    b"pruned": _(b"hidden revision '%s' is pruned"),
+    b"diverged": _(b"hidden revision '%s' has diverged"),
+    b"superseded": _(b"hidden revision '%s' was rewritten as: %s"),
+    b"superseded_split": _(b"hidden revision '%s' was split as: %s"),
+    b"superseded_split_several": _(
+        b"hidden revision '%s' was split as: %s and " b"%d more"
     ),
 }
 
@@ -940,28 +940,28 @@
     fate = _getobsfate(successors)
 
     # Be more precise in case the revision is superseded
-    if fate == 'pruned':
-        return filteredmsgtable['pruned'] % changeid
-    elif fate == 'diverged':
-        return filteredmsgtable['diverged'] % changeid
-    elif fate == 'superseded':
+    if fate == b'pruned':
+        return filteredmsgtable[b'pruned'] % changeid
+    elif fate == b'diverged':
+        return filteredmsgtable[b'diverged'] % changeid
+    elif fate == b'superseded':
         single_successor = nodemod.short(successors[0][0])
-        return filteredmsgtable['superseded'] % (changeid, single_successor)
-    elif fate == 'superseded_split':
+        return filteredmsgtable[b'superseded'] % (changeid, single_successor)
+    elif fate == b'superseded_split':
 
         succs = []
         for node_id in successors[0]:
             succs.append(nodemod.short(node_id))
 
         if len(succs) <= 2:
-            fmtsuccs = ', '.join(succs)
-            return filteredmsgtable['superseded_split'] % (changeid, fmtsuccs)
+            fmtsuccs = b', '.join(succs)
+            return filteredmsgtable[b'superseded_split'] % (changeid, fmtsuccs)
         else:
-            firstsuccessors = ', '.join(succs[:2])
+            firstsuccessors = b', '.join(succs[:2])
             remainingnumber = len(succs) - 2
 
             args = (changeid, firstsuccessors, remainingnumber)
-            return filteredmsgtable['superseded_split_several'] % args
+            return filteredmsgtable[b'superseded_split_several'] % args
 
 
 def divergentsets(repo, ctx):
@@ -982,7 +982,7 @@
                 continue
             base[tuple(nsuccset)] = n
     return [
-        {'divergentnodes': divset, 'commonpredecessor': b}
+        {b'divergentnodes': divset, b'commonpredecessor': b}
         for divset, b in base.iteritems()
     ]
 
@@ -993,15 +993,15 @@
         for parent in ctx.parents():
             kind = None
             if parent.orphan():
-                kind = 'orphan'
+                kind = b'orphan'
             elif parent.obsolete():
-                kind = 'obsolete'
+                kind = b'obsolete'
             if kind is not None:
                 result.append(
                     {
-                        'instability': 'orphan',
-                        'reason': '%s parent' % kind,
-                        'node': parent.hex(),
+                        b'instability': b'orphan',
+                        b'reason': b'%s parent' % kind,
+                        b'node': parent.hex(),
                     }
                 )
     if ctx.phasedivergent():
@@ -1014,21 +1014,21 @@
         for predecessor in immutable:
             result.append(
                 {
-                    'instability': 'phase-divergent',
-                    'reason': 'immutable predecessor',
-                    'node': predecessor.hex(),
+                    b'instability': b'phase-divergent',
+                    b'reason': b'immutable predecessor',
+                    b'node': predecessor.hex(),
                 }
             )
     if ctx.contentdivergent():
         dsets = divergentsets(repo, ctx)
         for dset in dsets:
-            divnodes = [repo[n] for n in dset['divergentnodes']]
+            divnodes = [repo[n] for n in dset[b'divergentnodes']]
             result.append(
                 {
-                    'instability': 'content-divergent',
-                    'divergentnodes': divnodes,
-                    'reason': 'predecessor',
-                    'node': nodemod.hex(dset['commonpredecessor']),
+                    b'instability': b'content-divergent',
+                    b'divergentnodes': divnodes,
+                    b'reason': b'predecessor',
+                    b'node': nodemod.hex(dset[b'commonpredecessor']),
                 }
             )
     return result
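
The METABLACKLIST hunk above switches to bytes patterns because on Python 3 a compiled pattern can only be applied to subjects of its own type, and the marker metadata keys it is tested against are bytes. A small sketch of that constraint, using only stock Python 3 and nothing from Mercurial itself:

    import re

    # Not from the patch: the pattern's type fixes the subject type on Python 3.
    branch_re = re.compile(b'^branch$')
    assert branch_re.match(b'branch')    # bytes pattern, bytes subject: OK
    try:
        branch_re.match('branch')        # str subject raises TypeError
    except TypeError:
        pass
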
--- a/mercurial/parser.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/parser.py	Sun Oct 06 09:48:39 2019 -0400
@@ -34,25 +34,25 @@
         self.current = None
 
     def _advance(self):
-        'advance the tokenizer'
+        b'advance the tokenizer'
         t = self.current
         self.current = next(self._iter, None)
         return t
 
     def _hasnewterm(self):
-        'True if next token may start new term'
+        b'True if next token may start new term'
         return any(self._elements[self.current[0]][1:3])
 
     def _match(self, m):
-        'make sure the tokenizer matches an end condition'
+        b'make sure the tokenizer matches an end condition'
         if self.current[0] != m:
             raise error.ParseError(
-                _("unexpected token: %s") % self.current[0], self.current[2]
+                _(b"unexpected token: %s") % self.current[0], self.current[2]
             )
         self._advance()
 
     def _parseoperand(self, bind, m=None):
-        'gather right-hand-side operand until an end condition or binding met'
+        b'gather right-hand-side operand until an end condition or binding met'
         if m and self.current[0] == m:
             expr = None
         else:
@@ -70,7 +70,7 @@
         elif prefix:
             expr = (prefix[0], self._parseoperand(*prefix[1:]))
         else:
-            raise error.ParseError(_("not a prefix: %s") % token, pos)
+            raise error.ParseError(_(b"not a prefix: %s") % token, pos)
         # gather tokens until we meet a lower binding strength
         while bind < self._elements[self.current[0]][0]:
             token, value, pos = self._advance()
@@ -81,11 +81,11 @@
             elif infix:
                 expr = (infix[0], expr, self._parseoperand(*infix[1:]))
             else:
-                raise error.ParseError(_("not an infix: %s") % token, pos)
+                raise error.ParseError(_(b"not an infix: %s") % token, pos)
         return expr
 
     def parse(self, tokeniter):
-        'generate a parse tree from tokens'
+        b'generate a parse tree from tokens'
         self._iter = tokeniter
         self._advance()
         res = self._parse()
@@ -93,13 +93,13 @@
         return res, pos
 
     def eval(self, tree):
-        'recursively evaluate a parse tree using node methods'
+        b'recursively evaluate a parse tree using node methods'
         if not isinstance(tree, tuple):
             return tree
         return self._methods[tree[0]](*[self.eval(t) for t in tree[1:]])
 
     def __call__(self, tokeniter):
-        'parse tokens into a parse tree and evaluate if methods given'
+        b'parse tokens into a parse tree and evaluate if methods given'
         t = self.parse(tokeniter)
         if self._methods:
             return self.eval(t)
@@ -121,21 +121,21 @@
     ([], None, [], 'foo')
     """
     optkey = None
-    pre, sep, post = spec.partition('**')
+    pre, sep, post = spec.partition(b'**')
     if sep:
         posts = post.split()
         if not posts:
-            raise error.ProgrammingError('no **optkey name provided')
+            raise error.ProgrammingError(b'no **optkey name provided')
         if len(posts) > 1:
-            raise error.ProgrammingError('excessive **optkey names provided')
+            raise error.ProgrammingError(b'excessive **optkey names provided')
         optkey = posts[0]
 
-    pre, sep, post = pre.partition('*')
+    pre, sep, post = pre.partition(b'*')
     pres = pre.split()
     posts = post.split()
     if sep:
         if not posts:
-            raise error.ProgrammingError('no *varkey name provided')
+            raise error.ProgrammingError(b'no *varkey name provided')
         return pres, posts[0], posts[1:], optkey
     return [], None, pres, optkey
 
@@ -163,13 +163,13 @@
     )
     if kwstart < len(poskeys):
         raise error.ParseError(
-            _("%(func)s takes at least %(nargs)d positional " "arguments")
-            % {'func': funcname, 'nargs': len(poskeys)}
+            _(b"%(func)s takes at least %(nargs)d positional " b"arguments")
+            % {b'func': funcname, b'nargs': len(poskeys)}
         )
     if not varkey and kwstart > len(poskeys) + len(keys):
         raise error.ParseError(
-            _("%(func)s takes at most %(nargs)d positional " "arguments")
-            % {'func': funcname, 'nargs': len(poskeys) + len(keys)}
+            _(b"%(func)s takes at most %(nargs)d positional " b"arguments")
+            % {b'func': funcname, b'nargs': len(poskeys) + len(keys)}
         )
     args = util.sortdict()
     # consume positional arguments
@@ -186,25 +186,25 @@
     for x in trees[kwstart:]:
         if not x or x[0] != keyvaluenode or x[1][0] != keynode:
             raise error.ParseError(
-                _("%(func)s got an invalid argument") % {'func': funcname}
+                _(b"%(func)s got an invalid argument") % {b'func': funcname}
             )
         k = x[1][1]
         if k in keys:
             d = args
         elif not optkey:
             raise error.ParseError(
-                _("%(func)s got an unexpected keyword " "argument '%(key)s'")
-                % {'func': funcname, 'key': k}
+                _(b"%(func)s got an unexpected keyword " b"argument '%(key)s'")
+                % {b'func': funcname, b'key': k}
             )
         else:
             d = args[optkey]
         if k in d:
             raise error.ParseError(
                 _(
-                    "%(func)s got multiple values for keyword "
-                    "argument '%(key)s'"
+                    b"%(func)s got multiple values for keyword "
+                    b"argument '%(key)s'"
                 )
-                % {'func': funcname, 'key': k}
+                % {b'func': funcname, b'key': k}
             )
         d[k] = x[2]
     return args
@@ -223,18 +223,18 @@
         lines.append((level, stringutil.pprint(tree)))
     elif tree[0] in leafnodes:
         rs = map(stringutil.pprint, tree[1:])
-        lines.append((level, '(%s %s)' % (tree[0], ' '.join(rs))))
+        lines.append((level, b'(%s %s)' % (tree[0], b' '.join(rs))))
     else:
-        lines.append((level, '(%s' % tree[0]))
+        lines.append((level, b'(%s' % tree[0]))
         for s in tree[1:]:
             _prettyformat(s, leafnodes, level + 1, lines)
-        lines[-1:] = [(lines[-1][0], lines[-1][1] + ')')]
+        lines[-1:] = [(lines[-1][0], lines[-1][1] + b')')]
 
 
 def prettyformat(tree, leafnodes):
     lines = []
     _prettyformat(tree, leafnodes, 0, lines)
-    output = '\n'.join(('  ' * l + s) for l, s in lines)
+    output = b'\n'.join((b'  ' * l + s) for l, s in lines)
     return output
 
 
@@ -339,11 +339,11 @@
     ('and', ('symbol', '1'), ('not', ('symbol', '2')))
     """
     if not isinstance(placeholder, tuple):
-        raise error.ProgrammingError('placeholder must be a node tuple')
+        raise error.ProgrammingError(b'placeholder must be a node tuple')
     replstack = list(reversed(repls))
     r = _buildtree(template, placeholder, replstack)
     if replstack:
-        raise error.ProgrammingError('too many replacements')
+        raise error.ProgrammingError(b'too many replacements')
     return r
 
 
@@ -398,7 +398,7 @@
     ...   (b'func', (b'symbol', b'ancestors'), (b'symbol', b'0')))
     """
     if placeholder is not None and not isinstance(placeholder, tuple):
-        raise error.ProgrammingError('placeholder must be a node tuple')
+        raise error.ProgrammingError(b'placeholder must be a node tuple')
     matches = [tree]
     if _matchtree(pattern, tree, placeholder, incompletenodes, matches):
         return matches
@@ -408,7 +408,7 @@
     """Compose error message from specified ParseError object
     """
     if len(inst.args) > 1:
-        return _('at %d: %s') % (inst.args[1], inst.args[0])
+        return _(b'at %d: %s') % (inst.args[1], inst.args[0])
     else:
         return inst.args[0]
 
@@ -443,10 +443,10 @@
     # typically a config section, which will be included in error messages
     _section = None
     # tag of symbol node
-    _symbolnode = 'symbol'
+    _symbolnode = b'symbol'
 
     def __new__(cls):
-        raise TypeError("'%s' is not instantiatable" % cls.__name__)
+        raise TypeError(b"'%s' is not instantiatable" % cls.__name__)
 
     @staticmethod
     def _parse(spec):
@@ -543,23 +543,27 @@
         if tree[0] == cls._symbolnode:
             # "name = ...." style
             name = tree[1]
-            if name.startswith('$'):
-                return (decl, None, _("invalid symbol '%s'") % name)
+            if name.startswith(b'$'):
+                return (decl, None, _(b"invalid symbol '%s'") % name)
             return (name, None, None)
 
         func = cls._trygetfunc(tree)
         if func:
             # "name(arg, ....) = ...." style
             name, args = func
-            if name.startswith('$'):
-                return (decl, None, _("invalid function '%s'") % name)
+            if name.startswith(b'$'):
+                return (decl, None, _(b"invalid function '%s'") % name)
             if any(t[0] != cls._symbolnode for t in args):
-                return (decl, None, _("invalid argument list"))
+                return (decl, None, _(b"invalid argument list"))
             if len(args) != len(set(args)):
-                return (name, None, _("argument names collide with each other"))
+                return (
+                    name,
+                    None,
+                    _(b"argument names collide with each other"),
+                )
             return (name, [t[1] for t in args], None)
 
-        return (decl, None, _("invalid format"))
+        return (decl, None, _(b"invalid format"))
 
     @classmethod
     def _relabelargs(cls, tree, args):
@@ -573,9 +577,9 @@
         assert len(tree) == 2
         sym = tree[1]
         if sym in args:
-            op = '_aliasarg'
-        elif sym.startswith('$'):
-            raise error.ParseError(_("invalid symbol '%s'") % sym)
+            op = b'_aliasarg'
+        elif sym.startswith(b'$'):
+            raise error.ParseError(_(b"invalid symbol '%s'") % sym)
         return (op, sym)
 
     @classmethod
@@ -638,15 +642,19 @@
         repl = efmt = None
         name, args, err = cls._builddecl(decl)
         if err:
-            efmt = _('bad declaration of %(section)s "%(name)s": %(error)s')
+            efmt = _(b'bad declaration of %(section)s "%(name)s": %(error)s')
         else:
             try:
                 repl = cls._builddefn(defn, args)
             except error.ParseError as inst:
                 err = parseerrordetail(inst)
-                efmt = _('bad definition of %(section)s "%(name)s": %(error)s')
+                efmt = _(b'bad definition of %(section)s "%(name)s": %(error)s')
         if err:
-            err = efmt % {'section': cls._section, 'name': name, 'error': err}
+            err = efmt % {
+                b'section': cls._section,
+                b'name': name,
+                b'error': err,
+            }
         return alias(name, args, err, repl)
 
     @classmethod
@@ -686,7 +694,7 @@
         """
         if not isinstance(tree, tuple):
             return tree
-        if tree[0] == '_aliasarg':
+        if tree[0] == b'_aliasarg':
             sym = tree[1]
             return args[sym]
         return tuple(cls._expandargs(t, args) for t in tree)
@@ -705,8 +713,8 @@
             raise error.Abort(a.error)
         if a in expanding:
             raise error.ParseError(
-                _('infinite expansion of %(section)s ' '"%(name)s" detected')
-                % {'section': cls._section, 'name': a.name}
+                _(b'infinite expansion of %(section)s ' b'"%(name)s" detected')
+                % {b'section': cls._section, b'name': a.name}
             )
         # get cacheable replacement tree by expanding aliases recursively
         expanding.append(a)
@@ -721,7 +729,7 @@
         # substitute function arguments in replacement tree
         if len(l) != len(a.args):
             raise error.ParseError(
-                _('invalid number of arguments: %d') % len(l)
+                _(b'invalid number of arguments: %d') % len(l)
             )
         l = [cls._expand(aliases, t, [], cache) for t in l]
         return cls._expandargs(result, dict(zip(a.args, l)))
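
One side effect of the mechanical conversion visible in the parser.py hunks above: the one-line docstrings of _advance, _match and friends become bytes literals, and a leading bytes expression is not a docstring, so __doc__ for those methods silently becomes None. A sketch of that Python 3 behaviour (illustrative only, not from the patch):

    # A bytes literal as a function's first statement is a bare expression,
    # not a docstring, so introspection tools see no documentation.
    def _advance():
        b'advance the tokenizer'
        return None

    assert _advance.__doc__ is None
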
--- a/mercurial/patch.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/patch.py	Sun Oct 06 09:48:39 2019 -0400
@@ -62,17 +62,17 @@
     '''return an iterator of individual patches from a stream'''
 
     def isheader(line, inheader):
-        if inheader and line.startswith((' ', '\t')):
+        if inheader and line.startswith((b' ', b'\t')):
             # continuation
             return True
-        if line.startswith((' ', '-', '+')):
+        if line.startswith((b' ', b'-', b'+')):
             # diff line - don't check for header pattern in there
             return False
-        l = line.split(': ', 1)
-        return len(l) == 2 and ' ' not in l[0]
+        l = line.split(b': ', 1)
+        return len(l) == 2 and b' ' not in l[0]
 
     def chunk(lines):
-        return stringio(''.join(lines))
+        return stringio(b''.join(lines))
 
     def hgsplit(stream, cur):
         inheader = True
@@ -80,7 +80,7 @@
         for line in stream:
             if not line.strip():
                 inheader = False
-            if not inheader and line.startswith('# HG changeset patch'):
+            if not inheader and line.startswith(b'# HG changeset patch'):
                 yield chunk(cur)
                 cur = []
                 inheader = True
@@ -92,7 +92,7 @@
 
     def mboxsplit(stream, cur):
         for line in stream:
-            if line.startswith('From '):
+            if line.startswith(b'From '):
                 for c in split(chunk(cur[1:])):
                     yield c
                 cur = []
@@ -119,7 +119,7 @@
         if not m.is_multipart():
             yield msgfp(m)
         else:
-            ok_types = ('text/plain', 'text/x-diff', 'text/x-patch')
+            ok_types = (b'text/plain', b'text/x-diff', b'text/x-patch')
             for part in m.walk():
                 ct = part.get_content_type()
                 if ct not in ok_types:
@@ -163,24 +163,24 @@
     inheader = False
     cur = []
 
-    mimeheaders = ['content-type']
-
-    if not util.safehasattr(stream, 'next'):
+    mimeheaders = [b'content-type']
+
+    if not util.safehasattr(stream, b'next'):
         # http responses, for example, have readline but not next
         stream = fiter(stream)
 
     for line in stream:
         cur.append(line)
-        if line.startswith('# HG changeset patch'):
+        if line.startswith(b'# HG changeset patch'):
             return hgsplit(stream, cur)
-        elif line.startswith('From '):
+        elif line.startswith(b'From '):
             return mboxsplit(stream, cur)
         elif isheader(line, inheader):
             inheader = True
-            if line.split(':', 1)[0].lower() in mimeheaders:
+            if line.split(b':', 1)[0].lower() in mimeheaders:
                 # let email parser handle this
                 return mimesplit(stream, cur)
-        elif line.startswith('--- ') and inheader:
+        elif line.startswith(b'--- ') and inheader:
             # No evil headers seen by diff start, split by hand
             return headersplit(stream, cur)
         # Not enough info, keep reading
@@ -192,9 +192,9 @@
 ## Some facility for extensible patch parsing:
 # list of pairs ("header to match", "data key")
 patchheadermap = [
-    ('Date', 'date'),
-    ('Branch', 'branch'),
-    ('Node ID', 'nodeid'),
+    (b'Date', b'date'),
+    (b'Branch', b'branch'),
+    (b'Node ID', b'nodeid'),
 ]
 
 
@@ -216,7 +216,7 @@
     Any item can be missing from the dictionary. If filename is missing,
     fileobj did not contain a patch. Caller must unlink filename when done.'''
 
-    fd, tmpname = pycompat.mkstemp(prefix='hg-patch-')
+    fd, tmpname = pycompat.mkstemp(prefix=b'hg-patch-')
     tmpfp = os.fdopen(fd, r'wb')
     try:
         yield _extract(ui, fileobj, tmpname, tmpfp)
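
Because patchheadermap above and the data dict populated from it now carry bytes keys, every lookup must use bytes as well: b'date' and 'date' are distinct, unequal keys on Python 3. A trivial sketch under that assumption (names are illustrative, not from the patch):

    # Not from the patch: bytes and str keys never collide on Python 3.
    data = {b'date': b'2019-10-06'}
    assert b'date' in data
    assert 'date' not in data    # a str key never matches a bytes key
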
@@ -242,34 +242,34 @@
     msg = mail.parse(fileobj)
 
     subject = msg[r'Subject'] and mail.headdecode(msg[r'Subject'])
-    data['user'] = msg[r'From'] and mail.headdecode(msg[r'From'])
-    if not subject and not data['user']:
+    data[b'user'] = msg[r'From'] and mail.headdecode(msg[r'From'])
+    if not subject and not data[b'user']:
         # Not an email, restore parsed headers if any
         subject = (
-            '\n'.join(
-                ': '.join(map(encoding.strtolocal, h)) for h in msg.items()
+            b'\n'.join(
+                b': '.join(map(encoding.strtolocal, h)) for h in msg.items()
             )
-            + '\n'
+            + b'\n'
         )
 
     # should try to parse msg['Date']
     parents = []
 
     if subject:
-        if subject.startswith('[PATCH'):
-            pend = subject.find(']')
+        if subject.startswith(b'[PATCH'):
+            pend = subject.find(b']')
             if pend >= 0:
                 subject = subject[pend + 1 :].lstrip()
-        subject = re.sub(br'\n[ \t]+', ' ', subject)
-        ui.debug('Subject: %s\n' % subject)
-    if data['user']:
-        ui.debug('From: %s\n' % data['user'])
+        subject = re.sub(br'\n[ \t]+', b' ', subject)
+        ui.debug(b'Subject: %s\n' % subject)
+    if data[b'user']:
+        ui.debug(b'From: %s\n' % data[b'user'])
     diffs_seen = 0
-    ok_types = ('text/plain', 'text/x-diff', 'text/x-patch')
-    message = ''
+    ok_types = (b'text/plain', b'text/x-diff', b'text/x-patch')
+    message = b''
     for part in msg.walk():
         content_type = pycompat.bytestr(part.get_content_type())
-        ui.debug('Content-Type: %s\n' % content_type)
+        ui.debug(b'Content-Type: %s\n' % content_type)
         if content_type not in ok_types:
             continue
         payload = part.get_payload(decode=True)
@@ -279,12 +279,12 @@
             hgpatchheader = False
             ignoretext = False
 
-            ui.debug('found patch at byte %d\n' % m.start(0))
+            ui.debug(b'found patch at byte %d\n' % m.start(0))
             diffs_seen += 1
             cfp = stringio()
             for line in payload[: m.start(0)].splitlines():
-                if line.startswith('# HG changeset patch') and not hgpatch:
-                    ui.debug('patch generated by hg export\n')
+                if line.startswith(b'# HG changeset patch') and not hgpatch:
+                    ui.debug(b'patch generated by hg export\n')
                     hgpatch = True
                     hgpatchheader = True
                     # drop earlier commit message content
@@ -292,43 +292,43 @@
                     cfp.truncate()
                     subject = None
                 elif hgpatchheader:
-                    if line.startswith('# User '):
-                        data['user'] = line[7:]
-                        ui.debug('From: %s\n' % data['user'])
-                    elif line.startswith("# Parent "):
+                    if line.startswith(b'# User '):
+                        data[b'user'] = line[7:]
+                        ui.debug(b'From: %s\n' % data[b'user'])
+                    elif line.startswith(b"# Parent "):
                         parents.append(line[9:].lstrip())
-                    elif line.startswith("# "):
+                    elif line.startswith(b"# "):
                         for header, key in patchheadermap:
-                            prefix = '# %s ' % header
+                            prefix = b'# %s ' % header
                             if line.startswith(prefix):
                                 data[key] = line[len(prefix) :]
-                                ui.debug('%s: %s\n' % (header, data[key]))
+                                ui.debug(b'%s: %s\n' % (header, data[key]))
                     else:
                         hgpatchheader = False
-                elif line == '---':
+                elif line == b'---':
                     ignoretext = True
                 if not hgpatchheader and not ignoretext:
                     cfp.write(line)
-                    cfp.write('\n')
+                    cfp.write(b'\n')
             message = cfp.getvalue()
             if tmpfp:
                 tmpfp.write(payload)
-                if not payload.endswith('\n'):
-                    tmpfp.write('\n')
-        elif not diffs_seen and message and content_type == 'text/plain':
-            message += '\n' + payload
+                if not payload.endswith(b'\n'):
+                    tmpfp.write(b'\n')
+        elif not diffs_seen and message and content_type == b'text/plain':
+            message += b'\n' + payload
 
     if subject and not message.startswith(subject):
-        message = '%s\n%s' % (subject, message)
-    data['message'] = message
+        message = b'%s\n%s' % (subject, message)
+    data[b'message'] = message
     tmpfp.close()
     if parents:
-        data['p1'] = parents.pop(0)
+        data[b'p1'] = parents.pop(0)
         if parents:
-            data['p2'] = parents.pop(0)
+            data[b'p2'] = parents.pop(0)
 
     if diffs_seen:
-        data['filename'] = tmpname
+        data[b'filename'] = tmpname
 
     return data
 
@@ -348,7 +348,7 @@
         self.path = path
         self.oldpath = None
         self.mode = None
-        self.op = 'MODIFY'
+        self.op = b'MODIFY'
         self.binary = False
 
     def setmode(self, mode):
@@ -365,14 +365,14 @@
         return other
 
     def _ispatchinga(self, afile):
-        if afile == '/dev/null':
-            return self.op == 'ADD'
-        return afile == 'a/' + (self.oldpath or self.path)
+        if afile == b'/dev/null':
+            return self.op == b'ADD'
+        return afile == b'a/' + (self.oldpath or self.path)
 
     def _ispatchingb(self, bfile):
-        if bfile == '/dev/null':
-            return self.op == 'DELETE'
-        return bfile == 'b/' + self.path
+        if bfile == b'/dev/null':
+            return self.op == b'DELETE'
+        return bfile == b'b/' + self.path
 
     def ispatching(self, afile, bfile):
         return self._ispatchinga(afile) and self._ispatchingb(bfile)
@@ -388,8 +388,8 @@
     gp = None
     gitpatches = []
     for line in lr:
-        line = line.rstrip(' \r\n')
-        if line.startswith('diff --git a/'):
+        line = line.rstrip(b' \r\n')
+        if line.startswith(b'diff --git a/'):
             m = gitre.match(line)
             if m:
                 if gp:
@@ -397,28 +397,28 @@
                 dst = m.group(2)
                 gp = patchmeta(dst)
         elif gp:
-            if line.startswith('--- '):
+            if line.startswith(b'--- '):
                 gitpatches.append(gp)
                 gp = None
                 continue
-            if line.startswith('rename from '):
-                gp.op = 'RENAME'
+            if line.startswith(b'rename from '):
+                gp.op = b'RENAME'
                 gp.oldpath = line[12:]
-            elif line.startswith('rename to '):
+            elif line.startswith(b'rename to '):
                 gp.path = line[10:]
-            elif line.startswith('copy from '):
-                gp.op = 'COPY'
+            elif line.startswith(b'copy from '):
+                gp.op = b'COPY'
                 gp.oldpath = line[10:]
-            elif line.startswith('copy to '):
+            elif line.startswith(b'copy to '):
                 gp.path = line[8:]
-            elif line.startswith('deleted file'):
-                gp.op = 'DELETE'
-            elif line.startswith('new file mode '):
-                gp.op = 'ADD'
+            elif line.startswith(b'deleted file'):
+                gp.op = b'DELETE'
+            elif line.startswith(b'new file mode '):
+                gp.op = b'ADD'
                 gp.setmode(int(line[-6:], 8))
-            elif line.startswith('new mode '):
+            elif line.startswith(b'new mode '):
                 gp.setmode(int(line[-6:], 8))
-            elif line.startswith('GIT binary patch'):
+            elif line.startswith(b'GIT binary patch'):
                 gp.binary = True
     if gp:
         gitpatches.append(gp)
@@ -444,7 +444,7 @@
         return self.fp.readline()
 
     def __iter__(self):
-        return iter(self.readline, '')
+        return iter(self.readline, b'')
 
 
 class abstractbackend(object):
@@ -517,16 +517,16 @@
                 self.opener.setflags(fname, False, True)
 
     def unlink(self, fname):
-        rmdir = self.ui.configbool('experimental', 'removeemptydirs')
+        rmdir = self.ui.configbool(b'experimental', b'removeemptydirs')
         self.opener.unlinkpath(fname, ignoremissing=True, rmdir=rmdir)
 
     def writerej(self, fname, failed, total, lines):
-        fname = fname + ".rej"
+        fname = fname + b".rej"
         self.ui.warn(
-            _("%d out of %d hunks FAILED -- saving rejects to file %s\n")
+            _(b"%d out of %d hunks FAILED -- saving rejects to file %s\n")
             % (failed, total, fname)
         )
-        fp = self.opener(fname, 'w')
+        fp = self.opener(fname, b'w')
         fp.writelines(lines)
         fp.close()
 
@@ -544,8 +544,8 @@
         self.copied = []
 
     def _checkknown(self, fname):
-        if self.repo.dirstate[fname] == '?' and self.exists(fname):
-            raise PatchError(_('cannot patch %s: file is not tracked') % fname)
+        if self.repo.dirstate[fname] == b'?' and self.exists(fname):
+            raise PatchError(_(b'cannot patch %s: file is not tracked') % fname)
 
     def setfile(self, fname, data, mode, copysource):
         self._checkknown(fname)
@@ -596,10 +596,10 @@
             self.size += len(data)
         else:
             if self.opener is None:
-                root = pycompat.mkdtemp(prefix='hg-patch-')
+                root = pycompat.mkdtemp(prefix=b'hg-patch-')
                 self.opener = vfsmod.vfs(root)
             # Avoid filename issues with these simple names
-            fn = '%d' % self.created
+            fn = b'%d' % self.created
             self.opener.write(fn, data)
             self.created += 1
             self.files[fname] = (fn, mode, copied)
@@ -629,7 +629,7 @@
 
     def _checkknown(self, fname):
         if fname not in self.ctx:
-            raise PatchError(_('cannot patch %s: file is not tracked') % fname)
+            raise PatchError(_(b'cannot patch %s: file is not tracked') % fname)
 
     def getfile(self, fname):
         try:
@@ -637,7 +637,7 @@
         except error.LookupError:
             return None, None
         flags = fctx.flags()
-        return fctx.data(), ('l' in flags, 'x' in flags)
+        return fctx.data(), (b'l' in flags, b'x' in flags)
 
     def setfile(self, fname, data, mode, copysource):
         if copysource:
@@ -663,11 +663,11 @@
 # @@ -start,len +start,len @@ or @@ -start +start @@ if len is 1
 unidesc = re.compile(br'@@ -(\d+)(?:,(\d+))? \+(\d+)(?:,(\d+))? @@')
 contextdesc = re.compile(br'(?:---|\*\*\*) (\d+)(?:,(\d+))? (?:---|\*\*\*)')
-eolmodes = ['strict', 'crlf', 'lf', 'auto']
+eolmodes = [b'strict', b'crlf', b'lf', b'auto']
 
 
 class patchfile(object):
-    def __init__(self, ui, gp, backend, store, eolmode='strict'):
+    def __init__(self, ui, gp, backend, store, eolmode=b'strict'):
         self.fname = gp.path
         self.eolmode = eolmode
         self.eol = None
@@ -678,8 +678,8 @@
         self.missing = True
         self.mode = gp.mode
         self.copysource = gp.oldpath
-        self.create = gp.op in ('ADD', 'COPY', 'RENAME')
-        self.remove = gp.op == 'DELETE'
+        self.create = gp.op in (b'ADD', b'COPY', b'RENAME')
+        self.remove = gp.op == b'DELETE'
         if self.copysource is None:
             data, mode = backend.getfile(self.fname)
         else:
@@ -693,15 +693,15 @@
                 self.mode = mode
             if self.lines:
                 # Normalize line endings
-                if self.lines[0].endswith('\r\n'):
-                    self.eol = '\r\n'
-                elif self.lines[0].endswith('\n'):
-                    self.eol = '\n'
-                if eolmode != 'strict':
+                if self.lines[0].endswith(b'\r\n'):
+                    self.eol = b'\r\n'
+                elif self.lines[0].endswith(b'\n'):
+                    self.eol = b'\n'
+                if eolmode != b'strict':
                     nlines = []
                     for l in self.lines:
-                        if l.endswith('\r\n'):
-                            l = l[:-2] + '\n'
+                        if l.endswith(b'\r\n'):
+                            l = l[:-2] + b'\n'
                         nlines.append(l)
                     self.lines = nlines
         else:
@@ -710,11 +710,11 @@
             if self.mode is None:
                 self.mode = (False, False)
         if self.missing:
-            self.ui.warn(_("unable to find '%s' for patching\n") % self.fname)
+            self.ui.warn(_(b"unable to find '%s' for patching\n") % self.fname)
             self.ui.warn(
                 _(
-                    "(use '--prefix' to apply patch relative to the "
-                    "current directory)\n"
+                    b"(use '--prefix' to apply patch relative to the "
+                    b"current directory)\n"
                 )
             )
 
@@ -728,29 +728,29 @@
         self.hunks = 0
 
     def writelines(self, fname, lines, mode):
-        if self.eolmode == 'auto':
+        if self.eolmode == b'auto':
             eol = self.eol
-        elif self.eolmode == 'crlf':
-            eol = '\r\n'
+        elif self.eolmode == b'crlf':
+            eol = b'\r\n'
         else:
-            eol = '\n'
-
-        if self.eolmode != 'strict' and eol and eol != '\n':
+            eol = b'\n'
+
+        if self.eolmode != b'strict' and eol and eol != b'\n':
             rawlines = []
             for l in lines:
-                if l and l.endswith('\n'):
+                if l and l.endswith(b'\n'):
                     l = l[:-1] + eol
                 rawlines.append(l)
             lines = rawlines
 
-        self.backend.setfile(fname, ''.join(lines), mode, self.copysource)
+        self.backend.setfile(fname, b''.join(lines), mode, self.copysource)
 
     def printfile(self, warn):
         if self.fileprinted:
             return
         if warn or self.ui.verbose:
             self.fileprinted = True
-        s = _("patching file %s\n") % self.fname
+        s = _(b"patching file %s\n") % self.fname
         if warn:
             self.ui.warn(s)
         else:
@@ -775,18 +775,18 @@
         if not self.rej:
             return
         base = os.path.basename(self.fname)
-        lines = ["--- %s\n+++ %s\n" % (base, base)]
+        lines = [b"--- %s\n+++ %s\n" % (base, base)]
         for x in self.rej:
             for l in x.hunk:
                 lines.append(l)
-                if l[-1:] != '\n':
-                    lines.append("\n\\ No newline at end of file\n")
+                if l[-1:] != b'\n':
+                    lines.append(b"\n\\ No newline at end of file\n")
         self.backend.writerej(self.fname, len(self.rej), self.hunks, lines)
 
     def apply(self, h):
         if not h.complete():
             raise PatchError(
-                _("bad hunk #%d %s (%d %d %d %d)")
+                _(b"bad hunk #%d %s (%d %d %d %d)")
                 % (h.number, h.desc, len(h.a), h.lena, len(h.b), h.lenb)
             )
 
@@ -799,11 +799,11 @@
         if self.exists and self.create:
             if self.copysource:
                 self.ui.warn(
-                    _("cannot create %s: destination already " "exists\n")
+                    _(b"cannot create %s: destination already " b"exists\n")
                     % self.fname
                 )
             else:
-                self.ui.warn(_("file %s already exists\n") % self.fname)
+                self.ui.warn(_(b"file %s already exists\n") % self.fname)
             self.rej.append(h)
             return -1
 
@@ -819,8 +819,8 @@
 
         horig = h
         if (
-            self.eolmode in ('crlf', 'lf')
-            or self.eolmode == 'auto'
+            self.eolmode in (b'crlf', b'lf')
+            or self.eolmode == b'auto'
             and self.eol
         ):
             # If new eols are going to be normalized, then normalize
@@ -849,7 +849,9 @@
         for x, s in enumerate(self.lines):
             self.hash.setdefault(s, []).append(x)
 
-        for fuzzlen in pycompat.xrange(self.ui.configint("patch", "fuzz") + 1):
+        for fuzzlen in pycompat.xrange(
+            self.ui.configint(b"patch", b"fuzz") + 1
+        ):
             for toponly in [True, False]:
                 old, oldstart, new, newstart = h.fuzzit(fuzzlen, toponly)
                 oldstart = oldstart + self.offset + self.skew
@@ -870,9 +872,9 @@
                         offset = l - orig_start - fuzzlen
                         if fuzzlen:
                             msg = _(
-                                "Hunk #%d succeeded at %d "
-                                "with fuzz %d "
-                                "(offset %d lines).\n"
+                                b"Hunk #%d succeeded at %d "
+                                b"with fuzz %d "
+                                b"(offset %d lines).\n"
                             )
                             self.printfile(True)
                             self.ui.warn(
@@ -880,13 +882,13 @@
                             )
                         else:
                             msg = _(
-                                "Hunk #%d succeeded at %d "
-                                "(offset %d lines).\n"
+                                b"Hunk #%d succeeded at %d "
+                                b"(offset %d lines).\n"
                             )
                             self.ui.note(msg % (h.number, l + 1, offset))
                         return fuzzlen
         self.printfile(True)
-        self.ui.warn(_("Hunk #%d FAILED at %d\n") % (h.number, orig_start))
+        self.ui.warn(_(b"Hunk #%d FAILED at %d\n") % (h.number, orig_start))
         self.rej.append(horig)
         return -1
 
@@ -901,33 +903,33 @@
     """patch header
     """
 
-    diffgit_re = re.compile('diff --git a/(.*) b/(.*)$')
-    diff_re = re.compile('diff -r .* (.*)$')
-    allhunks_re = re.compile('(?:index|deleted file) ')
-    pretty_re = re.compile('(?:new file|deleted file) ')
-    special_re = re.compile('(?:index|deleted|copy|rename|new mode) ')
-    newfile_re = re.compile('(?:new file|copy to|rename to)')
+    diffgit_re = re.compile(b'diff --git a/(.*) b/(.*)$')
+    diff_re = re.compile(b'diff -r .* (.*)$')
+    allhunks_re = re.compile(b'(?:index|deleted file) ')
+    pretty_re = re.compile(b'(?:new file|deleted file) ')
+    special_re = re.compile(b'(?:index|deleted|copy|rename|new mode) ')
+    newfile_re = re.compile(b'(?:new file|copy to|rename to)')
 
     def __init__(self, header):
         self.header = header
         self.hunks = []
 
     def binary(self):
-        return any(h.startswith('index ') for h in self.header)
+        return any(h.startswith(b'index ') for h in self.header)
 
     def pretty(self, fp):
         for h in self.header:
-            if h.startswith('index '):
-                fp.write(_('this modifies a binary file (all or nothing)\n'))
+            if h.startswith(b'index '):
+                fp.write(_(b'this modifies a binary file (all or nothing)\n'))
                 break
             if self.pretty_re.match(h):
                 fp.write(h)
                 if self.binary():
-                    fp.write(_('this is a binary file\n'))
+                    fp.write(_(b'this is a binary file\n'))
                 break
-            if h.startswith('---'):
+            if h.startswith(b'---'):
                 fp.write(
-                    _('%d hunks, %d lines changed\n')
+                    _(b'%d hunks, %d lines changed\n')
                     % (
                         len(self.hunks),
                         sum([max(h.added, h.removed) for h in self.hunks]),
@@ -937,7 +939,7 @@
             fp.write(h)
 
     def write(self, fp):
-        fp.write(''.join(self.header))
+        fp.write(b''.join(self.header))
 
     def allhunks(self):
         return any(self.allhunks_re.match(h) for h in self.header)
@@ -956,7 +958,7 @@
         return self.files()[-1]
 
     def __repr__(self):
-        return '<header %s>' % (' '.join(map(repr, self.files())))
+        return b'<header %s>' % (b' '.join(map(repr, self.files())))
 
     def isnewfile(self):
         return any(self.newfile_re.match(h) for h in self.header)
@@ -1035,8 +1037,8 @@
 
     def countchanges(self, hunk):
         """hunk -> (n+,n-)"""
-        add = len([h for h in hunk if h.startswith('+')])
-        rem = len([h for h in hunk if h.startswith('-')])
+        add = len([h for h in hunk if h.startswith(b'+')])
+        rem = len([h for h in hunk if h.startswith(b'-')])
         return add, rem
 
     def reversehunk(self):
@@ -1046,8 +1048,8 @@
         that, swap fromline/toline and +/- signs while keep other things
         unchanged.
         """
-        m = {'+': '-', '-': '+', '\\': '\\'}
-        hunk = ['%s%s' % (m[l[0:1]], l[1:]) for l in self.hunk]
+        m = {b'+': b'-', b'-': b'+', b'\\': b'\\'}
+        hunk = [b'%s%s' % (m[l[0:1]], l[1:]) for l in self.hunk]
         return recordhunk(
             self.header,
             self.toline,
@@ -1060,21 +1062,21 @@
 
     def write(self, fp):
         delta = len(self.before) + len(self.after)
-        if self.after and self.after[-1] == '\\ No newline at end of file\n':
+        if self.after and self.after[-1] == b'\\ No newline at end of file\n':
             delta -= 1
         fromlen = delta + self.removed
         tolen = delta + self.added
         fp.write(
-            '@@ -%d,%d +%d,%d @@%s\n'
+            b'@@ -%d,%d +%d,%d @@%s\n'
             % (
                 self.fromline,
                 fromlen,
                 self.toline,
                 tolen,
-                self.proc and (' ' + self.proc),
+                self.proc and (b' ' + self.proc),
             )
         )
-        fp.write(''.join(self.before + self.hunk + self.after))
+        fp.write(b''.join(self.before + self.hunk + self.after))
 
     pretty = write
 
@@ -1082,71 +1084,71 @@
         return self.header.filename()
 
     def __repr__(self):
-        return '<hunk %r@%d>' % (self.filename(), self.fromline)
+        return b'<hunk %r@%d>' % (self.filename(), self.fromline)
 
 
 def getmessages():
     return {
-        'multiple': {
-            'apply': _("apply change %d/%d to '%s'?"),
-            'discard': _("discard change %d/%d to '%s'?"),
-            'keep': _("keep change %d/%d to '%s'?"),
-            'record': _("record change %d/%d to '%s'?"),
+        b'multiple': {
+            b'apply': _(b"apply change %d/%d to '%s'?"),
+            b'discard': _(b"discard change %d/%d to '%s'?"),
+            b'keep': _(b"keep change %d/%d to '%s'?"),
+            b'record': _(b"record change %d/%d to '%s'?"),
         },
-        'single': {
-            'apply': _("apply this change to '%s'?"),
-            'discard': _("discard this change to '%s'?"),
-            'keep': _("keep this change to '%s'?"),
-            'record': _("record this change to '%s'?"),
+        b'single': {
+            b'apply': _(b"apply this change to '%s'?"),
+            b'discard': _(b"discard this change to '%s'?"),
+            b'keep': _(b"keep this change to '%s'?"),
+            b'record': _(b"record this change to '%s'?"),
         },
-        'help': {
-            'apply': _(
-                '[Ynesfdaq?]'
-                '$$ &Yes, apply this change'
-                '$$ &No, skip this change'
-                '$$ &Edit this change manually'
-                '$$ &Skip remaining changes to this file'
-                '$$ Apply remaining changes to this &file'
-                '$$ &Done, skip remaining changes and files'
-                '$$ Apply &all changes to all remaining files'
-                '$$ &Quit, applying no changes'
-                '$$ &? (display help)'
+        b'help': {
+            b'apply': _(
+                b'[Ynesfdaq?]'
+                b'$$ &Yes, apply this change'
+                b'$$ &No, skip this change'
+                b'$$ &Edit this change manually'
+                b'$$ &Skip remaining changes to this file'
+                b'$$ Apply remaining changes to this &file'
+                b'$$ &Done, skip remaining changes and files'
+                b'$$ Apply &all changes to all remaining files'
+                b'$$ &Quit, applying no changes'
+                b'$$ &? (display help)'
             ),
-            'discard': _(
-                '[Ynesfdaq?]'
-                '$$ &Yes, discard this change'
-                '$$ &No, skip this change'
-                '$$ &Edit this change manually'
-                '$$ &Skip remaining changes to this file'
-                '$$ Discard remaining changes to this &file'
-                '$$ &Done, skip remaining changes and files'
-                '$$ Discard &all changes to all remaining files'
-                '$$ &Quit, discarding no changes'
-                '$$ &? (display help)'
+            b'discard': _(
+                b'[Ynesfdaq?]'
+                b'$$ &Yes, discard this change'
+                b'$$ &No, skip this change'
+                b'$$ &Edit this change manually'
+                b'$$ &Skip remaining changes to this file'
+                b'$$ Discard remaining changes to this &file'
+                b'$$ &Done, skip remaining changes and files'
+                b'$$ Discard &all changes to all remaining files'
+                b'$$ &Quit, discarding no changes'
+                b'$$ &? (display help)'
             ),
-            'keep': _(
-                '[Ynesfdaq?]'
-                '$$ &Yes, keep this change'
-                '$$ &No, skip this change'
-                '$$ &Edit this change manually'
-                '$$ &Skip remaining changes to this file'
-                '$$ Keep remaining changes to this &file'
-                '$$ &Done, skip remaining changes and files'
-                '$$ Keep &all changes to all remaining files'
-                '$$ &Quit, keeping all changes'
-                '$$ &? (display help)'
+            b'keep': _(
+                b'[Ynesfdaq?]'
+                b'$$ &Yes, keep this change'
+                b'$$ &No, skip this change'
+                b'$$ &Edit this change manually'
+                b'$$ &Skip remaining changes to this file'
+                b'$$ Keep remaining changes to this &file'
+                b'$$ &Done, skip remaining changes and files'
+                b'$$ Keep &all changes to all remaining files'
+                b'$$ &Quit, keeping all changes'
+                b'$$ &? (display help)'
             ),
-            'record': _(
-                '[Ynesfdaq?]'
-                '$$ &Yes, record this change'
-                '$$ &No, skip this change'
-                '$$ &Edit this change manually'
-                '$$ &Skip remaining changes to this file'
-                '$$ Record remaining changes to this &file'
-                '$$ &Done, skip remaining changes and files'
-                '$$ Record &all changes to all remaining files'
-                '$$ &Quit, recording no changes'
-                '$$ &? (display help)'
+            b'record': _(
+                b'[Ynesfdaq?]'
+                b'$$ &Yes, record this change'
+                b'$$ &No, skip this change'
+                b'$$ &Edit this change manually'
+                b'$$ &Skip remaining changes to this file'
+                b'$$ Record remaining changes to this &file'
+                b'$$ &Done, skip remaining changes and files'
+                b'$$ Record &all changes to all remaining files'
+                b'$$ &Quit, recording no changes'
+                b'$$ &? (display help)'
             ),
         },
     }
@@ -1157,7 +1159,7 @@
     messages = getmessages()
 
     if operation is None:
-        operation = 'record'
+        operation = b'record'
 
     def prompt(skipfile, skipall, query, chunk):
         """prompt query, and process base inputs
@@ -1175,14 +1177,14 @@
         if skipfile is not None:
             return skipfile, skipfile, skipall, newpatches
         while True:
-            resps = messages['help'][operation]
+            resps = messages[b'help'][operation]
             # IMPORTANT: keep the last line of this prompt short (<40 english
             # chars is a good target) because of issue6158.
-            r = ui.promptchoice("%s\n(enter ? for help) %s" % (query, resps))
-            ui.write("\n")
+            r = ui.promptchoice(b"%s\n(enter ? for help) %s" % (query, resps))
+            ui.write(b"\n")
             if r == 8:  # ?
                 for c, t in ui.extractchoices(resps)[1]:
-                    ui.write('%s - %s\n' % (c, encoding.lower(t)))
+                    ui.write(b'%s - %s\n' % (c, encoding.lower(t)))
                 continue
             elif r == 0:  # yes
                 ret = True
@@ -1190,16 +1192,16 @@
                 ret = False
             elif r == 2:  # Edit patch
                 if chunk is None:
-                    ui.write(_('cannot edit patch for whole file'))
-                    ui.write("\n")
+                    ui.write(_(b'cannot edit patch for whole file'))
+                    ui.write(b"\n")
                     continue
                 if chunk.header.binary():
-                    ui.write(_('cannot edit patch for binary file'))
-                    ui.write("\n")
+                    ui.write(_(b'cannot edit patch for binary file'))
+                    ui.write(b"\n")
                     continue
                 # Patch comment based on the Git one (based on comment at end of
                 # https://mercurial-scm.org/wiki/RecordExtension)
-                phelp = '---' + _(
+                phelp = b'---' + _(
                     """
 To remove '-' lines, make them ' ' lines (context).
 To remove '+' lines, delete them.
@@ -1213,7 +1215,7 @@
 """
                 )
                 (patchfd, patchfn) = pycompat.mkstemp(
-                    prefix="hg-editor-", suffix=".diff"
+                    prefix=b"hg-editor-", suffix=b".diff"
                 )
                 ncpatchfp = None
                 try:
@@ -1222,25 +1224,27 @@
                     chunk.header.write(f)
                     chunk.write(f)
                     f.write(
-                        ''.join(['# ' + i + '\n' for i in phelp.splitlines()])
+                        b''.join(
+                            [b'# ' + i + b'\n' for i in phelp.splitlines()]
+                        )
                     )
                     f.close()
                     # Start the editor and wait for it to complete
                     editor = ui.geteditor()
                     ret = ui.system(
-                        "%s \"%s\"" % (editor, patchfn),
-                        environ={'HGUSER': ui.username()},
-                        blockedtag='filterpatch',
+                        b"%s \"%s\"" % (editor, patchfn),
+                        environ={b'HGUSER': ui.username()},
+                        blockedtag=b'filterpatch',
                     )
                     if ret != 0:
-                        ui.warn(_("editor exited with exit code %d\n") % ret)
+                        ui.warn(_(b"editor exited with exit code %d\n") % ret)
                         continue
                     # Remove comment lines
                     patchfp = open(patchfn, r'rb')
                     ncpatchfp = stringio()
                     for line in util.iterfile(patchfp):
                         line = util.fromnativeeol(line)
-                        if not line.startswith('#'):
+                        if not line.startswith(b'#'):
                             ncpatchfp.write(line)
                     patchfp.close()
                     ncpatchfp.seek(0)
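
The branch above round-trips one hunk through the user's editor: the hunk is written to an `hg-editor-*.diff` temp file together with `# `-prefixed help text, and after the editor exits every line starting with `#` is dropped before the result is reparsed. A small sketch of just that strip step, using plain `tempfile` rather than `pycompat.mkstemp`:

    import os
    import tempfile

    def stripcomments(data):
        # drop the '# '-prefixed help lines, mirroring the loop above
        return b''.join(
            l for l in data.splitlines(True) if not l.startswith(b'#')
        )

    fd, fn = tempfile.mkstemp(prefix='hg-editor-', suffix='.diff')
    os.write(fd, b'# To remove lines, delete them.\n+new line\n')
    os.close(fd)
    with open(fn, 'rb') as fp:
        edited = stripcomments(fp.read())
    os.unlink(fn)
    assert edited == b'+new line\n'
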
@@ -1260,7 +1264,7 @@
             elif r == 6:  # all
                 ret = skipall = True
             elif r == 7:  # quit
-                raise error.Abort(_('user quit'))
+                raise error.Abort(_(b'user quit'))
             return ret, skipfile, skipall, newpatches
 
     seen = set()
@@ -1271,15 +1275,15 @@
         pos += len(h.hunks)
         skipfile = None
         fixoffset = 0
-        hdr = ''.join(h.header)
+        hdr = b''.join(h.header)
         if hdr in seen:
             continue
         seen.add(hdr)
         if skipall is None:
             h.pretty(ui)
         files = h.files()
-        msg = _('examine changes to %s?') % _(' and ').join(
-            "'%s'" % f for f in files
+        msg = _(b'examine changes to %s?') % _(b' and ').join(
+            b"'%s'" % f for f in files
         )
         if all(match.exact(f) for f in files):
             r, skipall, np = True, None, None
@@ -1295,10 +1299,10 @@
             if skipfile is None and skipall is None:
                 chunk.pretty(ui)
             if total == 1:
-                msg = messages['single'][operation] % chunk.filename()
+                msg = messages[b'single'][operation] % chunk.filename()
             else:
                 idx = pos - len(h.hunks) + i
-                msg = messages['multiple'][operation] % (
+                msg = messages[b'multiple'][operation] % (
                     idx,
                     total,
                     chunk.filename(),
@@ -1349,8 +1353,8 @@
         def normalize(lines):
             nlines = []
             for line in lines:
-                if line.endswith('\r\n'):
-                    line = line[:-2] + '\n'
+                if line.endswith(b'\r\n'):
+                    line = line[:-2] + b'\n'
                 nlines.append(line)
             return nlines
 
@@ -1370,7 +1374,7 @@
     def read_unified_hunk(self, lr):
         m = unidesc.match(self.desc)
         if not m:
-            raise PatchError(_("bad hunk #%d") % self.number)
+            raise PatchError(_(b"bad hunk #%d") % self.number)
         self.starta, self.lena, self.startb, self.lenb = m.groups()
         if self.lena is None:
             self.lena = 1
@@ -1387,7 +1391,7 @@
                 lr, self.hunk, self.lena, self.lenb, self.a, self.b
             )
         except error.ParseError as e:
-            raise PatchError(_("bad hunk #%d: %s") % (self.number, e))
+            raise PatchError(_(b"bad hunk #%d: %s") % (self.number, e))
         # if we hit eof before finishing out the hunk, the last line will
         # be zero length.  Lets try to fix it up.
         while len(self.hunk[-1]) == 0:
@@ -1402,7 +1406,7 @@
         self.desc = lr.readline()
         m = contextdesc.match(self.desc)
         if not m:
-            raise PatchError(_("bad hunk #%d") % self.number)
+            raise PatchError(_(b"bad hunk #%d") % self.number)
         self.starta, aend = m.groups()
         self.starta = int(self.starta)
         if aend is None:
@@ -1412,18 +1416,18 @@
             self.lena += 1
         for x in pycompat.xrange(self.lena):
             l = lr.readline()
-            if l.startswith('---'):
+            if l.startswith(b'---'):
                 # lines addition, old block is empty
                 lr.push(l)
                 break
             s = l[2:]
-            if l.startswith('- ') or l.startswith('! '):
-                u = '-' + s
-            elif l.startswith('  '):
-                u = ' ' + s
+            if l.startswith(b'- ') or l.startswith(b'! '):
+                u = b'-' + s
+            elif l.startswith(b'  '):
+                u = b' ' + s
             else:
                 raise PatchError(
-                    _("bad hunk #%d old text line %d") % (self.number, x)
+                    _(b"bad hunk #%d old text line %d") % (self.number, x)
                 )
             self.a.append(u)
             self.hunk.append(u)
@@ -1436,7 +1440,7 @@
             l = lr.readline()
         m = contextdesc.match(l)
         if not m:
-            raise PatchError(_("bad hunk #%d") % self.number)
+            raise PatchError(_(b"bad hunk #%d") % self.number)
         self.startb, bend = m.groups()
         self.startb = int(self.startb)
         if bend is None:
@@ -1460,28 +1464,28 @@
                 lr.push(l)
                 break
             s = l[2:]
-            if l.startswith('+ ') or l.startswith('! '):
-                u = '+' + s
-            elif l.startswith('  '):
-                u = ' ' + s
+            if l.startswith(b'+ ') or l.startswith(b'! '):
+                u = b'+' + s
+            elif l.startswith(b'  '):
+                u = b' ' + s
             elif len(self.b) == 0:
                 # line deletions, new block is empty
                 lr.push(l)
                 break
             else:
                 raise PatchError(
-                    _("bad hunk #%d old text line %d") % (self.number, x)
+                    _(b"bad hunk #%d old text line %d") % (self.number, x)
                 )
             self.b.append(s)
             while True:
                 if hunki >= len(self.hunk):
-                    h = ""
+                    h = b""
                 else:
                     h = self.hunk[hunki]
                 hunki += 1
                 if h == u:
                     break
-                elif h.startswith('-'):
+                elif h.startswith(b'-'):
                     continue
                 else:
                     self.hunk.insert(hunki - 1, u)
@@ -1490,15 +1494,15 @@
         if not self.a:
             # this happens when lines were only added to the hunk
             for x in self.hunk:
-                if x.startswith('-') or x.startswith(' '):
+                if x.startswith(b'-') or x.startswith(b' '):
                     self.a.append(x)
         if not self.b:
             # this happens when lines were only deleted from the hunk
             for x in self.hunk:
-                if x.startswith('+') or x.startswith(' '):
+                if x.startswith(b'+') or x.startswith(b' '):
                     self.b.append(x[1:])
         # @@ -start,len +start,len @@
-        self.desc = "@@ -%d,%d +%d,%d @@\n" % (
+        self.desc = b"@@ -%d,%d +%d,%d @@\n" % (
             self.starta,
             self.lena,
             self.startb,
@@ -1528,13 +1532,13 @@
             hlen = len(self.hunk)
             for x in pycompat.xrange(hlen - 1):
                 # the hunk starts with the @@ line, so use x+1
-                if self.hunk[x + 1].startswith(' '):
+                if self.hunk[x + 1].startswith(b' '):
                     top += 1
                 else:
                     break
             if not toponly:
                 for x in pycompat.xrange(hlen - 1):
-                    if self.hunk[hlen - bot - 1].startswith(' '):
+                    if self.hunk[hlen - bot - 1].startswith(b' '):
                         bot += 1
                     else:
                         break
@@ -1557,12 +1561,12 @@
 
 
 class binhunk(object):
-    'A binary patch file.'
+    b'A binary patch file.'
 
     def __init__(self, lr, fname):
         self.text = None
         self.delta = False
-        self.hunk = ['GIT binary patch\n']
+        self.hunk = [b'GIT binary patch\n']
         self._fname = fname
         self._read(lr)
 
@@ -1571,25 +1575,25 @@
 
     def new(self, lines):
         if self.delta:
-            return [applybindelta(self.text, ''.join(lines))]
+            return [applybindelta(self.text, b''.join(lines))]
         return [self.text]
 
     def _read(self, lr):
         def getline(lr, hunk):
             l = lr.readline()
             hunk.append(l)
-            return l.rstrip('\r\n')
+            return l.rstrip(b'\r\n')
 
         while True:
             line = getline(lr, self.hunk)
             if not line:
                 raise PatchError(
-                    _('could not extract "%s" binary data') % self._fname
+                    _(b'could not extract "%s" binary data') % self._fname
                 )
-            if line.startswith('literal '):
+            if line.startswith(b'literal '):
                 size = int(line[8:].rstrip())
                 break
-            if line.startswith('delta '):
+            if line.startswith(b'delta '):
                 size = int(line[6:].rstrip())
                 self.delta = True
                 break
@@ -1597,22 +1601,22 @@
         line = getline(lr, self.hunk)
         while len(line) > 1:
             l = line[0:1]
-            if l <= 'Z' and l >= 'A':
-                l = ord(l) - ord('A') + 1
+            if l <= b'Z' and l >= b'A':
+                l = ord(l) - ord(b'A') + 1
             else:
-                l = ord(l) - ord('a') + 27
+                l = ord(l) - ord(b'a') + 27
             try:
                 dec.append(util.b85decode(line[1:])[:l])
             except ValueError as e:
                 raise PatchError(
-                    _('could not decode "%s" binary patch: %s')
+                    _(b'could not decode "%s" binary patch: %s')
                     % (self._fname, stringutil.forcebytestr(e))
                 )
             line = getline(lr, self.hunk)
-        text = zlib.decompress(''.join(dec))
+        text = zlib.decompress(b''.join(dec))
         if len(text) != size:
             raise PatchError(
-                _('"%s" length is %d bytes, should be %d')
+                _(b'"%s" length is %d bytes, should be %d')
                 % (self._fname, len(text), size)
             )
         self.text = text
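
Each data line of a `GIT binary patch` hunk begins with one length character, `A`-`Z` for payload lengths 1 through 26 and `a`-`z` for 27 through 52, followed by base85 text padded to 4-byte groups, which is why `_read()` truncates each decoded group to `l` bytes. (A side effect of the mechanical byteification visible above: a bytes literal in docstring position is not a docstring, so `binhunk.__doc__` becomes `None` under this change.) Python's `base64.b85encode` uses the same alphabet as git-style binary diffs, so a single line can be round-tripped like this:

    import base64

    payload = b'\x00\x01\x02\x03\x04'                # 5 bytes of data
    lenchar = bytes([ord('A') + len(payload) - 1])   # b'E' encodes length 5
    # pad to a multiple of 4 bytes before encoding, as git does
    padded = payload + b'\x00' * (-len(payload) % 4)
    line = lenchar + base64.b85encode(padded)

    # decoding, mirroring binhunk._read() above
    l = line[0:1]
    if b'A' <= l <= b'Z':
        n = ord(l) - ord(b'A') + 1
    else:
        n = ord(l) - ord(b'a') + 27
    assert base64.b85decode(line[1:])[:n] == payload
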
@@ -1620,10 +1624,10 @@
 
 def parsefilename(str):
     # --- filename \t|space stuff
-    s = str[4:].rstrip('\r\n')
-    i = s.find('\t')
+    s = str[4:].rstrip(b'\r\n')
+    i = s.find(b'\t')
     if i < 0:
-        i = s.find(' ')
+        i = s.find(b' ')
         if i < 0:
             return s
     return s[:i]
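
`parsefilename` takes a `---`/`+++` header line, drops the four-character prefix, and cuts at the first tab (or, failing that, the first space), which is where diff(1) traditionally appends a timestamp. Its behavior on a typical header line:

    # behavior of parsefilename() above on a typical unified diff header
    line = b'--- a/foo/bar.c\tSun Oct 06 09:45:02 2019 -0400'
    s = line[4:].rstrip(b'\r\n')     # drop the '--- ' prefix
    i = s.find(b'\t')                # timestamps follow a tab (or a space)
    assert s[:i] == b'a/foo/bar.c'
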
@@ -1687,7 +1691,7 @@
 
     newhunks = []
     for c in hunks:
-        if util.safehasattr(c, 'reversehunk'):
+        if util.safehasattr(c, b'reversehunk'):
             c = c.reversehunk()
         newhunks.append(c)
     return newhunks
@@ -1743,7 +1747,7 @@
         def __init__(self):
             self.fromline = 0
             self.toline = 0
-            self.proc = ''
+            self.proc = b''
             self.header = None
             self.context = []
             self.before = []
@@ -1798,35 +1802,39 @@
             return self.headers
 
         transitions = {
-            'file': {
-                'context': addcontext,
-                'file': newfile,
-                'hunk': addhunk,
-                'range': addrange,
+            b'file': {
+                b'context': addcontext,
+                b'file': newfile,
+                b'hunk': addhunk,
+                b'range': addrange,
             },
-            'context': {
-                'file': newfile,
-                'hunk': addhunk,
-                'range': addrange,
-                'other': addother,
+            b'context': {
+                b'file': newfile,
+                b'hunk': addhunk,
+                b'range': addrange,
+                b'other': addother,
             },
-            'hunk': {'context': addcontext, 'file': newfile, 'range': addrange},
-            'range': {'context': addcontext, 'hunk': addhunk},
-            'other': {'other': addother},
+            b'hunk': {
+                b'context': addcontext,
+                b'file': newfile,
+                b'range': addrange,
+            },
+            b'range': {b'context': addcontext, b'hunk': addhunk},
+            b'other': {b'other': addother},
         }
 
     p = parser()
     fp = stringio()
-    fp.write(''.join(originalchunks))
+    fp.write(b''.join(originalchunks))
     fp.seek(0)
 
-    state = 'context'
+    state = b'context'
     for newstate, data in scanpatch(fp):
         try:
             p.transitions[state][newstate](p, data)
         except KeyError:
             raise PatchError(
-                'unhandled transition: %s -> %s' % (state, newstate)
+                b'unhandled transition: %s -> %s' % (state, newstate)
             )
         state = newstate
     del fp
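
`parsepatch` is a table-driven state machine: `scanpatch` (below) classifies each run of lines as `file`, `range`, `hunk`, `context` or `other`, and the `transitions` table maps the current state plus the incoming event to a handler, raising `PatchError` for sequences that cannot occur in a well-formed patch. The same dispatch pattern in miniature, with hypothetical states and handlers:

    class MiniParser(object):
        def __init__(self):
            self.log = []

        def startfile(self, data):
            self.log.append((b'file', data))

        def addline(self, data):
            self.log.append((b'line', data))

        # handlers keyed by current state, then by incoming event
        transitions = {
            b'start': {b'file': startfile},
            b'file': {b'line': addline, b'file': startfile},
            b'line': {b'line': addline, b'file': startfile},
        }

    p = MiniParser()
    state = b'start'
    for newstate, data in [(b'file', b'a.txt'), (b'line', b'+x')]:
        p.transitions[state][newstate](p, data)
        state = newstate
    assert p.log == [(b'file', b'a.txt'), (b'line', b'+x')]
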
@@ -1857,26 +1865,26 @@
     pathlen = len(path)
     i = 0
     if strip == 0:
-        return '', prefix + path.rstrip()
+        return b'', prefix + path.rstrip()
     count = strip
     while count > 0:
-        i = path.find('/', i)
+        i = path.find(b'/', i)
         if i == -1:
             raise PatchError(
-                _("unable to strip away %d of %d dirs from %s")
+                _(b"unable to strip away %d of %d dirs from %s")
                 % (count, strip, path)
             )
         i += 1
         # consume '//' in the path
-        while i < pathlen - 1 and path[i : i + 1] == '/':
+        while i < pathlen - 1 and path[i : i + 1] == b'/':
             i += 1
         count -= 1
     return path[:i].lstrip(), prefix + path[i:].rstrip()
 
 
 def makepatchmeta(backend, afile_orig, bfile_orig, hunk, strip, prefix):
-    nulla = afile_orig == "/dev/null"
-    nullb = bfile_orig == "/dev/null"
+    nulla = afile_orig == b"/dev/null"
+    nullb = bfile_orig == b"/dev/null"
     create = nulla and hunk.starta == 0 and hunk.lena == 0
     remove = nullb and hunk.startb == 0 and hunk.lenb == 0
     abase, afile = pathtransform(afile_orig, strip, prefix)
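
`pathtransform` above splits a patch path into the stripped prefix and the remainder, consuming `strip` slash-separated components and collapsing doubled slashes along the way; `strip=1` is what undoes the conventional `a/`/`b/` prefixes. A standalone copy of that loop, with the gettext error message simplified, to show the splits it produces:

    # standalone copy of the stripping loop above, to show the splits
    def pathtransform(path, strip, prefix=b''):
        if strip == 0:
            return b'', prefix + path.rstrip()
        pathlen = len(path)
        i = 0
        count = strip
        while count > 0:
            i = path.find(b'/', i)
            if i == -1:
                raise ValueError('cannot strip %d dirs' % strip)
            i += 1
            # consume '//' in the path
            while i < pathlen - 1 and path[i:i + 1] == b'/':
                i += 1
            count -= 1
        return path[:i].lstrip(), prefix + path[i:].rstrip()

    assert pathtransform(b'a/dir/file.py', 1) == (b'a/', b'dir/file.py')
    assert pathtransform(b'x/y/z', 2) == (b'x/y/', b'z')
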
@@ -1890,8 +1898,8 @@
 
     # some diff programs apparently produce patches where the afile is
     # not /dev/null, but afile starts with bfile
-    abasedir = afile[: afile.rfind('/') + 1]
-    bbasedir = bfile[: bfile.rfind('/') + 1]
+    abasedir = afile[: afile.rfind(b'/') + 1]
+    bbasedir = bfile[: bfile.rfind(b'/') + 1]
     if (
         missing
         and abasedir == bbasedir
@@ -1925,13 +1933,13 @@
         elif not nulla:
             fname = afile
         else:
-            raise PatchError(_("undefined source and destination files"))
+            raise PatchError(_(b"undefined source and destination files"))
 
     gp = patchmeta(fname)
     if create:
-        gp.op = 'ADD'
+        gp.op = b'ADD'
     elif remove:
-        gp.op = 'DELETE'
+        gp.op = b'DELETE'
     return gp
 
 
@@ -1949,7 +1957,7 @@
     def scanwhile(first, p):
         """scan lr while predicate holds"""
         lines = [first]
-        for line in iter(lr.readline, ''):
+        for line in iter(lr.readline, b''):
             if p(line):
                 lines.append(line)
             else:
@@ -1957,33 +1965,33 @@
                 break
         return lines
 
-    for line in iter(lr.readline, ''):
-        if line.startswith('diff --git a/') or line.startswith('diff -r '):
+    for line in iter(lr.readline, b''):
+        if line.startswith(b'diff --git a/') or line.startswith(b'diff -r '):
 
             def notheader(line):
                 s = line.split(None, 1)
-                return not s or s[0] not in ('---', 'diff')
+                return not s or s[0] not in (b'---', b'diff')
 
             header = scanwhile(line, notheader)
             fromfile = lr.readline()
-            if fromfile.startswith('---'):
+            if fromfile.startswith(b'---'):
                 tofile = lr.readline()
                 header += [fromfile, tofile]
             else:
                 lr.push(fromfile)
-            yield 'file', header
-        elif line.startswith(' '):
-            cs = (' ', '\\')
-            yield 'context', scanwhile(line, lambda l: l.startswith(cs))
-        elif line.startswith(('-', '+')):
-            cs = ('-', '+', '\\')
-            yield 'hunk', scanwhile(line, lambda l: l.startswith(cs))
+            yield b'file', header
+        elif line.startswith(b' '):
+            cs = (b' ', b'\\')
+            yield b'context', scanwhile(line, lambda l: l.startswith(cs))
+        elif line.startswith((b'-', b'+')):
+            cs = (b'-', b'+', b'\\')
+            yield b'hunk', scanwhile(line, lambda l: l.startswith(cs))
         else:
             m = lines_re.match(line)
             if m:
-                yield 'range', m.groups()
+                yield b'range', m.groups()
             else:
-                yield 'other', line
+                yield b'other', line
 
 
 def scangitpatch(lr, firstline):
@@ -2021,8 +2029,8 @@
     - ("git", gitchanges): current diff is in git format, gitchanges
     maps filenames to gitpatch records. Unique event.
     """
-    afile = ""
-    bfile = ""
+    afile = b""
+    bfile = b""
     state = None
     hunknum = 0
     emitfile = newfile = False
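
As the docstring above describes, `iterhunks` flattens a patch stream into `('file', ...)`, `('hunk', ...)` and `('git', ...)` events; `_applydiff()` further down is the real consumer. A toy consumer in the same shape, fed a synthetic event stream:

    # toy consumer of an iterhunks()-style event stream; the real
    # consumer is _applydiff() below
    def countpatch(events):
        files = hunks = 0
        for state, values in events:
            if state == b'file':
                files += 1
            elif state == b'hunk':
                hunks += 1
            elif state != b'git':
                raise ValueError('unsupported parser state: %r' % state)
        return files, hunks

    stream = [(b'file', (b'a/f', b'b/f', None, None)), (b'hunk', object())]
    assert countpatch(stream) == (1, 1)
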
@@ -2033,66 +2041,71 @@
     context = None
     lr = linereader(fp)
 
-    for x in iter(lr.readline, ''):
+    for x in iter(lr.readline, b''):
         if state == BFILE and (
-            (not context and x.startswith('@'))
-            or (context is not False and x.startswith('***************'))
-            or x.startswith('GIT binary patch')
+            (not context and x.startswith(b'@'))
+            or (context is not False and x.startswith(b'***************'))
+            or x.startswith(b'GIT binary patch')
         ):
             gp = None
             if gitpatches and gitpatches[-1].ispatching(afile, bfile):
                 gp = gitpatches.pop()
-            if x.startswith('GIT binary patch'):
+            if x.startswith(b'GIT binary patch'):
                 h = binhunk(lr, gp.path)
             else:
-                if context is None and x.startswith('***************'):
+                if context is None and x.startswith(b'***************'):
                     context = True
                 h = hunk(x, hunknum + 1, lr, context)
             hunknum += 1
             if emitfile:
                 emitfile = False
-                yield 'file', (afile, bfile, h, gp and gp.copy() or None)
-            yield 'hunk', h
-        elif x.startswith('diff --git a/'):
-            m = gitre.match(x.rstrip(' \r\n'))
+                yield b'file', (afile, bfile, h, gp and gp.copy() or None)
+            yield b'hunk', h
+        elif x.startswith(b'diff --git a/'):
+            m = gitre.match(x.rstrip(b' \r\n'))
             if not m:
                 continue
             if gitpatches is None:
                 # scan whole input for git metadata
                 gitpatches = scangitpatch(lr, x)
-                yield 'git', [
-                    g.copy() for g in gitpatches if g.op in ('COPY', 'RENAME')
+                yield b'git', [
+                    g.copy() for g in gitpatches if g.op in (b'COPY', b'RENAME')
                 ]
                 gitpatches.reverse()
-            afile = 'a/' + m.group(1)
-            bfile = 'b/' + m.group(2)
+            afile = b'a/' + m.group(1)
+            bfile = b'b/' + m.group(2)
             while gitpatches and not gitpatches[-1].ispatching(afile, bfile):
                 gp = gitpatches.pop()
-                yield 'file', ('a/' + gp.path, 'b/' + gp.path, None, gp.copy())
+                yield b'file', (
+                    b'a/' + gp.path,
+                    b'b/' + gp.path,
+                    None,
+                    gp.copy(),
+                )
             if not gitpatches:
                 raise PatchError(
-                    _('failed to synchronize metadata for "%s"') % afile[2:]
+                    _(b'failed to synchronize metadata for "%s"') % afile[2:]
                 )
             newfile = True
-        elif x.startswith('---'):
+        elif x.startswith(b'---'):
             # check for a unified diff
             l2 = lr.readline()
-            if not l2.startswith('+++'):
+            if not l2.startswith(b'+++'):
                 lr.push(l2)
                 continue
             newfile = True
             context = False
             afile = parsefilename(x)
             bfile = parsefilename(l2)
-        elif x.startswith('***'):
+        elif x.startswith(b'***'):
             # check for a context diff
             l2 = lr.readline()
-            if not l2.startswith('---'):
+            if not l2.startswith(b'---'):
                 lr.push(l2)
                 continue
             l3 = lr.readline()
             lr.push(l3)
-            if not l3.startswith("***************"):
+            if not l3.startswith(b"***************"):
                 lr.push(l2)
                 continue
             newfile = True
@@ -2108,7 +2121,7 @@
 
     while gitpatches:
         gp = gitpatches.pop()
-        yield 'file', ('a/' + gp.path, 'b/' + gp.path, None, gp.copy())
+        yield b'file', (b'a/' + gp.path, b'b/' + gp.path, None, gp.copy())
 
 
 def applybindelta(binchunk, data):
@@ -2124,7 +2137,7 @@
                 return i
         return i
 
-    out = ""
+    out = b""
     s = deltahead(binchunk)
     binchunk = binchunk[s:]
     s = deltahead(binchunk)
@@ -2166,11 +2179,11 @@
             out += binchunk[i:offset_end]
             i += cmd
         else:
-            raise PatchError(_('unexpected delta opcode 0'))
+            raise PatchError(_(b'unexpected delta opcode 0'))
     return out
 
 
-def applydiff(ui, fp, backend, store, strip=1, prefix='', eolmode='strict'):
+def applydiff(ui, fp, backend, store, strip=1, prefix=b'', eolmode=b'strict'):
     """Reads a patch from fp and tries to apply it.
 
     Returns 0 for a clean patch, -1 if any rejects were found and 1 if
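
`applybindelta` above decodes git's binary delta encoding: the header is two little-endian base-128 varints (source and result sizes, which `deltahead` merely skips), followed by opcodes where a set high bit means copy a range from the source, a nonzero byte without the high bit means insert the next `cmd` bytes literally, and zero is reserved, hence the `PatchError` for opcode 0. A minimal varint reader matching what `deltahead` skips over:

    def parsevarint(data):
        """Decode one little-endian base-128 varint.

        Returns (value, bytesused); deltahead() in applybindelta() above
        only needs the byte count, not the value.
        """
        value = shift = i = 0
        while True:
            byte = data[i]          # py3: indexing bytes yields an int
            value |= (byte & 0x7f) << shift
            shift += 7
            i += 1
            if not byte & 0x80:     # a clear high bit ends the varint
                return value, i

    # 300 = 0b100101100 -> 0xac 0x02 in base-128 little-endian
    assert parsevarint(b'\xac\x02') == (300, 2)
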
@@ -2195,13 +2208,13 @@
 def _canonprefix(repo, prefix):
     if prefix:
         prefix = pathutil.canonpath(repo.root, repo.getcwd(), prefix)
-        if prefix != '':
-            prefix += '/'
+        if prefix != b'':
+            prefix += b'/'
     return prefix
 
 
 def _applydiff(
-    ui, fp, patcher, backend, store, strip=1, prefix='', eolmode='strict'
+    ui, fp, patcher, backend, store, strip=1, prefix=b'', eolmode=b'strict'
 ):
     prefix = _canonprefix(backend.repo, prefix)
 
@@ -2213,13 +2226,13 @@
     current_file = None
 
     for state, values in iterhunks(fp):
-        if state == 'hunk':
+        if state == b'hunk':
             if not current_file:
                 continue
             ret = current_file.apply(values)
             if ret > 0:
                 err = 1
-        elif state == 'file':
+        elif state == b'file':
             if current_file:
                 rejects += current_file.close()
                 current_file = None
@@ -2232,32 +2245,35 @@
                 gp = makepatchmeta(
                     backend, afile, bfile, first_hunk, strip, prefix
                 )
-            if gp.op == 'RENAME':
+            if gp.op == b'RENAME':
                 backend.unlink(gp.oldpath)
             if not first_hunk:
-                if gp.op == 'DELETE':
+                if gp.op == b'DELETE':
                     backend.unlink(gp.path)
                     continue
                 data, mode = None, None
-                if gp.op in ('RENAME', 'COPY'):
+                if gp.op in (b'RENAME', b'COPY'):
                     data, mode = store.getfile(gp.oldpath)[:2]
                     if data is None:
                         # This means that the old path does not exist
                         raise PatchError(
-                            _("source file '%s' does not exist") % gp.oldpath
+                            _(b"source file '%s' does not exist") % gp.oldpath
                         )
                 if gp.mode:
                     mode = gp.mode
-                    if gp.op == 'ADD':
+                    if gp.op == b'ADD':
                         # Added files without content have no hunk and
                         # must be created
-                        data = ''
+                        data = b''
                 if data or mode:
-                    if gp.op in ('ADD', 'RENAME', 'COPY') and backend.exists(
+                    if gp.op in (b'ADD', b'RENAME', b'COPY') and backend.exists(
                         gp.path
                     ):
                         raise PatchError(
-                            _("cannot create %s: destination " "already exists")
+                            _(
+                                b"cannot create %s: destination "
+                                b"already exists"
+                            )
                             % gp.path
                         )
                     backend.setfile(gp.path, data, mode, gp.oldpath)
@@ -2265,11 +2281,11 @@
             try:
                 current_file = patcher(ui, gp, backend, store, eolmode=eolmode)
             except PatchError as inst:
-                ui.warn(str(inst) + '\n')
+                ui.warn(str(inst) + b'\n')
                 current_file = None
                 rejects += 1
                 continue
-        elif state == 'git':
+        elif state == b'git':
             for gp in values:
                 path = pstrip(gp.oldpath)
                 data, mode = backend.getfile(path)
@@ -2282,7 +2298,7 @@
                 else:
                     store.setfile(path, data, mode)
         else:
-            raise error.Abort(_('unsupported parser state: %s') % state)
+            raise error.Abort(_(b'unsupported parser state: %s') % state)
 
     if current_file:
         rejects += current_file.close()
@@ -2300,61 +2316,61 @@
     args = []
     cwd = repo.root
     if cwd:
-        args.append('-d %s' % procutil.shellquote(cwd))
-    cmd = '%s %s -p%d < %s' % (
+        args.append(b'-d %s' % procutil.shellquote(cwd))
+    cmd = b'%s %s -p%d < %s' % (
         patcher,
-        ' '.join(args),
+        b' '.join(args),
         strip,
         procutil.shellquote(patchname),
     )
-    ui.debug('Using external patch tool: %s\n' % cmd)
-    fp = procutil.popen(cmd, 'rb')
+    ui.debug(b'Using external patch tool: %s\n' % cmd)
+    fp = procutil.popen(cmd, b'rb')
     try:
         for line in util.iterfile(fp):
             line = line.rstrip()
-            ui.note(line + '\n')
-            if line.startswith('patching file '):
+            ui.note(line + b'\n')
+            if line.startswith(b'patching file '):
                 pf = util.parsepatchoutput(line)
                 printed_file = False
                 files.add(pf)
-            elif line.find('with fuzz') >= 0:
+            elif line.find(b'with fuzz') >= 0:
                 fuzz = True
                 if not printed_file:
-                    ui.warn(pf + '\n')
+                    ui.warn(pf + b'\n')
                     printed_file = True
-                ui.warn(line + '\n')
-            elif line.find('saving rejects to file') >= 0:
-                ui.warn(line + '\n')
-            elif line.find('FAILED') >= 0:
+                ui.warn(line + b'\n')
+            elif line.find(b'saving rejects to file') >= 0:
+                ui.warn(line + b'\n')
+            elif line.find(b'FAILED') >= 0:
                 if not printed_file:
-                    ui.warn(pf + '\n')
+                    ui.warn(pf + b'\n')
                     printed_file = True
-                ui.warn(line + '\n')
+                ui.warn(line + b'\n')
     finally:
         if files:
             scmutil.marktouched(repo, files, similarity)
     code = fp.close()
     if code:
         raise PatchError(
-            _("patch command failed: %s") % procutil.explainexit(code)
+            _(b"patch command failed: %s") % procutil.explainexit(code)
         )
     return fuzz
 
 
 def patchbackend(
-    ui, backend, patchobj, strip, prefix, files=None, eolmode='strict'
+    ui, backend, patchobj, strip, prefix, files=None, eolmode=b'strict'
 ):
     if files is None:
         files = set()
     if eolmode is None:
-        eolmode = ui.config('patch', 'eol')
+        eolmode = ui.config(b'patch', b'eol')
     if eolmode.lower() not in eolmodes:
-        raise error.Abort(_('unsupported line endings type: %s') % eolmode)
+        raise error.Abort(_(b'unsupported line endings type: %s') % eolmode)
     eolmode = eolmode.lower()
 
     store = filestore()
     try:
-        fp = open(patchobj, 'rb')
+        fp = open(patchobj, b'rb')
     except TypeError:
         fp = patchobj
     try:
@@ -2367,7 +2383,7 @@
         files.update(backend.close())
         store.close()
     if ret < 0:
-        raise PatchError(_('patch failed to apply'))
+        raise PatchError(_(b'patch failed to apply'))
     return ret > 0
 
 
@@ -2376,9 +2392,9 @@
     repo,
     patchobj,
     strip,
-    prefix='',
+    prefix=b'',
     files=None,
-    eolmode='strict',
+    eolmode=b'strict',
     similarity=0,
 ):
     """use builtin patch to apply <patchobj> to the working directory.
@@ -2388,7 +2404,7 @@
 
 
 def patchrepo(
-    ui, repo, ctx, store, patchobj, strip, prefix, files=None, eolmode='strict'
+    ui, repo, ctx, store, patchobj, strip, prefix, files=None, eolmode=b'strict'
 ):
     backend = repobackend(ui, repo, ctx, store)
     return patchbackend(ui, backend, patchobj, strip, prefix, files, eolmode)
@@ -2399,9 +2415,9 @@
     repo,
     patchname,
     strip=1,
-    prefix='',
+    prefix=b'',
     files=None,
-    eolmode='strict',
+    eolmode=b'strict',
     similarity=0,
 ):
     """Apply <patchname> to the working directory.
@@ -2415,7 +2431,7 @@
 
     Returns whether patch was applied with fuzz factor.
     """
-    patcher = ui.config('ui', 'patch')
+    patcher = ui.config(b'ui', b'patch')
     if files is None:
         files = set()
     if patcher:
@@ -2427,13 +2443,13 @@
     )
 
 
-def changedfiles(ui, repo, patchpath, strip=1, prefix=''):
+def changedfiles(ui, repo, patchpath, strip=1, prefix=b''):
     backend = fsbackend(ui, repo.root)
     prefix = _canonprefix(repo, prefix)
-    with open(patchpath, 'rb') as fp:
+    with open(patchpath, b'rb') as fp:
         changed = set()
         for state, values in iterhunks(fp):
-            if state == 'file':
+            if state == b'file':
                 afile, bfile, first_hunk, gp = values
                 if gp:
                     gp.path = pathtransform(gp.path, strip - 1, prefix)[1]
@@ -2446,10 +2462,10 @@
                         backend, afile, bfile, first_hunk, strip, prefix
                     )
                 changed.add(gp.path)
-                if gp.op == 'RENAME':
+                if gp.op == b'RENAME':
                     changed.add(gp.oldpath)
-            elif state not in ('hunk', 'git'):
-                raise error.Abort(_('unsupported parser state: %s') % state)
+            elif state not in (b'hunk', b'git'):
+                raise error.Abort(_(b'unsupported parser state: %s') % state)
         return changed
 
 
@@ -2528,11 +2544,11 @@
             # logcmdutil.getlinerangerevs() for 'hg log -L'.
             assert (
                 fctx2 is not None
-            ), 'fctx2 unexpectly None in diff hunks filtering'
+            ), b'fctx2 unexpectly None in diff hunks filtering'
             hunks = hunksfilterfn(fctx2, hunks)
-        text = ''.join(sum((list(hlines) for hrange, hlines in hunks), []))
+        text = b''.join(sum((list(hlines) for hrange, hlines in hunks), []))
         if hdr and (text or len(hdr) > 1):
-            yield '\n'.join(hdr) + '\n'
+            yield b'\n'.join(hdr) + b'\n'
         if text:
             yield text
 
@@ -2666,39 +2682,39 @@
     """yield tokens for a list of lines in a single hunk"""
     for line in hunklines:
         # chomp
-        chompline = line.rstrip('\r\n')
+        chompline = line.rstrip(b'\r\n')
         # highlight tabs and trailing whitespace
         stripline = chompline.rstrip()
-        if line.startswith('-'):
-            label = 'diff.deleted'
-        elif line.startswith('+'):
-            label = 'diff.inserted'
+        if line.startswith(b'-'):
+            label = b'diff.deleted'
+        elif line.startswith(b'+'):
+            label = b'diff.inserted'
         else:
-            raise error.ProgrammingError('unexpected hunk line: %s' % line)
+            raise error.ProgrammingError(b'unexpected hunk line: %s' % line)
         for token in tabsplitter.findall(stripline):
-            if token.startswith('\t'):
-                yield (token, 'diff.tab')
+            if token.startswith(b'\t'):
+                yield (token, b'diff.tab')
             else:
                 yield (token, label)
 
         if chompline != stripline:
-            yield (chompline[len(stripline) :], 'diff.trailingwhitespace')
+            yield (chompline[len(stripline) :], b'diff.trailingwhitespace')
         if chompline != line:
-            yield (line[len(chompline) :], '')
+            yield (line[len(chompline) :], b'')
 
 
 def diffsinglehunkinline(hunklines):
     """yield tokens for a list of lines in a single hunk, with inline colors"""
     # prepare deleted, and inserted content
-    a = ''
-    b = ''
+    a = b''
+    b = b''
     for line in hunklines:
-        if line[0:1] == '-':
+        if line[0:1] == b'-':
             a += line[1:]
-        elif line[0:1] == '+':
+        elif line[0:1] == b'+':
             b += line[1:]
         else:
-            raise error.ProgrammingError('unexpected hunk line: %s' % line)
+            raise error.ProgrammingError(b'unexpected hunk line: %s' % line)
     # fast path: if either side is empty, use diffsinglehunk
     if not a or not b:
         for t in diffsinglehunk(hunklines):
@@ -2708,25 +2724,25 @@
     al = wordsplitter.findall(a)
     bl = wordsplitter.findall(b)
     # re-arrange the words to lines since the diff algorithm is line-based
-    aln = [s if s == '\n' else s + '\n' for s in al]
-    bln = [s if s == '\n' else s + '\n' for s in bl]
-    an = ''.join(aln)
-    bn = ''.join(bln)
+    aln = [s if s == b'\n' else s + b'\n' for s in al]
+    bln = [s if s == b'\n' else s + b'\n' for s in bl]
+    an = b''.join(aln)
+    bn = b''.join(bln)
     # run the diff algorithm, prepare atokens and btokens
     atokens = []
     btokens = []
     blocks = mdiff.allblocks(an, bn, lines1=aln, lines2=bln)
     for (a1, a2, b1, b2), btype in blocks:
-        changed = btype == '!'
-        for token in mdiff.splitnewlines(''.join(al[a1:a2])):
+        changed = btype == b'!'
+        for token in mdiff.splitnewlines(b''.join(al[a1:a2])):
             atokens.append((changed, token))
-        for token in mdiff.splitnewlines(''.join(bl[b1:b2])):
+        for token in mdiff.splitnewlines(b''.join(bl[b1:b2])):
             btokens.append((changed, token))
 
     # yield deleted tokens, then inserted ones
     for prefix, label, tokens in [
-        ('-', 'diff.deleted', atokens),
-        ('+', 'diff.inserted', btokens),
+        (b'-', b'diff.deleted', atokens),
+        (b'+', b'diff.inserted', btokens),
     ]:
         nextisnewline = True
         for changed, token in tokens:
@@ -2734,10 +2750,10 @@
                 yield (prefix, label)
                 nextisnewline = False
             # special handling line end
-            isendofline = token.endswith('\n')
+            isendofline = token.endswith(b'\n')
             if isendofline:
                 chomp = token[:-1]  # chomp
-                if chomp.endswith('\r'):
+                if chomp.endswith(b'\r'):
                     chomp = chomp[:-1]
                 endofline = token[len(chomp) :]
                 token = chomp.rstrip()  # detect spaces at the end
@@ -2745,17 +2761,17 @@
             # scan tabs
             for maybetab in tabsplitter.findall(token):
                 if b'\t' == maybetab[0:1]:
-                    currentlabel = 'diff.tab'
+                    currentlabel = b'diff.tab'
                 else:
                     if changed:
-                        currentlabel = label + '.changed'
+                        currentlabel = label + b'.changed'
                     else:
-                        currentlabel = label + '.unchanged'
+                        currentlabel = label + b'.unchanged'
                 yield (maybetab, currentlabel)
             if isendofline:
                 if endspaces:
-                    yield (endspaces, 'diff.trailingwhitespace')
-                yield (endofline, '')
+                    yield (endspaces, b'diff.trailingwhitespace')
+                yield (endofline, b'')
                 nextisnewline = True
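
`diffsinglehunkinline` recovers word-level changes by splitting both sides into words, diffing the words as if they were lines via `mdiff.allblocks`, and then labelling each token with a `.changed` or `.unchanged` suffix on top of the usual `diff.tab` and `diff.trailingwhitespace` labels. The idea can be sketched with only the standard library, assuming a crude whitespace split in place of Mercurial's `wordsplitter` regex:

    import difflib
    import re

    def inlinewords(a, b):
        """Yield (changed, word) for the b side of a word-level diff."""
        # crude stand-in for mercurial.patch.wordsplitter
        al = re.findall(br'\s+|\S+', a)
        bl = re.findall(br'\s+|\S+', b)
        matcher = difflib.SequenceMatcher(None, al, bl)
        for tag, _a1, _a2, b1, b2 in matcher.get_opcodes():
            for word in bl[b1:b2]:
                yield (tag != 'equal', word)

    tokens = list(inlinewords(b'the quick fox', b'the slow fox'))
    assert (True, b'slow') in tokens
    assert (False, b'fox') in tokens
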
 
 
@@ -2766,19 +2782,19 @@
     else:
         dodiffhunk = diffsinglehunk
     headprefixes = [
-        ('diff', 'diff.diffline'),
-        ('copy', 'diff.extended'),
-        ('rename', 'diff.extended'),
-        ('old', 'diff.extended'),
-        ('new', 'diff.extended'),
-        ('deleted', 'diff.extended'),
-        ('index', 'diff.extended'),
-        ('similarity', 'diff.extended'),
-        ('---', 'diff.file_a'),
-        ('+++', 'diff.file_b'),
+        (b'diff', b'diff.diffline'),
+        (b'copy', b'diff.extended'),
+        (b'rename', b'diff.extended'),
+        (b'old', b'diff.extended'),
+        (b'new', b'diff.extended'),
+        (b'deleted', b'diff.extended'),
+        (b'index', b'diff.extended'),
+        (b'similarity', b'diff.extended'),
+        (b'---', b'diff.file_a'),
+        (b'+++', b'diff.file_b'),
     ]
     textprefixes = [
-        ('@', 'diff.hunk'),
+        (b'@', b'diff.hunk'),
         # - and + are handled by diffsinglehunk
     ]
     head = False
@@ -2793,17 +2809,19 @@
             hunkbuffer[:] = []
 
     for chunk in func(*args, **kw):
-        lines = chunk.split('\n')
+        lines = chunk.split(b'\n')
         linecount = len(lines)
         for i, line in enumerate(lines):
             if head:
-                if line.startswith('@'):
+                if line.startswith(b'@'):
                     head = False
             else:
-                if line and not line.startswith((' ', '+', '-', '@', '\\')):
+                if line and not line.startswith(
+                    (b' ', b'+', b'-', b'@', b'\\')
+                ):
                     head = True
             diffline = False
-            if not head and line and line.startswith(('+', '-')):
+            if not head and line and line.startswith((b'+', b'-')):
                 diffline = True
 
             prefixes = textprefixes
@@ -2813,7 +2831,7 @@
                 # buffered
                 bufferedline = line
                 if i + 1 < linecount:
-                    bufferedline += "\n"
+                    bufferedline += b"\n"
                 hunkbuffer.append(bufferedline)
             else:
                 # unbuffered
@@ -2826,13 +2844,13 @@
                         if line != stripline:
                             yield (
                                 line[len(stripline) :],
-                                'diff.trailingwhitespace',
+                                b'diff.trailingwhitespace',
                             )
                         break
                 else:
-                    yield (line, '')
+                    yield (line, b'')
                 if i + 1 < linecount:
-                    yield ('\n', '')
+                    yield (b'\n', b'')
         for token in consumehunkbuffer():
             yield token
 
@@ -2862,10 +2880,10 @@
                 if opts.git:
                     f1 = copy[f]
                     if f1 in removedset and f1 not in gone:
-                        copyop = 'rename'
+                        copyop = b'rename'
                         gone.add(f1)
                     else:
-                        copyop = 'copy'
+                        copyop = b'copy'
         elif f in removedset:
             f2 = None
             if opts.git:
@@ -2903,21 +2921,21 @@
 
     def gitindex(text):
         if not text:
-            text = ""
+            text = b""
         l = len(text)
-        s = hashlib.sha1('blob %d\0' % l)
+        s = hashlib.sha1(b'blob %d\0' % l)
         s.update(text)
         return hex(s.digest())
 
     if opts.noprefix:
-        aprefix = bprefix = ''
+        aprefix = bprefix = b''
     else:
-        aprefix = 'a/'
-        bprefix = 'b/'
+        aprefix = b'a/'
+        bprefix = b'b/'
 
     def diffline(f, revs):
-        revinfo = ' '.join(["-r %s" % rev for rev in revs])
-        return 'diff %s %s' % (revinfo, f)
+        revinfo = b' '.join([b"-r %s" % rev for rev in revs])
+        return b'diff %s %s' % (revinfo, f)
 
     def isempty(fctx):
         return fctx is None or fctx.size() == 0
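
`gitindex` above computes the same object id git itself assigns to a blob, SHA-1 over a `blob <size>\0` header plus the content, so the `index ...` lines in git-style diffs name real repository objects. A quick check against the well-known hash of `b'hello\n'`:

    import hashlib

    def gitindex(text):
        s = hashlib.sha1(b'blob %d\0' % len(text))
        s.update(text)
        return s.hexdigest()

    # matches `echo hello | git hash-object --stdin`
    assert gitindex(b'hello\n') == 'ce013625030ba8dba906f756967f9e9ca394464a'
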
@@ -2925,7 +2943,7 @@
     date1 = dateutil.datestr(ctx1.date())
     date2 = dateutil.datestr(ctx2.date())
 
-    gitmode = {'l': '120000', 'x': '100755', '': '100644'}
+    gitmode = {b'l': b'120000', b'x': b'100755', b'': b'100644'}
 
     if not pathfn:
         pathfn = lambda f: f
@@ -2977,23 +2995,23 @@
         header = []
         if opts.git:
             header.append(
-                'diff --git %s%s %s%s' % (aprefix, path1, bprefix, path2)
+                b'diff --git %s%s %s%s' % (aprefix, path1, bprefix, path2)
             )
             if not f1:  # added
-                header.append('new file mode %s' % gitmode[flag2])
+                header.append(b'new file mode %s' % gitmode[flag2])
             elif not f2:  # removed
-                header.append('deleted file mode %s' % gitmode[flag1])
+                header.append(b'deleted file mode %s' % gitmode[flag1])
             else:  # modified/copied/renamed
                 mode1, mode2 = gitmode[flag1], gitmode[flag2]
                 if mode1 != mode2:
-                    header.append('old mode %s' % mode1)
-                    header.append('new mode %s' % mode2)
+                    header.append(b'old mode %s' % mode1)
+                    header.append(b'new mode %s' % mode2)
                 if copyop is not None:
                     if opts.showsimilarity:
                         sim = similar.score(ctx1[path1], ctx2[path2]) * 100
-                        header.append('similarity index %d%%' % sim)
-                    header.append('%s from %s' % (copyop, path1))
-                    header.append('%s to %s' % (copyop, path2))
+                        header.append(b'similarity index %d%%' % sim)
+                    header.append(b'%s from %s' % (copyop, path1))
+                    header.append(b'%s to %s' % (copyop, path2))
         elif revs:
             header.append(diffline(path1, revs))
 
@@ -3032,7 +3050,7 @@
             text = mdiff.b85diff(content1, content2)
             if text:
                 header.append(
-                    'index %s..%s' % (gitindex(content1), gitindex(content2))
+                    b'index %s..%s' % (gitindex(content1), gitindex(content2))
                 )
             hunks = ((None, [text]),)
         else:
@@ -3041,7 +3059,7 @@
                 if flag is None:
                     flag = flag2
                 header.append(
-                    'index %s..%s %s'
+                    b'index %s..%s %s'
                     % (
                         gitindex(content1)[0 : opts.index],
                         gitindex(content2)[0 : opts.index],
@@ -3091,31 +3109,31 @@
     inheader = False
 
     for line in lines:
-        if line.startswith('diff'):
+        if line.startswith(b'diff'):
             addresult()
             # starting a new file diff
             # set numbers to 0 and reset inheader
             inheader = True
             adds, removes, isbinary = 0, 0, False
-            if line.startswith('diff --git a/'):
+            if line.startswith(b'diff --git a/'):
                 filename = gitre.search(line).group(2)
-            elif line.startswith('diff -r'):
+            elif line.startswith(b'diff -r'):
                 # format: "diff -r ... -r ... filename"
                 filename = diffre.search(line).group(1)
-        elif line.startswith('@@'):
+        elif line.startswith(b'@@'):
             inheader = False
-        elif line.startswith('+') and not inheader:
+        elif line.startswith(b'+') and not inheader:
             adds += 1
-        elif line.startswith('-') and not inheader:
+        elif line.startswith(b'-') and not inheader:
             removes += 1
-        elif line.startswith('GIT binary patch') or line.startswith(
-            'Binary file'
+        elif line.startswith(b'GIT binary patch') or line.startswith(
+            b'Binary file'
         ):
             isbinary = True
-        elif line.startswith('rename from'):
+        elif line.startswith(b'rename from'):
             filename = line[12:]
-        elif line.startswith('rename to'):
-            filename += ' => %s' % line[10:]
+        elif line.startswith(b'rename to'):
+            filename += b' => %s' % line[10:]
     addresult()
     return results
 
@@ -3142,16 +3160,16 @@
 
     for filename, adds, removes, isbinary in stats:
         if isbinary:
-            count = 'Bin'
+            count = b'Bin'
         else:
-            count = '%d' % (adds + removes)
-        pluses = '+' * scale(adds)
-        minuses = '-' * scale(removes)
+            count = b'%d' % (adds + removes)
+        pluses = b'+' * scale(adds)
+        minuses = b'-' * scale(removes)
         output.append(
-            ' %s%s |  %*s %s%s\n'
+            b' %s%s |  %*s %s%s\n'
             % (
                 filename,
-                ' ' * (maxname - encoding.colwidth(filename)),
+                b' ' * (maxname - encoding.colwidth(filename)),
                 countwidth,
                 count,
                 pluses,
@@ -3161,11 +3179,11 @@
 
     if stats:
         output.append(
-            _(' %d files changed, %d insertions(+), ' '%d deletions(-)\n')
+            _(b' %d files changed, %d insertions(+), ' b'%d deletions(-)\n')
             % (len(stats), totaladds, totalremoves)
         )
 
-    return ''.join(output)
+    return b''.join(output)
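
With the byteified format string, a diffstat row renders as the filename, a padded change count, and proportional `+`/`-` bars; `scale()` (defined just above this excerpt) shrinks the bars so the widest row fits the graph width while a nonzero count keeps at least one glyph. A simplified re-creation of the row format, with the alignment padding between filename and `|` omitted and a hypothetical `scale()` under assumed widths:

    # simplified re-creation of the row format above; scale() here is a
    # hypothetical stand-in and the alignment padding is omitted
    def renderrow(filename, adds, removes,
                  countwidth=3, graphwidth=53, maxtotal=100):
        def scale(i):
            if maxtotal <= graphwidth:
                return i
            # keep at least one glyph for any nonzero count
            return max(i * graphwidth // maxtotal, int(bool(i)))

        count = b'%d' % (adds + removes)
        return b' %s |  %*s %s%s\n' % (
            filename,
            countwidth,
            count,
            b'+' * scale(adds),
            b'-' * scale(removes),
        )

    row = renderrow(b'mercurial/patch.py', 80, 20)
    assert row.count(b'+') == 42 and row.count(b'-') == 10
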
 
 
 def diffstatui(*args, **kw):
@@ -3174,15 +3192,15 @@
     '''
 
     for line in diffstat(*args, **kw).splitlines():
-        if line and line[-1] in '+-':
-            name, graph = line.rsplit(' ', 1)
-            yield (name + ' ', '')
+        if line and line[-1] in b'+-':
+            name, graph = line.rsplit(b' ', 1)
+            yield (name + b' ', b'')
             m = re.search(br'\++', graph)
             if m:
-                yield (m.group(0), 'diffstat.inserted')
+                yield (m.group(0), b'diffstat.inserted')
             m = re.search(br'-+', graph)
             if m:
-                yield (m.group(0), 'diffstat.deleted')
+                yield (m.group(0), b'diffstat.deleted')
         else:
-            yield (line, '')
-        yield ('\n', '')
+            yield (line, b'')
+        yield (b'\n', b'')
--- a/mercurial/pathutil.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/pathutil.py	Sun Oct 06 09:48:39 2019 -0400
@@ -63,30 +63,30 @@
             return
         # AIX ignores "/" at end of path, others raise EISDIR.
         if util.endswithsep(path):
-            raise error.Abort(_("path ends in directory separator: %s") % path)
+            raise error.Abort(_(b"path ends in directory separator: %s") % path)
         parts = util.splitpath(path)
         if (
             os.path.splitdrive(path)[0]
-            or _lowerclean(parts[0]) in ('.hg', '.hg.', '')
+            or _lowerclean(parts[0]) in (b'.hg', b'.hg.', b'')
             or pycompat.ospardir in parts
         ):
-            raise error.Abort(_("path contains illegal component: %s") % path)
+            raise error.Abort(_(b"path contains illegal component: %s") % path)
         # Windows shortname aliases
         for p in parts:
-            if "~" in p:
-                first, last = p.split("~", 1)
-                if last.isdigit() and first.upper() in ["HG", "HG8B6C"]:
+            if b"~" in p:
+                first, last = p.split(b"~", 1)
+                if last.isdigit() and first.upper() in [b"HG", b"HG8B6C"]:
                     raise error.Abort(
-                        _("path contains illegal component: %s") % path
+                        _(b"path contains illegal component: %s") % path
                     )
-        if '.hg' in _lowerclean(path):
+        if b'.hg' in _lowerclean(path):
             lparts = [_lowerclean(p.lower()) for p in parts]
-            for p in '.hg', '.hg.':
+            for p in b'.hg', b'.hg.':
                 if p in lparts[1:]:
                     pos = lparts.index(p)
                     base = os.path.join(*parts[:pos])
                     raise error.Abort(
-                        _("path '%s' is inside nested repo %r")
+                        _(b"path '%s' is inside nested repo %r")
                         % (path, pycompat.bytestr(base))
                     )
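
The auditor above rejects paths that could escape or alias the repository metadata: trailing separators, drive prefixes, `..` components, anything that case-folds to `.hg`, and Windows 8.3 shortname aliases such as `HG~1` (with `HG8B6C` reportedly being a hash-based shortname some Windows systems generate for `.hg`). The shortname test alone, as a toy predicate:

    def isshortnamealias(component):
        """True if a path component looks like a Windows 8.3 alias for .hg."""
        if b'~' not in component:
            return False
        first, last = component.split(b'~', 1)
        return last.isdigit() and first.upper() in (b'HG', b'HG8B6C')

    assert isshortnamealias(b'HG~1')
    assert isshortnamealias(b'hg8b6c~2')
    assert not isshortnamealias(b'backup~old')
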
 
@@ -126,16 +126,16 @@
                 raise
         else:
             if stat.S_ISLNK(st.st_mode):
-                msg = _('path %r traverses symbolic link %r') % (
+                msg = _(b'path %r traverses symbolic link %r') % (
                     pycompat.bytestr(path),
                     pycompat.bytestr(prefix),
                 )
                 raise error.Abort(msg)
             elif stat.S_ISDIR(st.st_mode) and os.path.isdir(
-                os.path.join(curpath, '.hg')
+                os.path.join(curpath, b'.hg')
             ):
                 if not self.callback or not self.callback(curpath):
-                    msg = _("path '%s' is inside nested repo %r")
+                    msg = _(b"path '%s' is inside nested repo %r")
                     raise error.Abort(msg % (path, pycompat.bytestr(prefix)))
 
     def check(self, path):
@@ -203,7 +203,7 @@
         auditor(name)
         return util.pconvert(name)
     elif name == root:
-        return ''
+        return b''
     else:
         # Determine whether `name' is in the hierarchy at or beneath `root',
         # by iterating name=dirname(name) until that causes no change (can't
@@ -219,7 +219,7 @@
             if s:
                 if not rel:
                     # name was actually the same as root (maybe a symlink)
-                    return ''
+                    return b''
                 rel.reverse()
                 name = os.path.join(*rel)
                 auditor(name)
@@ -236,15 +236,15 @@
         try:
             if cwd != root:
                 canonpath(root, root, myname, auditor)
-                relpath = util.pathto(root, cwd, '')
+                relpath = util.pathto(root, cwd, b'')
                 if relpath.endswith(pycompat.ossep):
                     relpath = relpath[:-1]
-                hint = _("consider using '--cwd %s'") % relpath
+                hint = _(b"consider using '--cwd %s'") % relpath
         except error.Abort:
             pass
 
         raise error.Abort(
-            _("%s not under root '%s'") % (myname, root), hint=hint
+            _(b"%s not under root '%s'") % (myname, root), hint=hint
         )
 
 
--- a/mercurial/phases.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/phases.py	Sun Oct 06 09:48:39 2019 -0400
@@ -121,7 +121,7 @@
     util,
 )
 
-_fphasesentry = struct.Struct('>i20s')
+_fphasesentry = struct.Struct(b'>i20s')
 
 INTERNAL_FLAG = 64  # Phases for mercurial internal usage only
 HIDEABLE_FLAG = 32  # Phases that are hideable
@@ -133,11 +133,11 @@
 allphases = range(internal + 1)
 trackedphases = allphases[1:]
 # record phase names
-cmdphasenames = ['public', 'draft', 'secret']  # known to `hg phase` command
+cmdphasenames = [b'public', b'draft', b'secret']  # known to `hg phase` command
 phasenames = [None] * len(allphases)
 phasenames[: len(cmdphasenames)] = cmdphasenames
-phasenames[archived] = 'archived'
-phasenames[internal] = 'internal'
+phasenames[archived] = b'archived'
+phasenames[internal] = b'internal'
 # record phase property
 mutablephases = tuple(allphases[1:])
 remotehiddenphases = tuple(allphases[2:])
@@ -146,7 +146,7 @@
 
 def supportinternal(repo):
     """True if the internal phase can be used on a repository"""
-    return 'internal-phase' in repo.requirements
+    return b'internal-phase' in repo.requirements
 
 
 def _readroots(repo, phasedefaults=None):
@@ -164,7 +164,7 @@
     dirty = False
     roots = [set() for i in allphases]
     try:
-        f, pending = txnutil.trypending(repo.root, repo.svfs, 'phaseroots')
+        f, pending = txnutil.trypending(repo.root, repo.svfs, b'phaseroots')
         try:
             for line in f:
                 phase, nh = line.split()
@@ -191,7 +191,7 @@
     for phase, nodes in enumerate(phasemapping):
         for head in nodes:
             binarydata.append(_fphasesentry.pack(phase, head))
-    return ''.join(binarydata)
+    return b''.join(binarydata)
 
 
 def binarydecode(stream):
@@ -204,7 +204,7 @@
         entry = stream.read(entrysize)
         if len(entry) < entrysize:
             if entry:
-                raise error.Abort(_('bad phase-heads stream'))
+                raise error.Abort(_(b'bad phase-heads stream'))
             break
         phase, node = _fphasesentry.unpack(entry)
         headsbyphase[phase].append(node)
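
The phase-heads wire format written by `binaryencode` and read by `binarydecode` above is just a sequence of fixed 24-byte records packed with `_fphasesentry` (`>i20s`: a big-endian 32-bit phase number followed by the 20-byte node). A round trip of one record:

    import struct

    _fphasesentry = struct.Struct(b'>i20s')

    node = b'\x11' * 20          # a fake 20-byte changeset id
    draft = 1
    record = _fphasesentry.pack(draft, node)
    assert len(record) == _fphasesentry.size == 24
    assert _fphasesentry.unpack(record) == (draft, node)
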
@@ -281,11 +281,11 @@
     def replace(self, phcache):
         """replace all values in 'self' with content of phcache"""
         for a in (
-            'phaseroots',
-            'dirty',
-            'opener',
-            '_loadedrevslen',
-            '_phasesets',
+            b'phaseroots',
+            b'dirty',
+            b'opener',
+            b'_loadedrevslen',
+            b'_phasesets',
         ):
             setattr(self, a, getattr(phcache, a))
 
@@ -336,7 +336,7 @@
         if rev == nullrev:
             return public
         if rev < nullrev:
-            raise ValueError(_('cannot lookup negative revision'))
+            raise ValueError(_(b'cannot lookup negative revision'))
         if rev >= self._loadedrevslen:
             self.invalidate()
             self.loadphaserevs(repo)
@@ -348,7 +348,7 @@
     def write(self):
         if not self.dirty:
             return
-        f = self.opener('phaseroots', 'w', atomictemp=True, checkambig=True)
+        f = self.opener(b'phaseroots', b'w', atomictemp=True, checkambig=True)
         try:
             self._write(f)
         finally:
@@ -357,7 +357,7 @@
     def _write(self, fp):
         for phase, roots in enumerate(self.phaseroots):
             for h in sorted(roots):
-                fp.write('%i %s\n' % (phase, hex(h)))
+                fp.write(b'%i %s\n' % (phase, hex(h)))
         self.dirty = False
 
     def _updateroots(self, phase, newroots, tr):
@@ -365,14 +365,14 @@
         self.invalidate()
         self.dirty = True
 
-        tr.addfilegenerator('phase', ('phaseroots',), self._write)
-        tr.hookargs['phases_moved'] = '1'
+        tr.addfilegenerator(b'phase', (b'phaseroots',), self._write)
+        tr.hookargs[b'phases_moved'] = b'1'
 
     def registernew(self, repo, tr, targetphase, nodes):
         repo = repo.unfiltered()
         self._retractboundary(repo, tr, targetphase, nodes)
-        if tr is not None and 'phases' in tr.changes:
-            phasetracking = tr.changes['phases']
+        if tr is not None and b'phases' in tr.changes:
+            phasetracking = tr.changes[b'phases']
             torev = repo.changelog.rev
             phase = self.phase
             for n in nodes:
@@ -395,7 +395,7 @@
         if tr is None:
             phasetracking = None
         else:
-            phasetracking = tr.changes.get('phases')
+            phasetracking = tr.changes.get(b'phases')
 
         repo = repo.unfiltered()
 
@@ -411,7 +411,7 @@
 
             olds = self.phaseroots[phase]
 
-            affected = repo.revs('%ln::%ln', olds, nodes)
+            affected = repo.revs(b'%ln::%ln', olds, nodes)
             changes.update(affected)
             if dryrun:
                 continue
@@ -422,7 +422,7 @@
 
             roots = set(
                 ctx.node()
-                for ctx in repo.set('roots((%ln::) - %ld)', olds, affected)
+                for ctx in repo.set(b'roots((%ln::) - %ld)', olds, affected)
             )
             if olds != roots:
                 self._updateroots(phase, roots, tr)
@@ -440,7 +440,7 @@
         if tr is None:
             phasetracking = None
         else:
-            phasetracking = tr.changes.get('phases')
+            phasetracking = tr.changes.get(b'phases')
         repo = repo.unfiltered()
         if (
             self._retractboundary(repo, tr, targetphase, nodes)
@@ -450,13 +450,13 @@
             # find the affected revisions
             new = self.phaseroots[targetphase]
             old = oldroots[targetphase]
-            affected = set(repo.revs('(%ln::) - (%ln::)', new, old))
+            affected = set(repo.revs(b'(%ln::) - (%ln::)', new, old))
 
             # find the phase of the affected revision
             for phase in pycompat.xrange(targetphase, -1, -1):
                 if phase:
                     roots = oldroots[phase]
-                    revs = set(repo.revs('%ln::%ld', roots, affected))
+                    revs = set(repo.revs(b'%ln::%ld', roots, affected))
                     affected -= revs
                 else:  # public phase
                     revs = affected
@@ -469,7 +469,7 @@
         # phaseroots values, replace them.
         if targetphase in (archived, internal) and not supportinternal(repo):
             name = phasenames[targetphase]
-            msg = 'this repository does not support the %s phase' % name
+            msg = b'this repository does not support the %s phase' % name
             raise error.ProgrammingError(msg)
 
         repo = repo.unfiltered()
@@ -481,7 +481,7 @@
         if newroots:
 
             if nullid in newroots:
-                raise error.Abort(_('cannot change null revision phase'))
+                raise error.Abort(_(b'cannot change null revision phase'))
             currentroots = currentroots.copy()
             currentroots.update(newroots)
 
@@ -491,7 +491,7 @@
             aboveroots = [
                 n for n in currentroots if repo[n].rev() >= minnewroot
             ]
-            updatedroots = repo.set('roots(%ln::)', aboveroots)
+            updatedroots = repo.set(b'roots(%ln::)', aboveroots)
 
             finalroots = set(
                 n for n in currentroots if repo[n].rev() < minnewroot
@@ -514,7 +514,7 @@
             if missing:
                 for mnode in missing:
                     repo.ui.debug(
-                        'removing unknown node %s from %i-phase boundary\n'
+                        b'removing unknown node %s from %i-phase boundary\n'
                         % (short(mnode), phase)
                     )
                 nodes.symmetric_difference_update(missing)
@@ -581,7 +581,7 @@
     """List phases root for serialization over pushkey"""
     # Use ordered dictionary so behavior is deterministic.
     keys = util.sortdict()
-    value = '%i' % draft
+    value = b'%i' % draft
     cl = repo.unfiltered().changelog
     for root in repo._phasecache.phaseroots[draft]:
         if repo._phasecache.phase(repo, cl.rev(root)) <= draft:
@@ -604,7 +604,7 @@
         #
         # The server can't handle it on its own as it has no idea of
         # client phase data.
-        keys['publishing'] = 'True'
+        keys[b'publishing'] = b'True'
     return keys
 
 
@@ -616,7 +616,7 @@
         newphase = abs(int(newphasestr))  # let's avoid negative index surprise
         oldphase = abs(int(oldphasestr))  # let's avoid negative index surprise
         if currentphase == oldphase and newphase < oldphase:
-            with repo.transaction('pushkey-phase') as tr:
+            with repo.transaction(b'pushkey-phase') as tr:
                 advanceboundary(repo, tr, newphase, [bin(nhex)])
             return True
         elif currentphase == newphase:
@@ -638,7 +638,7 @@
     # No need to keep track of secret phase; any heads in the subset that
     # are not mentioned are implicitly secret.
     for phase in allphases[:secret]:
-        revset = "heads(%%ln & %s())" % phasenames[phase]
+        revset = b"heads(%%ln & %s())" % phasenames[phase]
         headsbyphase[phase] = [cl.node(r) for r in repo.revs(revset, subset)]
     return headsbyphase
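
The doubled %% in the revset above is deliberate: the bytes %-formatting step substitutes only the phase name, leaving a literal %ln placeholder behind for repo.revs() to expand into the node list. Outside Mercurial, the first step is plain bytes formatting:

    phasename = b'draft'
    revset = b"heads(%%ln & %s())" % phasename
    assert revset == b"heads(%ln & draft())"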
 
@@ -651,7 +651,7 @@
     # to update. This avoids creating an empty transaction during no-op operations.
 
     for phase in allphases[:-1]:
-        revset = '%ln - _phase(%s)'
+        revset = b'%ln - _phase(%s)'
         heads = [c.node() for c in repo.set(revset, headsbyphase[phase], phase)]
         if heads:
             advanceboundary(repo, trgetter(), phase, heads)
@@ -670,14 +670,17 @@
     draftroots = []
     nodemap = repo.changelog.nodemap  # to filter unknown nodes
     for nhex, phase in roots.iteritems():
-        if nhex == 'publishing':  # ignore data related to publish option
+        if nhex == b'publishing':  # ignore data related to publish option
             continue
         node = bin(nhex)
         phase = int(phase)
         if phase == public:
             if node != nullid:
                 repo.ui.warn(
-                    _('ignoring inconsistent public root' ' from remote: %s\n')
+                    _(
+                        b'ignoring inconsistent public root'
+                        b' from remote: %s\n'
+                    )
                     % nhex
                 )
         elif phase == draft:
@@ -685,7 +688,7 @@
                 draftroots.append(node)
         else:
             repo.ui.warn(
-                _('ignoring unexpected root from remote: %i %s\n')
+                _(b'ignoring unexpected root from remote: %i %s\n')
                 % (phase, nhex)
             )
     # compute heads
@@ -706,12 +709,12 @@
         unfi = repo.unfiltered()
         self._allremoteroots = remoteroots
 
-        self.publishing = remoteroots.get('publishing', False)
+        self.publishing = remoteroots.get(b'publishing', False)
 
         ana = analyzeremotephases(repo, remotesubset, remoteroots)
         self.publicheads, self.draftroots = ana
         # Get the list of all "heads" revs draft on remote
-        dheads = unfi.set('heads(%ln::%ln)', self.draftroots, remotesubset)
+        dheads = unfi.set(b'heads(%ln::%ln)', self.draftroots, remotesubset)
         self.draftheads = [c.node() for c in dheads]
 
 
@@ -735,19 +738,19 @@
     new_heads = set(rev(n) for n in heads if n != nullid)
     roots = [rev(n) for n in roots]
     # compute the area we need to remove
-    affected_zone = repo.revs("(%ld::%ld)", roots, new_heads)
+    affected_zone = repo.revs(b"(%ld::%ld)", roots, new_heads)
     # heads in the area are no longer heads
     new_heads.difference_update(affected_zone)
     # revisions in the area have children outside of it;
     # they might be new heads
     candidates = repo.revs(
-        "parents(%ld + (%ld and merge())) and not null", roots, affected_zone
+        b"parents(%ld + (%ld and merge())) and not null", roots, affected_zone
     )
     candidates -= affected_zone
     if new_heads or candidates:
         # remove candidate that are ancestors of other heads
         new_heads.update(candidates)
-        prunestart = repo.revs("parents(%ld) and not null", new_heads)
+        prunestart = repo.revs(b"parents(%ld) and not null", new_heads)
         pruned = dagop.reachableroots(repo, candidates, prunestart)
         new_heads.difference_update(pruned)
 
@@ -760,14 +763,14 @@
     Handle all possible values for the phases.new-commit options.
 
     """
-    v = ui.config('phases', 'new-commit')
+    v = ui.config(b'phases', b'new-commit')
     try:
         return phasenames.index(v)
     except ValueError:
         try:
             return int(v)
         except ValueError:
-            msg = _("phases.new-commit: not a valid phase name ('%s')")
+            msg = _(b"phases.new-commit: not a valid phase name ('%s')")
             raise error.ConfigError(msg % v)
 
 
@@ -778,7 +781,7 @@
 
 def preparehookargs(node, old, new):
     if old is None:
-        old = ''
+        old = b''
     else:
         old = phasenames[old]
-    return {'node': node, 'oldphase': old, 'phase': phasenames[new]}
+    return {b'node': node, b'oldphase': old, b'phase': phasenames[new]}
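
For illustration, the hook-argument dict built above for a draft-to-public move, assuming the first entries of this module's phasenames table are b'public', b'draft', b'secret' and using a hypothetical node value:

    phasenames = [b'public', b'draft', b'secret']

    def preparehookargs(node, old, new):
        if old is None:
            old = b''
        else:
            old = phasenames[old]
        return {b'node': node, b'oldphase': old, b'phase': phasenames[new]}

    assert preparehookargs(b'ff' * 20, 1, 0) == {
        b'node': b'ff' * 20,
        b'oldphase': b'draft',
        b'phase': b'public',
    }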
--- a/mercurial/posix.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/posix.py	Sun Oct 06 09:48:39 2019 -0400
@@ -41,7 +41,7 @@
     # to copies.
     def oslink(src, dst):
         raise OSError(
-            errno.EINVAL, 'hardlinks not supported: %s to %s' % (src, dst)
+            errno.EINVAL, b'hardlinks not supported: %s to %s' % (src, dst)
         )
 
 
@@ -85,13 +85,13 @@
     ...           b'']:
     ...     assert split(f) == posixpath.split(f), f
     '''
-    ht = p.rsplit('/', 1)
+    ht = p.rsplit(b'/', 1)
     if len(ht) == 1:
-        return '', p
-    nh = ht[0].rstrip('/')
+        return b'', p
+    nh = ht[0].rstrip(b'/')
     if nh:
         return nh, ht[1]
-    return ht[0] + '/', ht[1]
+    return ht[0] + b'/', ht[1]
 
 
 def openhardlinks():
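
As the doctest above indicates, split() mirrors posixpath.split() while preserving runs of leading slashes; restated standalone, with posixpath itself as the oracle:

    import posixpath

    def split(p):
        ht = p.rsplit(b'/', 1)
        if len(ht) == 1:
            return b'', p
        nh = ht[0].rstrip(b'/')
        if nh:
            return nh, ht[1]
        return ht[0] + b'/', ht[1]

    for p in (b'foo', b'/foo/bar', b'//foo//bar', b'/'):
        assert split(p) == posixpath.split(p), p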
@@ -107,25 +107,25 @@
 def parsepatchoutput(output_line):
     """parses the output produced by patch and returns the filename"""
     pf = output_line[14:]
-    if pycompat.sysplatform == 'OpenVMS':
-        if pf[0] == '`':
+    if pycompat.sysplatform == b'OpenVMS':
+        if pf[0] == b'`':
             pf = pf[1:-1]  # Remove the quotes
     else:
-        if pf.startswith("'") and pf.endswith("'") and " " in pf:
+        if pf.startswith(b"'") and pf.endswith(b"'") and b" " in pf:
             pf = pf[1:-1]  # Remove the quotes
     return pf
 
 
 def sshargs(sshcmd, host, user, port):
     '''Build argument list for ssh'''
-    args = user and ("%s@%s" % (user, host)) or host
-    if '-' in args[:1]:
+    args = user and (b"%s@%s" % (user, host)) or host
+    if b'-' in args[:1]:
         raise error.Abort(
-            _('illegal ssh hostname or username starting with -: %s') % args
+            _(b'illegal ssh hostname or username starting with -: %s') % args
         )
     args = shellquote(args)
     if port:
-        args = '-p %s %s' % (shellquote(port), args)
+        args = b'-p %s %s' % (shellquote(port), args)
     return args
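
The dash check above closes an argument-injection hole: a user or host beginning with - would otherwise be parsed by ssh as an option (e.g. -oProxyCommand=...). A standalone sketch of just the guard:

    def checksshdest(args):
        if b'-' in args[:1]:
            raise ValueError(
                'illegal ssh hostname or username starting with -: %r' % args
            )
        return args

    assert checksshdest(b'user@example.com') == b'user@example.com'
    try:
        checksshdest(b'-oProxyCommand=touch pwned')
    except ValueError:
        pass
    else:
        raise AssertionError('dash-prefixed destination was not rejected')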
 
 
@@ -140,7 +140,7 @@
     if l:
         if not stat.S_ISLNK(s):
             # switch file to link
-            fp = open(f, 'rb')
+            fp = open(f, b'rb')
             data = fp.read()
             fp.close()
             unlink(f)
@@ -148,7 +148,7 @@
                 os.symlink(data, f)
             except OSError:
                 # failed to make a link, rewrite file
-                fp = open(f, "wb")
+                fp = open(f, b"wb")
                 fp.write(data)
                 fp.close()
         # no chmod needed at this point
@@ -157,7 +157,7 @@
         # switch link to file
         data = os.readlink(f)
         unlink(f)
-        fp = open(f, "wb")
+        fp = open(f, b"wb")
         fp.write(data)
         fp.close()
         s = 0o666 & ~umask  # avoid restatting for chmod
@@ -165,10 +165,10 @@
     sx = s & 0o100
     if st.st_nlink > 1 and bool(x) != bool(sx):
         # the file is a hardlink, break it
-        with open(f, "rb") as fp:
+        with open(f, b"rb") as fp:
             data = fp.read()
         unlink(f)
-        with open(f, "wb") as fp:
+        with open(f, b"wb") as fp:
             fp.write(data)
 
     if x and not sx:
@@ -215,9 +215,9 @@
 
     try:
         EXECFLAGS = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
-        basedir = os.path.join(path, '.hg')
-        cachedir = os.path.join(basedir, 'wcache')
-        storedir = os.path.join(basedir, 'store')
+        basedir = os.path.join(path, b'.hg')
+        cachedir = os.path.join(basedir, b'wcache')
+        storedir = os.path.join(basedir, b'store')
         if not os.path.exists(cachedir):
             try:
                 # we want to create the 'cache' directory, not the '.hg' one.
@@ -232,8 +232,8 @@
                # if this fails, other fallback logic triggers
                 pass
         if os.path.isdir(cachedir):
-            checkisexec = os.path.join(cachedir, 'checkisexec')
-            checknoexec = os.path.join(cachedir, 'checknoexec')
+            checkisexec = os.path.join(cachedir, b'checkisexec')
+            checknoexec = os.path.join(cachedir, b'checknoexec')
 
             try:
                 m = os.stat(checkisexec).st_mode
@@ -250,7 +250,7 @@
                     except OSError as e:
                         if e.errno != errno.ENOENT:
                             raise
-                        open(checknoexec, 'w').close()  # might fail
+                        open(checknoexec, b'w').close()  # might fail
                         m = os.stat(checknoexec).st_mode
                     if m & EXECFLAGS == 0:
                         # check-exec is exec and check-no-exec is not exec
@@ -266,7 +266,7 @@
             # check directly in path and don't leave checkisexec behind
             checkdir = path
             checkisexec = None
-        fh, fn = pycompat.mkstemp(dir=checkdir, prefix='hg-checkexec-')
+        fh, fn = pycompat.mkstemp(dir=checkdir, prefix=b'hg-checkexec-')
         try:
             os.close(fh)
             m = os.stat(fn).st_mode
@@ -290,8 +290,8 @@
     # mktemp is not racy because symlink creation will fail if the
     # file already exists
     while True:
-        cachedir = os.path.join(path, '.hg', 'wcache')
-        checklink = os.path.join(cachedir, 'checklink')
+        cachedir = os.path.join(path, b'.hg', b'wcache')
+        checklink = os.path.join(cachedir, b'checklink')
         # try fast path, read only
         if os.path.islink(checklink):
             return True
@@ -308,16 +308,16 @@
             fd = None
             if cachedir is None:
                 fd = pycompat.namedtempfile(
-                    dir=checkdir, prefix='hg-checklink-'
+                    dir=checkdir, prefix=b'hg-checklink-'
                 )
                 target = os.path.basename(fd.name)
             else:
                 # create a fixed file to link to; doesn't matter if it
                 # already exists.
-                target = 'checklink-target'
+                target = b'checklink-target'
                 try:
                     fullpath = os.path.join(cachedir, target)
-                    open(fullpath, 'w').close()
+                    open(fullpath, b'w').close()
                 except IOError as inst:
                     if inst[0] == errno.EACCES:
                         # If we can't write to cachedir, just pretend
@@ -444,7 +444,7 @@
             u = path.decode('utf-8')
         except UnicodeDecodeError:
             # OS X percent-encodes any bytes that aren't valid utf-8
-            s = ''
+            s = b''
             pos = 0
             l = len(path)
             while pos < l:
@@ -452,7 +452,7 @@
                     c = encoding.getutf8char(path, pos)
                     pos += len(c)
                 except ValueError:
-                    c = '%%%02X' % ord(path[pos : pos + 1])
+                    c = b'%%%02X' % ord(path[pos : pos + 1])
                     pos += 1
                 s += c
 
@@ -464,14 +464,14 @@
         return encoding.hfsignoreclean(enc)
 
 
-if pycompat.sysplatform == 'cygwin':
+if pycompat.sysplatform == b'cygwin':
     # workaround for cygwin, in which mount point part of path is
     # treated as case sensitive, even though underlying NTFS is case
     # insensitive.
 
     # default mount points
     cygwinmountpoints = sorted(
-        ["/usr/bin", "/usr/lib", "/cygdrive",], reverse=True
+        [b"/usr/bin", b"/usr/lib", b"/cygdrive",], reverse=True
     )
 
     # use upper-ing as normcase as same as NTFS workaround
@@ -515,8 +515,8 @@
 
 
 def shellquote(s):
-    if pycompat.sysplatform == 'OpenVMS':
-        return '"%s"' % s
+    if pycompat.sysplatform == b'OpenVMS':
+        return b'"%s"' % s
     global _needsshellquote
     if _needsshellquote is None:
         _needsshellquote = re.compile(br'[^a-zA-Z0-9._/+-]').search
@@ -524,7 +524,7 @@
         # "s" shouldn't have to be quoted
         return s
     else:
-        return "'%s'" % s.replace("'", "'\\''")
+        return b"'%s'" % s.replace(b"'", b"'\\''")
 
 
 def shellsplit(s):
@@ -538,7 +538,7 @@
 
 def testpid(pid):
     '''return False if pid dead, True if running or not sure'''
-    if pycompat.sysplatform == 'OpenVMS':
+    if pycompat.sysplatform == b'OpenVMS':
         return True
     try:
         os.kill(pid, 0)
@@ -557,11 +557,11 @@
     If command is a basename then PATH is searched for command.
     PATH isn't searched if command is an absolute or relative path.
     If command isn't found, None is returned.'''
-    if pycompat.sysplatform == 'OpenVMS':
+    if pycompat.sysplatform == b'OpenVMS':
         return command
 
     def findexisting(executable):
-        'Will return executable if existing file'
+        b'Will return executable if existing file'
         if os.path.isfile(executable) and os.access(executable, os.X_OK):
             return executable
         return None
@@ -569,10 +569,10 @@
     if pycompat.ossep in command:
         return findexisting(command)
 
-    if pycompat.sysplatform == 'plan9':
-        return findexisting(os.path.join('/bin', command))
+    if pycompat.sysplatform == b'plan9':
+        return findexisting(os.path.join(b'/bin', command))
 
-    for path in encoding.environ.get('PATH', '').split(pycompat.ospathsep):
+    for path in encoding.environ.get(b'PATH', b'').split(pycompat.ospathsep):
         executable = findexisting(os.path.join(path, command))
         if executable is not None:
             return executable
@@ -752,7 +752,7 @@
             except IOError:
                 break
 
-        return ''.join(chunks)
+        return b''.join(chunks)
     finally:
         fcntl.fcntl(pipe, fcntl.F_SETFL, oldflags)
 
@@ -765,7 +765,7 @@
     dirname, basename = os.path.split(path)
     bakwdfd = None
     if dirname:
-        bakwdfd = os.open('.', os.O_DIRECTORY)
+        bakwdfd = os.open(b'.', os.O_DIRECTORY)
         os.chdir(dirname)
     sock.bind(basename)
     if bakwdfd:
--- a/mercurial/profiling.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/profiling.py	Sun Oct 06 09:48:39 2019 -0400
@@ -33,22 +33,24 @@
 
 @contextlib.contextmanager
 def lsprofile(ui, fp):
-    format = ui.config('profiling', 'format')
-    field = ui.config('profiling', 'sort')
-    limit = ui.configint('profiling', 'limit')
-    climit = ui.configint('profiling', 'nested')
+    format = ui.config(b'profiling', b'format')
+    field = ui.config(b'profiling', b'sort')
+    limit = ui.configint(b'profiling', b'limit')
+    climit = ui.configint(b'profiling', b'nested')
 
-    if format not in ['text', 'kcachegrind']:
-        ui.warn(_("unrecognized profiling format '%s'" " - Ignored\n") % format)
-        format = 'text'
+    if format not in [b'text', b'kcachegrind']:
+        ui.warn(
+            _(b"unrecognized profiling format '%s'" b" - Ignored\n") % format
+        )
+        format = b'text'
 
     try:
         from . import lsprof
     except ImportError:
         raise error.Abort(
             _(
-                'lsprof not available - install from '
-                'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'
+                b'lsprof not available - install from '
+                b'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'
             )
         )
     p = lsprof.Profiler()
@@ -58,7 +60,7 @@
     finally:
         p.disable()
 
-        if format == 'kcachegrind':
+        if format == b'kcachegrind':
             from . import lsprofcalltree
 
             calltree = lsprofcalltree.KCacheGrind(p)
@@ -77,12 +79,12 @@
     except ImportError:
         raise error.Abort(
             _(
-                'flamegraph not available - install from '
-                'https://github.com/evanhempel/python-flamegraph'
+                b'flamegraph not available - install from '
+                b'https://github.com/evanhempel/python-flamegraph'
             )
         )
     # developer config: profiling.freq
-    freq = ui.configint('profiling', 'freq')
+    freq = ui.configint(b'profiling', b'freq')
     filter_ = None
     collapse_recursion = True
     thread = flamegraph.ProfileThread(
@@ -96,7 +98,7 @@
         thread.stop()
         thread.join()
         print(
-            'Collected %d stack frames (%d unique) in %2.2f seconds.'
+            b'Collected %d stack frames (%d unique) in %2.2f seconds.'
             % (
                 util.timer() - start_time,
                 thread.num_frames(),
@@ -109,38 +111,38 @@
 def statprofile(ui, fp):
     from . import statprof
 
-    freq = ui.configint('profiling', 'freq')
+    freq = ui.configint(b'profiling', b'freq')
     if freq > 0:
         # Cannot reset when profiler is already active. So silently no-op.
         if statprof.state.profile_level == 0:
             statprof.reset(freq)
     else:
-        ui.warn(_("invalid sampling frequency '%s' - ignoring\n") % freq)
+        ui.warn(_(b"invalid sampling frequency '%s' - ignoring\n") % freq)
 
     track = ui.config(
-        'profiling', 'time-track', pycompat.iswindows and 'cpu' or 'real'
+        b'profiling', b'time-track', pycompat.iswindows and b'cpu' or b'real'
     )
-    statprof.start(mechanism='thread', track=track)
+    statprof.start(mechanism=b'thread', track=track)
 
     try:
         yield
     finally:
         data = statprof.stop()
 
-        profformat = ui.config('profiling', 'statformat')
+        profformat = ui.config(b'profiling', b'statformat')
 
         formats = {
-            'byline': statprof.DisplayFormats.ByLine,
-            'bymethod': statprof.DisplayFormats.ByMethod,
-            'hotpath': statprof.DisplayFormats.Hotpath,
-            'json': statprof.DisplayFormats.Json,
-            'chrome': statprof.DisplayFormats.Chrome,
+            b'byline': statprof.DisplayFormats.ByLine,
+            b'bymethod': statprof.DisplayFormats.ByMethod,
+            b'hotpath': statprof.DisplayFormats.Hotpath,
+            b'json': statprof.DisplayFormats.Json,
+            b'chrome': statprof.DisplayFormats.Chrome,
         }
 
         if profformat in formats:
             displayformat = formats[profformat]
         else:
-            ui.warn(_('unknown profiler output format: %s\n') % profformat)
+            ui.warn(_(b'unknown profiler output format: %s\n') % profformat)
             displayformat = statprof.DisplayFormats.Hotpath
 
         kwargs = {}
@@ -148,7 +150,7 @@
         def fraction(s):
             if isinstance(s, (float, int)):
                 return float(s)
-            if s.endswith('%'):
+            if s.endswith(b'%'):
                 v = float(s[:-1]) / 100
             else:
                 v = float(s)
@@ -156,15 +158,15 @@
                 return v
             raise ValueError(s)
 
-        if profformat == 'chrome':
-            showmin = ui.configwith(fraction, 'profiling', 'showmin', 0.005)
-            showmax = ui.configwith(fraction, 'profiling', 'showmax')
+        if profformat == b'chrome':
+            showmin = ui.configwith(fraction, b'profiling', b'showmin', 0.005)
+            showmax = ui.configwith(fraction, b'profiling', b'showmax')
             kwargs.update(minthreshold=showmin, maxthreshold=showmax)
-        elif profformat == 'hotpath':
+        elif profformat == b'hotpath':
             # inconsistent config: profiling.showmin
-            limit = ui.configwith(fraction, 'profiling', 'showmin', 0.05)
+            limit = ui.configwith(fraction, b'profiling', b'showmin', 0.05)
             kwargs[r'limit'] = limit
-            showtime = ui.configbool('profiling', 'showtime')
+            showtime = ui.configbool(b'profiling', b'showtime')
             kwargs[r'showtime'] = showtime
 
         statprof.display(fp, data=data, format=displayformat, **kwargs)
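
fraction() above accepts either a number or a (bytes) percentage string and normalizes both to a float in [0, 1]; restated standalone:

    def fraction(s):
        if isinstance(s, (float, int)):
            return float(s)
        if s.endswith(b'%'):
            v = float(s[:-1]) / 100
        else:
            v = float(s)
        if 0 <= v <= 1:
            return v
        raise ValueError(s)

    assert fraction(b'5%') == 0.05
    assert fraction(b'0.25') == 0.25
    assert fraction(0.25) == 0.25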
@@ -204,27 +206,27 @@
         if self._started:
             return
         self._started = True
-        profiler = encoding.environ.get('HGPROF')
+        profiler = encoding.environ.get(b'HGPROF')
         proffn = None
         if profiler is None:
-            profiler = self._ui.config('profiling', 'type')
-        if profiler not in ('ls', 'stat', 'flame'):
+            profiler = self._ui.config(b'profiling', b'type')
+        if profiler not in (b'ls', b'stat', b'flame'):
             # try load profiler from extension with the same name
             proffn = _loadprofiler(self._ui, profiler)
             if proffn is None:
                 self._ui.warn(
-                    _("unrecognized profiler '%s' - ignored\n") % profiler
+                    _(b"unrecognized profiler '%s' - ignored\n") % profiler
                 )
-                profiler = 'stat'
+                profiler = b'stat'
 
-        self._output = self._ui.config('profiling', 'output')
+        self._output = self._ui.config(b'profiling', b'output')
 
         try:
-            if self._output == 'blackbox':
+            if self._output == b'blackbox':
                 self._fp = util.stringio()
             elif self._output:
                 path = self._ui.expandpath(self._output)
-                self._fp = open(path, 'wb')
+                self._fp = open(path, b'wb')
             elif pycompat.iswindows:
                 # parse escape sequence by win32print()
                 class uifp(object):
@@ -245,9 +247,9 @@
 
             if proffn is not None:
                 pass
-            elif profiler == 'ls':
+            elif profiler == b'ls':
                 proffn = lsprofile
-            elif profiler == 'flame':
+            elif profiler == b'flame':
                 proffn = flameprofile
             else:
                 proffn = statprofile
@@ -264,12 +266,12 @@
             propagate = self._profiler.__exit__(
                 exception_type, exception_value, traceback
             )
-            if self._output == 'blackbox':
-                val = 'Profile:\n%s' % self._fp.getvalue()
+            if self._output == b'blackbox':
+                val = b'Profile:\n%s' % self._fp.getvalue()
                 # ui.log treats the input as a format string,
                 # so we need to escape any % signs.
-                val = val.replace('%', '%%')
-                self._ui.log('profile', val)
+                val = val.replace(b'%', b'%%')
+                self._ui.log(b'profile', val)
         self._closefp()
         return propagate
 
--- a/mercurial/progress.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/progress.py	Sun Oct 06 09:48:39 2019 -0400
@@ -16,12 +16,12 @@
 
 
 def spacejoin(*args):
-    return ' '.join(s for s in args if s)
+    return b' '.join(s for s in args if s)
 
 
 def shouldprint(ui):
-    return not (ui.quiet or ui.plain('progress')) and (
-        ui._isatty(ui.ferr) or ui.configbool('progress', 'assume-tty')
+    return not (ui.quiet or ui.plain(b'progress')) and (
+        ui._isatty(ui.ferr) or ui.configbool(b'progress', b'assume-tty')
     )
 
 
@@ -31,39 +31,39 @@
     This will properly display seconds, minutes, hours, days if needed"""
     if seconds < 60:
         # i18n: format XX seconds as "XXs"
-        return _("%02ds") % seconds
+        return _(b"%02ds") % seconds
     minutes = seconds // 60
     if minutes < 60:
         seconds -= minutes * 60
         # i18n: format X minutes and YY seconds as "XmYYs"
-        return _("%dm%02ds") % (minutes, seconds)
+        return _(b"%dm%02ds") % (minutes, seconds)
     # we're going to ignore seconds in this case
     minutes += 1
     hours = minutes // 60
     minutes -= hours * 60
     if hours < 30:
         # i18n: format X hours and YY minutes as "XhYYm"
-        return _("%dh%02dm") % (hours, minutes)
+        return _(b"%dh%02dm") % (hours, minutes)
     # we're going to ignore minutes in this case
     hours += 1
     days = hours // 24
     hours -= days * 24
     if days < 15:
         # i18n: format X days and YY hours as "XdYYh"
-        return _("%dd%02dh") % (days, hours)
+        return _(b"%dd%02dh") % (days, hours)
     # we're going to ignore hours in this case
     days += 1
     weeks = days // 7
     days -= weeks * 7
     if weeks < 55:
         # i18n: format X weeks and YY days as "XwYYd"
-        return _("%dw%02dd") % (weeks, days)
+        return _(b"%dw%02dd") % (weeks, days)
     # we're going to ignore days and treat a year as 52 weeks
     weeks += 1
     years = weeks // 52
     weeks -= years * 52
     # i18n: format X years and YY weeks as "XyYYw"
-    return _("%dy%02dw") % (years, weeks)
+    return _(b"%dy%02dw") % (years, weeks)
 
 
 # file_write() and file_flush() of Python 2 do not restart on EINTR if
@@ -98,18 +98,18 @@
         self.startvals = {}
         self.printed = False
         self.lastprint = time.time() + float(
-            self.ui.config('progress', 'delay')
+            self.ui.config(b'progress', b'delay')
         )
         self.curtopic = None
         self.lasttopic = None
         self.indetcount = 0
-        self.refresh = float(self.ui.config('progress', 'refresh'))
+        self.refresh = float(self.ui.config(b'progress', b'refresh'))
         self.changedelay = max(
-            3 * self.refresh, float(self.ui.config('progress', 'changedelay'))
+            3 * self.refresh, float(self.ui.config(b'progress', b'changedelay'))
         )
-        self.order = self.ui.configlist('progress', 'format')
+        self.order = self.ui.configlist(b'progress', b'format')
         self.estimateinterval = self.ui.configwith(
-            float, 'progress', 'estimateinterval'
+            float, b'progress', b'estimateinterval'
         )
 
     def show(self, now, topic, pos, item, unit, total):
@@ -117,40 +117,40 @@
             return
         termwidth = self.width()
         self.printed = True
-        head = ''
+        head = b''
         needprogress = False
-        tail = ''
+        tail = b''
         for indicator in self.order:
-            add = ''
-            if indicator == 'topic':
+            add = b''
+            if indicator == b'topic':
                 add = topic
-            elif indicator == 'number':
+            elif indicator == b'number':
                 if total:
                     add = b'%*d/%d' % (len(str(total)), pos, total)
                 else:
                     add = b'%d' % pos
-            elif indicator.startswith('item') and item:
-                slice = 'end'
-                if '-' in indicator:
-                    wid = int(indicator.split('-')[1])
-                elif '+' in indicator:
-                    slice = 'beginning'
-                    wid = int(indicator.split('+')[1])
+            elif indicator.startswith(b'item') and item:
+                slice = b'end'
+                if b'-' in indicator:
+                    wid = int(indicator.split(b'-')[1])
+                elif b'+' in indicator:
+                    slice = b'beginning'
+                    wid = int(indicator.split(b'+')[1])
                 else:
                     wid = 20
-                if slice == 'end':
+                if slice == b'end':
                     add = encoding.trim(item, wid, leftside=True)
                 else:
                     add = encoding.trim(item, wid)
-                add += (wid - encoding.colwidth(add)) * ' '
-            elif indicator == 'bar':
-                add = ''
+                add += (wid - encoding.colwidth(add)) * b' '
+            elif indicator == b'bar':
+                add = b''
                 needprogress = True
-            elif indicator == 'unit' and unit:
+            elif indicator == b'unit' and unit:
                 add = unit
-            elif indicator == 'estimate':
+            elif indicator == b'estimate':
                 add = self.estimate(topic, pos, total, now)
-            elif indicator == 'speed':
+            elif indicator == b'speed':
                 add = self.speed(topic, pos, unit, now)
             if not needprogress:
                 head = spacejoin(head, add)
@@ -165,10 +165,10 @@
             progwidth = termwidth - used - 3
             if total and pos <= total:
                 amt = pos * progwidth // total
-                bar = '=' * (amt - 1)
+                bar = b'=' * (amt - 1)
                 if amt > 0:
-                    bar += '>'
-                bar += ' ' * (progwidth - amt)
+                    bar += b'>'
+                bar += b' ' * (progwidth - amt)
             else:
                 progwidth -= 3
                 self.indetcount += 1
@@ -177,22 +177,22 @@
                 amt = self.indetcount % (2 * progwidth)
                 amt -= progwidth
                 bar = (
-                    ' ' * int(progwidth - abs(amt))
-                    + '<=>'
-                    + ' ' * int(abs(amt))
+                    b' ' * int(progwidth - abs(amt))
+                    + b'<=>'
+                    + b' ' * int(abs(amt))
                 )
-            prog = ''.join(('[', bar, ']'))
+            prog = b''.join((b'[', bar, b']'))
             out = spacejoin(head, prog, tail)
         else:
             out = spacejoin(head, tail)
-        self._writeerr('\r' + encoding.trim(out, termwidth))
+        self._writeerr(b'\r' + encoding.trim(out, termwidth))
         self.lasttopic = topic
         self._flusherr()
 
     def clear(self):
         if not self.printed or not self.lastprint or not shouldprint(self.ui):
             return
-        self._writeerr('\r%s\r' % (' ' * self.width()))
+        self._writeerr(b'\r%s\r' % (b' ' * self.width()))
         if self.printed:
             # force immediate re-paint of progress bar
             self.lastprint = 0
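
A standalone sketch of the determinate bar drawn above: the '=' run, '>' tip, and space padding scale pos/total into progwidth columns.

    def drawbar(pos, total, progwidth):
        amt = pos * progwidth // total
        bar = b'=' * (amt - 1)
        if amt > 0:
            bar += b'>'
        bar += b' ' * (progwidth - amt)
        return b''.join((b'[', bar, b']'))

    assert drawbar(5, 10, 20) == b'[' + b'=' * 9 + b'>' + b' ' * 10 + b']'
    assert drawbar(10, 10, 20) == b'[' + b'=' * 19 + b'>' + b']'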
@@ -200,10 +200,10 @@
     def complete(self):
         if not shouldprint(self.ui):
             return
-        if self.ui.configbool('progress', 'clear-complete'):
+        if self.ui.configbool(b'progress', b'clear-complete'):
             self.clear()
         else:
-            self._writeerr('\n')
+            self._writeerr(b'\n')
         self._flusherr()
 
     def _flusherr(self):
@@ -214,11 +214,11 @@
 
     def width(self):
         tw = self.ui.termwidth()
-        return min(int(self.ui.config('progress', 'width', default=tw)), tw)
+        return min(int(self.ui.config(b'progress', b'width', default=tw)), tw)
 
     def estimate(self, topic, pos, total, now):
         if total is None:
-            return ''
+            return b''
         initialpos = self.startvals[topic]
         target = total - initialpos
         delta = pos - initialpos
@@ -226,15 +226,15 @@
             elapsed = now - self.starttimes[topic]
             seconds = (elapsed * (target - delta)) // delta + 1
             return fmtremaining(seconds)
-        return ''
+        return b''
 
     def speed(self, topic, pos, unit, now):
         initialpos = self.startvals[topic]
         delta = pos - initialpos
         elapsed = now - self.starttimes[topic]
         if elapsed > 0:
-            return _('%d %s/sec') % (delta / elapsed, unit)
-        return ''
+            return _(b'%d %s/sec') % (delta / elapsed, unit)
+        return b''
 
     def _oktoprint(self, now):
         '''Check if conditions are met to print - e.g. changedelay elapsed'''
@@ -275,7 +275,7 @@
             self.startvals[topic] = pos - newdelta
             self.starttimes[topic] = now - interval
 
-    def progress(self, topic, pos, item='', unit='', total=None):
+    def progress(self, topic, pos, item=b'', unit=b'', total=None):
         if pos is None:
             self.closetopic(topic)
             return
--- a/mercurial/pure/base85.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/pure/base85.py	Sun Oct 06 09:48:39 2019 -0400
@@ -12,8 +12,8 @@
 from .. import pycompat
 
 _b85chars = pycompat.bytestr(
-    "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdef"
-    "ghijklmnopqrstuvwxyz!#$%&()*+-;<=>?@^_`{|}~"
+    b"0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdef"
+    b"ghijklmnopqrstuvwxyz!#$%&()*+-;<=>?@^_`{|}~"
 )
 _b85chars2 = [(a + b) for a in _b85chars for b in _b85chars]
 _b85dec = {}
@@ -29,11 +29,11 @@
     l = len(text)
     r = l % 4
     if r:
-        text += '\0' * (4 - r)
+        text += b'\0' * (4 - r)
     longs = len(text) >> 2
-    words = struct.unpack('>%dL' % longs, text)
+    words = struct.unpack(b'>%dL' % longs, text)
 
-    out = ''.join(
+    out = b''.join(
         _b85chars[(word // 52200625) % 85]
         + _b85chars2[(word // 7225) % 7225]
         + _b85chars2[word % 7225]
@@ -67,10 +67,10 @@
                 acc = acc * 85 + _b85dec[c]
             except KeyError:
                 raise ValueError(
-                    'bad base85 character at position %d' % (i + j)
+                    b'bad base85 character at position %d' % (i + j)
                 )
         if acc > 4294967295:
-            raise ValueError('Base85 overflow in hunk starting at byte %d' % i)
+            raise ValueError(b'Base85 overflow in hunk starting at byte %d' % i)
         out.append(acc)
 
     # Pad final chunk if necessary
@@ -81,7 +81,7 @@
             acc += 0xFFFFFF >> (cl - 2) * 8
         out[-1] = acc
 
-    out = struct.pack('>%dL' % (len(out)), *out)
+    out = struct.pack(b'>%dL' % (len(out)), *out)
     if cl:
         out = out[: -(5 - cl)]
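
The alphabet above is the RFC 1924 base85 set, the same one the standard library's base64.b85encode/b85decode have used since Python 3.4, so the pure-Python codec can be cross-checked on 4-byte-aligned input:

    import base64

    assert base64.b85encode(b'\x00\x00\x00\x00') == b'00000'
    assert base64.b85decode(base64.b85encode(b'\xde\xad\xbe\xef')) == b'\xde\xad\xbe\xef'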
 
--- a/mercurial/pure/bdiff.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/pure/bdiff.py	Sun Oct 06 09:48:39 2019 -0400
@@ -14,9 +14,9 @@
 
 def splitnewlines(text):
     '''like str.splitlines, but only split on newlines.'''
-    lines = [l + '\n' for l in text.split('\n')]
+    lines = [l + b'\n' for l in text.split(b'\n')]
     if lines:
-        if lines[-1] == '\n':
+        if lines[-1] == b'\n':
             lines.pop()
         else:
             lines[-1] = lines[-1][:-1]
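
Unlike bytes.splitlines(True), which also breaks on b'\r' and other line boundaries, splitnewlines keeps everything but b'\n' intact; restated standalone:

    def splitnewlines(text):
        lines = [l + b'\n' for l in text.split(b'\n')]
        if lines:
            if lines[-1] == b'\n':
                lines.pop()
            else:
                lines[-1] = lines[-1][:-1]
        return lines

    assert splitnewlines(b'a\nb\rc\n') == [b'a\n', b'b\rc\n']
    assert b'a\nb\rc\n'.splitlines(True) == [b'a\n', b'b\r', b'c\n']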
@@ -60,8 +60,8 @@
     b = bytes(b).splitlines(True)
 
     if not a:
-        s = "".join(b)
-        return s and (struct.pack(">lll", 0, 0, len(s)) + s)
+        s = b"".join(b)
+        return s and (struct.pack(b">lll", 0, 0, len(s)) + s)
 
     bin = []
     p = [0]
@@ -73,13 +73,13 @@
     la = 0
     lb = 0
     for am, bm, size in d:
-        s = "".join(b[lb:bm])
+        s = b"".join(b[lb:bm])
         if am > la or s:
-            bin.append(struct.pack(">lll", p[la], p[am], len(s)) + s)
+            bin.append(struct.pack(b">lll", p[la], p[am], len(s)) + s)
         la = am + size
         lb = bm + size
 
-    return "".join(bin)
+    return b"".join(bin)
 
 
 def blocks(a, b):
@@ -92,8 +92,8 @@
 
 def fixws(text, allws):
     if allws:
-        text = re.sub('[ \t\r]+', '', text)
+        text = re.sub(b'[ \t\r]+', b'', text)
     else:
-        text = re.sub('[ \t\r]+', ' ', text)
-        text = text.replace(' \n', '\n')
+        text = re.sub(b'[ \t\r]+', b' ', text)
+        text = text.replace(b' \n', b'\n')
     return text
--- a/mercurial/pure/charencode.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/pure/charencode.py	Sun Oct 06 09:48:39 2019 -0400
@@ -37,19 +37,19 @@
 
 
 _jsonmap = []
-_jsonmap.extend("\\u%04x" % x for x in range(32))
+_jsonmap.extend(b"\\u%04x" % x for x in range(32))
 _jsonmap.extend(pycompat.bytechr(x) for x in range(32, 127))
-_jsonmap.append('\\u007f')
-_jsonmap[0x09] = '\\t'
-_jsonmap[0x0A] = '\\n'
-_jsonmap[0x22] = '\\"'
-_jsonmap[0x5C] = '\\\\'
-_jsonmap[0x08] = '\\b'
-_jsonmap[0x0C] = '\\f'
-_jsonmap[0x0D] = '\\r'
+_jsonmap.append(b'\\u007f')
+_jsonmap[0x09] = b'\\t'
+_jsonmap[0x0A] = b'\\n'
+_jsonmap[0x22] = b'\\"'
+_jsonmap[0x5C] = b'\\\\'
+_jsonmap[0x08] = b'\\b'
+_jsonmap[0x0C] = b'\\f'
+_jsonmap[0x0D] = b'\\r'
 _paranoidjsonmap = _jsonmap[:]
-_paranoidjsonmap[0x3C] = '\\u003c'  # '<' (e.g. escape "</script>")
-_paranoidjsonmap[0x3E] = '\\u003e'  # '>'
+_paranoidjsonmap[0x3C] = b'\\u003c'  # '<' (e.g. escape "</script>")
+_paranoidjsonmap[0x3E] = b'\\u003e'  # '>'
 _jsonmap.extend(pycompat.bytechr(x) for x in range(128, 256))
 
 
@@ -63,7 +63,7 @@
     else:
         jm = _jsonmap
     try:
-        return ''.join(jm[x] for x in bytearray(u8chars))
+        return b''.join(jm[x] for x in bytearray(u8chars))
     except IndexError:
         raise ValueError
 
@@ -87,4 +87,4 @@
     u16b = u8chars.decode('utf-8', _utf8strict).encode('utf-16', _utf8strict)
     u16codes = array.array(r'H', u16b)
     u16codes.pop(0)  # drop BOM
-    return ''.join(jm[x] if x < 128 else '\\u%04x' % x for x in u16codes)
+    return b''.join(jm[x] if x < 128 else b'\\u%04x' % x for x in u16codes)
--- a/mercurial/pure/mpatch.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/pure/mpatch.py	Sun Oct 06 09:48:39 2019 -0400
@@ -97,9 +97,9 @@
         while pos < end:
             m.seek(pos)
             try:
-                p1, p2, l = struct.unpack(">lll", m.read(12))
+                p1, p2, l = struct.unpack(b">lll", m.read(12))
             except struct.error:
-                raise mpatchError("patch cannot be decoded")
+                raise mpatchError(b"patch cannot be decoded")
             _pull(new, frags, p1 - last)  # what didn't change
             _pull([], frags, p2 - p1)  # what got deleted
             new.append((l, pos + 12))  # what got added
@@ -120,7 +120,7 @@
 
     while data <= binend:
         decode = delta[bin : bin + 12]
-        start, end, length = struct.unpack(">lll", decode)
+        start, end, length = struct.unpack(b">lll", decode)
         if start > end:
             break
         bin = data + length
@@ -130,7 +130,7 @@
         outlen += length
 
     if bin != binend:
-        raise mpatchError("patch cannot be decoded")
+        raise mpatchError(b"patch cannot be decoded")
 
     outlen += orig - last
     return outlen
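
Each fragment in the deltas walked above is a 12-byte b'>lll' header (start, end, replacement length) followed by the replacement bytes; a hand-rolled application of a single fragment (illustration only, not the mpatch API):

    import struct

    def applyone(base, delta):
        start, end, length = struct.unpack(b'>lll', delta[:12])
        return base[:start] + delta[12:12 + length] + base[end:]

    delta = struct.pack(b'>lll', 2, 4, 3) + b'XYZ'
    assert applyone(b'abcdef', delta) == b'abXYZef'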
--- a/mercurial/pure/osutil.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/pure/osutil.py	Sun Oct 06 09:48:39 2019 -0400
@@ -74,7 +74,7 @@
     _SCM_RIGHTS = 0x01
     _socklen_t = ctypes.c_uint
 
-    if pycompat.sysplatform.startswith('linux'):
+    if pycompat.sysplatform.startswith(b'linux'):
         # socket.h says "the type should be socklen_t but the definition of
         # the kernel is incompatible with this."
         _cmsg_len_t = ctypes.c_size_t
@@ -122,7 +122,7 @@
     else:
         # recvmsg isn't always provided by libc; such systems are unsupported
         def _recvmsg(sockfd, msg, flags):
-            raise NotImplementedError('unsupported platform')
+            raise NotImplementedError(b'unsupported platform')
 
     def _CMSG_FIRSTHDR(msgh):
         if msgh.msg_controllen < ctypes.sizeof(_cmsghdr):
--- a/mercurial/pure/parsers.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/pure/parsers.py	Sun Oct 06 09:48:39 2019 -0400
@@ -28,9 +28,9 @@
     return x
 
 
-indexformatng = ">Qiiiiii20s12x"
-indexfirst = struct.calcsize('Q')
-sizeint = struct.calcsize('i')
+indexformatng = b">Qiiiiii20s12x"
+indexfirst = struct.calcsize(b'Q')
+sizeint = struct.calcsize(b'i')
 indexsize = struct.calcsize(indexformatng)
 
 
@@ -51,7 +51,7 @@
 
     def _check_index(self, i):
         if not isinstance(i, int):
-            raise TypeError("expecting int indexes")
+            raise TypeError(b"expecting int indexes")
         if i < 0 or i >= len(self):
             raise IndexError
 
@@ -83,7 +83,7 @@
 
     def __delitem__(self, i):
         if not isinstance(i, slice) or not i.stop == -1 or i.step is not None:
-            raise ValueError("deleting slices only supports a:-1 with step 1")
+            raise ValueError(b"deleting slices only supports a:-1 with step 1")
         i = i.start
         self._check_index(i)
         if i < self._lgt:
@@ -108,19 +108,19 @@
         count = 0
         while off <= len(self._data) - indexsize:
             (s,) = struct.unpack(
-                '>i', self._data[off + indexfirst : off + sizeint + indexfirst]
+                b'>i', self._data[off + indexfirst : off + sizeint + indexfirst]
             )
             if lgt is not None:
                 self._offsets[count] = off
             count += 1
             off += indexsize + s
         if off != len(self._data):
-            raise ValueError("corrupted data")
+            raise ValueError(b"corrupted data")
         return count
 
     def __delitem__(self, i):
         if not isinstance(i, slice) or not i.stop == -1 or i.step is not None:
-            raise ValueError("deleting slices only supports a:-1 with step 1")
+            raise ValueError(b"deleting slices only supports a:-1 with step 1")
         i = i.start
         self._check_index(i)
         if i < self._lgt:
@@ -143,7 +143,7 @@
 def parse_dirstate(dmap, copymap, st):
     parents = [st[:20], st[20:40]]
     # dereference fields so they will be local in loop
-    format = ">cllll"
+    format = b">cllll"
     e_size = struct.calcsize(format)
     pos1 = 40
     l = len(st)
@@ -151,11 +151,11 @@
     # the inner loop
     while pos1 < l:
         pos2 = pos1 + e_size
-        e = _unpack(">cllll", st[pos1:pos2])  # a literal here is faster
+        e = _unpack(b">cllll", st[pos1:pos2])  # a literal here is faster
         pos1 = pos2 + e[4]
         f = st[pos2:pos1]
-        if '\0' in f:
-            f, c = f.split('\0')
+        if b'\0' in f:
+            f, c = f.split(b'\0')
             copymap[f] = c
         dmap[f] = e[:4]
     return parents
@@ -165,9 +165,9 @@
     now = int(now)
     cs = stringio()
     write = cs.write
-    write("".join(pl))
+    write(b"".join(pl))
     for f, e in dmap.iteritems():
-        if e[0] == 'n' and e[3] == now:
+        if e[0] == b'n' and e[3] == now:
             # The file was last modified "simultaneously" with the current
             # write to dirstate (i.e. within the same second for file-
             # systems with a granularity of 1 sec). This commonly happens
@@ -181,8 +181,8 @@
             dmap[f] = e
 
         if f in copymap:
-            f = "%s\0%s" % (f, copymap[f])
-        e = _pack(">cllll", e[0], e[1], e[2], e[3], len(f))
+            f = b"%s\0%s" % (f, copymap[f])
+        e = _pack(b">cllll", e[0], e[1], e[2], e[3], len(f))
         write(e)
         write(f)
     return cs.getvalue()
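
Each serialized dirstate entry above is a b'>cllll' header (state byte, mode, size, mtime, filename length) followed by the filename, with any copy source appended after a NUL; a minimal round trip with made-up values:

    import struct

    fmt = b'>cllll'
    hdrsize = struct.calcsize(fmt)
    f = b'dir/file.txt'
    entry = struct.pack(fmt, b'n', 0o644, 12, 1570000000, len(f)) + f
    state, mode, size, mtime, flen = struct.unpack(fmt, entry[:hdrsize])
    assert (state, entry[hdrsize:hdrsize + flen]) == (b'n', f)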
--- a/mercurial/pushkey.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/pushkey.py	Sun Oct 06 09:48:39 2019 -0400
@@ -18,17 +18,17 @@
 def _nslist(repo):
     n = {}
     for k in _namespaces:
-        n[k] = ""
+        n[k] = b""
     if not obsolete.isenabled(repo, obsolete.exchangeopt):
-        n.pop('obsolete')
+        n.pop(b'obsolete')
     return n
 
 
 _namespaces = {
-    "namespaces": (lambda *x: False, _nslist),
-    "bookmarks": (bookmarks.pushbookmark, bookmarks.listbookmarks),
-    "phases": (phases.pushphase, phases.listphases),
-    "obsolete": (obsolete.pushmarker, obsolete.listmarkers),
+    b"namespaces": (lambda *x: False, _nslist),
+    b"bookmarks": (bookmarks.pushbookmark, bookmarks.listbookmarks),
+    b"phases": (phases.pushphase, phases.listphases),
+    b"obsolete": (obsolete.pushmarker, obsolete.listmarkers),
 }
 
 
@@ -59,13 +59,13 @@
 
 def encodekeys(keys):
     """encode the content of a pushkey namespace for exchange over the wire"""
-    return '\n'.join(['%s\t%s' % (encode(k), encode(v)) for k, v in keys])
+    return b'\n'.join([b'%s\t%s' % (encode(k), encode(v)) for k, v in keys])
 
 
 def decodekeys(data):
     """decode the content of a pushkey namespace from exchange over the wire"""
     result = {}
     for l in data.splitlines():
-        k, v = l.split('\t')
+        k, v = l.split(b'\t')
         result[decode(k)] = decode(v)
     return result
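
Setting aside the encode()/decode() translation helpers used above, the wire form is just tab-separated key/value pairs joined by newlines; a simplified round trip:

    def encodekeys(keys):
        return b'\n'.join(b'%s\t%s' % (k, v) for k, v in keys)

    def decodekeys(data):
        return dict(l.split(b'\t') for l in data.splitlines())

    blob = encodekeys([(b'bookmarks', b''), (b'phases', b'')])
    assert decodekeys(blob) == {b'bookmarks': b'', b'phases': b''}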
--- a/mercurial/pvec.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/pvec.py	Sun Oct 06 09:48:39 2019 -0400
@@ -74,7 +74,7 @@
 
 
 def _str(v, l):
-    bs = ""
+    bs = b""
     for p in pycompat.xrange(l):
         bs = chr(v & 255) + bs
         v >>= 8
@@ -159,7 +159,7 @@
 def ctxpvec(ctx):
     '''construct a pvec for ctx while filling in the cache'''
     r = ctx.repo()
-    if not util.safehasattr(r, "_pveccache"):
+    if not util.safehasattr(r, b"_pveccache"):
         r._pveccache = {}
     pvc = r._pveccache
     if ctx.rev() not in pvc:
@@ -213,7 +213,7 @@
 
     def __sub__(self, b):
         if self | b:
-            raise ValueError("concurrent pvecs")
+            raise ValueError(b"concurrent pvecs")
         return self._depth - b._depth
 
     def distance(self, b):
--- a/mercurial/pycompat.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/pycompat.py	Sun Oct 06 09:48:39 2019 -0400
@@ -356,7 +356,7 @@
     bytesurl = identity
 
     # this can't be parsed on Python 3
-    exec('def raisewithtb(exc, tb):\n' '    raise exc, None, tb\n')
+    exec(b'def raisewithtb(exc, tb):\n' b'    raise exc, None, tb\n')
 
     def fsencode(filename):
         """
--- a/mercurial/rcutil.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/rcutil.py	Sun Oct 06 09:48:39 2019 -0400
@@ -31,7 +31,7 @@
     if os.path.isdir(p):
         join = os.path.join
         return sorted(
-            join(p, f) for f, k in util.listdir(p) if f.endswith('.rc')
+            join(p, f) for f, k in util.listdir(p) if f.endswith(b'.rc')
         )
     return [p]
 
@@ -47,22 +47,22 @@
     if env is None:
         env = encoding.environ
     checklist = [
-        ('EDITOR', 'ui', 'editor'),
-        ('VISUAL', 'ui', 'editor'),
-        ('PAGER', 'pager', 'pager'),
+        (b'EDITOR', b'ui', b'editor'),
+        (b'VISUAL', b'ui', b'editor'),
+        (b'PAGER', b'pager', b'pager'),
     ]
     result = []
     for envname, section, configname in checklist:
         if envname not in env:
             continue
-        result.append((section, configname, env[envname], '$%s' % envname))
+        result.append((section, configname, env[envname], b'$%s' % envname))
     return result
 
 
 def defaultrcpath():
     '''return rc paths in default.d'''
     path = []
-    defaultpath = os.path.join(util.datapath, 'default.d')
+    defaultpath = os.path.join(util.datapath, b'default.d')
     if os.path.isdir(defaultpath):
         path = _expandrcpath(defaultpath)
     return path
@@ -80,18 +80,20 @@
     and is the config file path. if type is 'items', obj is a list of (section,
     name, value, source) that should fill the config directly.
     '''
-    envrc = ('items', envrcitems())
+    envrc = (b'items', envrcitems())
 
-    if 'HGRCPATH' in encoding.environ:
+    if b'HGRCPATH' in encoding.environ:
         # assume HGRCPATH is all about user configs so environments can be
         # overridden.
         _rccomponents = [envrc]
-        for p in encoding.environ['HGRCPATH'].split(pycompat.ospathsep):
+        for p in encoding.environ[b'HGRCPATH'].split(pycompat.ospathsep):
             if not p:
                 continue
-            _rccomponents.extend(('path', p) for p in _expandrcpath(p))
+            _rccomponents.extend((b'path', p) for p in _expandrcpath(p))
     else:
-        normpaths = lambda paths: [('path', os.path.normpath(p)) for p in paths]
+        normpaths = lambda paths: [
+            (b'path', os.path.normpath(p)) for p in paths
+        ]
         _rccomponents = normpaths(defaultrcpath() + systemrcpath())
         _rccomponents.append(envrc)
         _rccomponents.extend(normpaths(userrcpath()))
@@ -102,4 +104,4 @@
     '''return a dict of default environment variables and their values,
     intended to be set before starting a pager.
     '''
-    return {'LESS': 'FRX', 'LV': '-c'}
+    return {b'LESS': b'FRX', b'LV': b'-c'}
--- a/mercurial/registrar.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/registrar.py	Sun Oct 06 09:48:39 2019 -0400
@@ -62,10 +62,10 @@
         name = self._getname(decl)
 
         if name in self._table:
-            msg = 'duplicate registration for name: "%s"' % name
+            msg = b'duplicate registration for name: "%s"' % name
             raise error.ProgrammingError(msg)
 
-        if func.__doc__ and not util.safehasattr(func, '_origdoc'):
+        if func.__doc__ and not util.safehasattr(func, b'_origdoc'):
             func._origdoc = func.__doc__.strip()
             doc = pycompat.sysbytes(func._origdoc)
             func.__doc__ = pycompat.sysstr(self._formatdoc(decl, doc))
@@ -83,13 +83,13 @@
         of the two registrars must match.
         """
         if not isinstance(registrarbase, type(self)):
-            msg = "cannot merge different types of registrar"
+            msg = b"cannot merge different types of registrar"
             raise error.ProgrammingError(msg)
 
         dups = set(registrarbase._table).intersection(self._table)
 
         if dups:
-            msg = 'duplicate registration for names: "%s"' % '", "'.join(dups)
+            msg = b'duplicate registration for names: "%s"' % b'", "'.join(dups)
             raise error.ProgrammingError(msg)
 
         self._table.update(registrarbase._table)
@@ -97,7 +97,7 @@
     def _parsefuncdecl(self, decl):
         """Parse function declaration and return the name of function in it
         """
-        i = decl.find('(')
+        i = decl.find(b'(')
         if i >= 0:
             return decl[:i]
         else:
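
_parsefuncdecl simply takes everything before the first parenthesis as the registered name:

    def parsefuncdecl(decl):
        i = decl.find(b'(')
        return decl[:i] if i >= 0 else decl

    assert parsefuncdecl(b'mypredicate(arg1, arg2)') == b'mypredicate'
    assert parsefuncdecl(b'bare') == b'bare'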
@@ -195,19 +195,19 @@
     # [alias]
     # myalias = something
     # myalias:category = repo
-    CATEGORY_REPO_CREATION = 'repo'
-    CATEGORY_REMOTE_REPO_MANAGEMENT = 'remote'
-    CATEGORY_COMMITTING = 'commit'
-    CATEGORY_CHANGE_MANAGEMENT = 'management'
-    CATEGORY_CHANGE_ORGANIZATION = 'organization'
-    CATEGORY_FILE_CONTENTS = 'files'
-    CATEGORY_CHANGE_NAVIGATION = 'navigation'
-    CATEGORY_WORKING_DIRECTORY = 'wdir'
-    CATEGORY_IMPORT_EXPORT = 'import'
-    CATEGORY_MAINTENANCE = 'maintenance'
-    CATEGORY_HELP = 'help'
-    CATEGORY_MISC = 'misc'
-    CATEGORY_NONE = 'none'
+    CATEGORY_REPO_CREATION = b'repo'
+    CATEGORY_REMOTE_REPO_MANAGEMENT = b'remote'
+    CATEGORY_COMMITTING = b'commit'
+    CATEGORY_CHANGE_MANAGEMENT = b'management'
+    CATEGORY_CHANGE_ORGANIZATION = b'organization'
+    CATEGORY_FILE_CONTENTS = b'files'
+    CATEGORY_CHANGE_NAVIGATION = b'navigation'
+    CATEGORY_WORKING_DIRECTORY = b'wdir'
+    CATEGORY_IMPORT_EXPORT = b'import'
+    CATEGORY_MAINTENANCE = b'maintenance'
+    CATEGORY_HELP = b'help'
+    CATEGORY_MISC = b'misc'
+    CATEGORY_NONE = b'none'
 
     def _doregister(
         self,
@@ -280,7 +280,7 @@
     """
 
     _getname = _funcregistrarbase._parsefuncdecl
-    _docformat = "``%s``\n    %s"
+    _docformat = b"``%s``\n    %s"
 
     def _extrasetup(self, name, func, safe=False, takeorder=False, weight=1):
         func._safe = safe
@@ -331,7 +331,7 @@
     """
 
     _getname = _funcregistrarbase._parsefuncdecl
-    _docformat = "``%s``\n    %s"
+    _docformat = b"``%s``\n    %s"
 
     def _extrasetup(self, name, func, callstatus=False, weight=1):
         func._callstatus = callstatus
@@ -342,7 +342,7 @@
     """Base of decorator to register functions as template specific one
     """
 
-    _docformat = ":%s: %s"
+    _docformat = b":%s: %s"
 
 
 class templatekeyword(_templateregistrarbase):
@@ -506,12 +506,12 @@
     Otherwise, explicit 'filemerge.loadinternalmerge()' is needed.
     """
 
-    _docformat = "``:%s``\n    %s"
+    _docformat = b"``:%s``\n    %s"
 
     # merge type definitions:
     nomerge = None
-    mergeonly = 'mergeonly'  # just the full merge, no premerge
-    fullmerge = 'fullmerge'  # both premerge and merge
+    mergeonly = b'mergeonly'  # just the full merge, no premerge
+    fullmerge = b'fullmerge'  # both premerge and merge
 
     def _extrasetup(
         self,
@@ -531,4 +531,4 @@
         symlinkcap = symlink or mergetype == self.nomerge
 
         # actual capabilities, which this internal merge tool has
-        func.capabilities = {"binary": binarycap, "symlink": symlinkcap}
+        func.capabilities = {b"binary": binarycap, b"symlink": symlinkcap}
--- a/mercurial/repair.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/repair.py	Sun Oct 06 09:48:39 2019 -0400
@@ -36,16 +36,16 @@
 ):
     """create a bundle with the specified revisions as a backup"""
 
-    backupdir = "strip-backup"
+    backupdir = b"strip-backup"
     vfs = repo.vfs
     if not vfs.isdir(backupdir):
         vfs.mkdir(backupdir)
 
     # Include a hash of all the nodes in the filename for uniqueness
-    allcommits = repo.set('%ln::%ln', bases, heads)
+    allcommits = repo.set(b'%ln::%ln', bases, heads)
     allhashes = sorted(c.hex() for c in allcommits)
-    totalhash = hashlib.sha1(''.join(allhashes)).digest()
-    name = "%s/%s-%s-%s.hg" % (
+    totalhash = hashlib.sha1(b''.join(allhashes)).digest()
+    name = b"%s/%s-%s-%s.hg" % (
         backupdir,
         short(node),
         hex(totalhash[:4]),
@@ -54,25 +54,25 @@
 
     cgversion = changegroup.localversion(repo)
     comp = None
-    if cgversion != '01':
-        bundletype = "HG20"
+    if cgversion != b'01':
+        bundletype = b"HG20"
         if compress:
-            comp = 'BZ'
+            comp = b'BZ'
     elif compress:
-        bundletype = "HG10BZ"
+        bundletype = b"HG10BZ"
     else:
-        bundletype = "HG10UN"
+        bundletype = b"HG10UN"
 
     outgoing = discovery.outgoing(repo, missingroots=bases, missingheads=heads)
     contentopts = {
-        'cg.version': cgversion,
-        'obsolescence': obsolescence,
-        'phases': True,
+        b'cg.version': cgversion,
+        b'obsolescence': obsolescence,
+        b'phases': True,
     }
     return bundle2.writenewbundle(
         repo.ui,
         repo,
-        'strip',
+        b'strip',
         name,
         bundletype,
         outgoing,
@@ -109,16 +109,16 @@
     return s
 
 
-def strip(ui, repo, nodelist, backup=True, topic='backup'):
+def strip(ui, repo, nodelist, backup=True, topic=b'backup'):
     # This function requires the caller to lock the repo, but it operates
     # within a transaction of its own, and thus requires there to be no current
     # transaction when it is called.
     if repo.currenttransaction() is not None:
-        raise error.ProgrammingError('cannot strip from inside a transaction')
+        raise error.ProgrammingError(b'cannot strip from inside a transaction')
 
     # Simple way to maintain backwards compatibility for this
     # argument.
-    if backup in ['none', 'strip']:
+    if backup in [b'none', b'strip']:
         backup = False
 
     repo = repo.unfiltered()
@@ -165,7 +165,7 @@
     stripbases = [cl.node(r) for r in tostrip]
 
     stripobsidx = obsmarkers = ()
-    if repo.ui.configbool('devel', 'strip-obsmarkers'):
+    if repo.ui.configbool(b'devel', b'strip-obsmarkers'):
         obsmarkers = obsutil.exclusivemarkers(repo, stripbases)
     if obsmarkers:
         stripobsidx = [
@@ -192,14 +192,14 @@
             savebases,
             saveheads,
             node,
-            'temp',
+            b'temp',
             compress=False,
             obsolescence=False,
         )
 
     with ui.uninterruptible():
         try:
-            with repo.transaction("strip") as tr:
+            with repo.transaction(b"strip") as tr:
                 # TODO this code violates the interface abstraction of the
                 # transaction and makes assumptions that file storage is
                 # using append-only files. We'll need some kind of storage
@@ -216,7 +216,7 @@
 
                 for i in pycompat.xrange(offset, len(tr._entries)):
                     file, troffset, ignore = tr._entries[i]
-                    with repo.svfs(file, 'a', checkambig=True) as fp:
+                    with repo.svfs(file, b'a', checkambig=True) as fp:
                         fp.truncate(troffset)
                     if troffset == 0:
                         repo.store.markremoved(file)
@@ -227,25 +227,25 @@
                 repo._phasecache.filterunknown(repo)
 
             if tmpbundlefile:
-                ui.note(_("adding branch\n"))
-                f = vfs.open(tmpbundlefile, "rb")
+                ui.note(_(b"adding branch\n"))
+                f = vfs.open(tmpbundlefile, b"rb")
                 gen = exchange.readbundle(ui, f, tmpbundlefile, vfs)
                 if not repo.ui.verbose:
                     # silence internal shuffling chatter
                     repo.ui.pushbuffer()
-                tmpbundleurl = 'bundle:' + vfs.join(tmpbundlefile)
-                txnname = 'strip'
+                tmpbundleurl = b'bundle:' + vfs.join(tmpbundlefile)
+                txnname = b'strip'
                 if not isinstance(gen, bundle2.unbundle20):
-                    txnname = "strip\n%s" % util.hidepassword(tmpbundleurl)
+                    txnname = b"strip\n%s" % util.hidepassword(tmpbundleurl)
                 with repo.transaction(txnname) as tr:
                     bundle2.applybundle(
-                        repo, gen, tr, source='strip', url=tmpbundleurl
+                        repo, gen, tr, source=b'strip', url=tmpbundleurl
                     )
                 if not repo.ui.verbose:
                     repo.ui.popbuffer()
                 f.close()
 
-            with repo.transaction('repair') as tr:
+            with repo.transaction(b'repair') as tr:
                 bmchanges = [(m, repo[newbmtarget].node()) for m in updatebm]
                 repo._bookmarks.applychanges(repo, tr, bmchanges)
 
@@ -256,7 +256,7 @@
                 except OSError as e:
                     if e.errno != errno.ENOENT:
                         ui.warn(
-                            _('error removing %s: %s\n')
+                            _(b'error removing %s: %s\n')
                             % (
                                 undovfs.join(undofile),
                                 stringutil.forcebytestr(e),
@@ -266,18 +266,18 @@
         except:  # re-raises
             if backupfile:
                 ui.warn(
-                    _("strip failed, backup bundle stored in '%s'\n")
+                    _(b"strip failed, backup bundle stored in '%s'\n")
                     % vfs.join(backupfile)
                 )
             if tmpbundlefile:
                 ui.warn(
-                    _("strip failed, unrecovered changes stored in '%s'\n")
+                    _(b"strip failed, unrecovered changes stored in '%s'\n")
                     % vfs.join(tmpbundlefile)
                 )
                 ui.warn(
                     _(
-                        "(fix the problem, then recover the changesets with "
-                        "\"hg unbundle '%s'\")\n"
+                        b"(fix the problem, then recover the changesets with "
+                        b"\"hg unbundle '%s'\")\n"
                     )
                     % vfs.join(tmpbundlefile)
                 )
@@ -293,9 +293,9 @@
     return backupfile
 
 
-def softstrip(ui, repo, nodelist, backup=True, topic='backup'):
+def softstrip(ui, repo, nodelist, backup=True, topic=b'backup'):
     """perform a "soft" strip using the archived phase"""
-    tostrip = [c.node() for c in repo.set('sort(%ln::)', nodelist)]
+    tostrip = [c.node() for c in repo.set(b'sort(%ln::)', nodelist)]
     if not tostrip:
         return None
 
@@ -304,7 +304,7 @@
         node = tostrip[0]
         backupfile = _createstripbackup(repo, tostrip, node, topic)
 
-    with repo.transaction('strip') as tr:
+    with repo.transaction(b'strip') as tr:
         phases.retractboundary(repo, tr, phases.archived, tostrip)
         bmchanges = [(m, repo[newbmtarget].node()) for m in updatebm]
         repo._bookmarks.applychanges(repo, tr, bmchanges)
@@ -325,11 +325,11 @@
     if updatebm:
         # For a set s, max(parents(s) - s) is the same as max(heads(::s - s)),
         # but is much faster
-        newbmtarget = repo.revs('max(parents(%ld) - (%ld))', tostrip, tostrip)
+        newbmtarget = repo.revs(b'max(parents(%ld) - (%ld))', tostrip, tostrip)
         if newbmtarget:
             newbmtarget = repo[newbmtarget.first()].node()
         else:
-            newbmtarget = '.'
+            newbmtarget = b'.'
     return newbmtarget, updatebm
 
 
@@ -338,9 +338,9 @@
     vfs = repo.vfs
     cl = repo.changelog
     backupfile = backupbundle(repo, stripbases, cl.heads(), node, topic)
-    repo.ui.status(_("saved backup bundle to %s\n") % vfs.join(backupfile))
+    repo.ui.status(_(b"saved backup bundle to %s\n") % vfs.join(backupfile))
     repo.ui.log(
-        "backupbundle", "saved backup bundle to %s\n", vfs.join(backupfile)
+        b"backupbundle", b"saved backup bundle to %s\n", vfs.join(backupfile)
     )
     return backupfile
 
@@ -353,16 +353,16 @@
     # orphaned = affected - wanted
     # affected = descendants(roots(wanted))
     # wanted = revs
-    revset = '%ld - ( ::( (roots(%ld):: and not _phase(%s)) -%ld) )'
+    revset = b'%ld - ( ::( (roots(%ld):: and not _phase(%s)) -%ld) )'
     tostrip = set(repo.revs(revset, revs, revs, phases.internal, revs))
     notstrip = revs - tostrip
     if notstrip:
-        nodestr = ', '.join(sorted(short(repo[n].node()) for n in notstrip))
+        nodestr = b', '.join(sorted(short(repo[n].node()) for n in notstrip))
         ui.warn(
-            _('warning: orphaned descendants detected, ' 'not stripping %s\n')
+            _(b'warning: orphaned descendants detected, ' b'not stripping %s\n')
             % nodestr
         )
-    return [c.node() for c in repo.set('roots(%ld)', tostrip)]
+    return [c.node() for c in repo.set(b'roots(%ld)', tostrip)]
 
 
 class stripcallback(object):
@@ -372,7 +372,7 @@
         self.ui = ui
         self.repo = repo
         self.backup = backup
-        self.topic = topic or 'backup'
+        self.topic = topic or b'backup'
         self.nodelist = []
 
     def addnodes(self, nodes):
@@ -399,10 +399,10 @@
         return strip(ui, repo, nodes, backup=backup, topic=topic)
     # transaction postclose callbacks are called in alphabet order.
     # use '\xff' as prefix so we are likely to be called last.
-    callback = tr.getpostclose('\xffstrip')
+    callback = tr.getpostclose(b'\xffstrip')
     if callback is None:
         callback = stripcallback(ui, repo, backup=backup, topic=topic)
-        tr.addpostclose('\xffstrip', callback)
+        tr.addpostclose(b'\xffstrip', callback)
     if topic:
         callback.topic = topic
     callback.addnodes(nodelist)
@@ -415,12 +415,12 @@
 
 def manifestrevlogs(repo):
     yield repo.manifestlog.getstorage(b'')
-    if 'treemanifest' in repo.requirements:
+    if b'treemanifest' in repo.requirements:
         # This logic is safe if treemanifest isn't enabled, but also
         # pointless, so we skip it if treemanifest isn't enabled.
         for unencoded, encoded, size in repo.store.datafiles():
-            if unencoded.startswith('meta/') and unencoded.endswith(
-                '00manifest.i'
+            if unencoded.startswith(b'meta/') and unencoded.endswith(
+                b'00manifest.i'
             ):
                 dir = unencoded[5:-12]
                 yield repo.manifestlog.getstorage(dir)
@@ -433,11 +433,11 @@
     """
     repo = repo.unfiltered()
 
-    if 'fncache' not in repo.requirements:
+    if b'fncache' not in repo.requirements:
         ui.warn(
             _(
-                '(not rebuilding fncache because repository does not '
-                'support fncache)\n'
+                b'(not rebuilding fncache because repository does not '
+                b'support fncache)\n'
             )
         )
         return
@@ -451,7 +451,7 @@
         seenfiles = set()
 
         progress = ui.makeprogress(
-            _('rebuilding'), unit=_('changesets'), total=len(repo)
+            _(b'rebuilding'), unit=_(b'changesets'), total=len(repo)
         )
         for rev in repo:
             progress.update(rev)
@@ -463,8 +463,8 @@
                     continue
                 seenfiles.add(f)
 
-                i = 'data/%s.i' % f
-                d = 'data/%s.d' % f
+                i = b'data/%s.i' % f
+                d = b'data/%s.d' % f
 
                 if repo.store._exists(i):
                     newentries.add(i)
@@ -473,12 +473,12 @@
 
         progress.complete()
 
-        if 'treemanifest' in repo.requirements:
+        if b'treemanifest' in repo.requirements:
             # This logic is safe if treemanifest isn't enabled, but also
             # pointless, so we skip it if treemanifest isn't enabled.
             for dir in util.dirs(seenfiles):
-                i = 'meta/%s/00manifest.i' % dir
-                d = 'meta/%s/00manifest.d' % dir
+                i = b'meta/%s/00manifest.i' % dir
+                d = b'meta/%s/00manifest.d' % dir
 
                 if repo.store._exists(i):
                     newentries.add(i)
@@ -488,22 +488,22 @@
         addcount = len(newentries - oldentries)
         removecount = len(oldentries - newentries)
         for p in sorted(oldentries - newentries):
-            ui.write(_('removing %s\n') % p)
+            ui.write(_(b'removing %s\n') % p)
         for p in sorted(newentries - oldentries):
-            ui.write(_('adding %s\n') % p)
+            ui.write(_(b'adding %s\n') % p)
 
         if addcount or removecount:
             ui.write(
-                _('%d items added, %d removed from fncache\n')
+                _(b'%d items added, %d removed from fncache\n')
                 % (addcount, removecount)
             )
             fnc.entries = newentries
             fnc._dirty = True
 
-            with repo.transaction('fncache') as tr:
+            with repo.transaction(b'fncache') as tr:
                 fnc.write(tr)
         else:
-            ui.write(_('fncache already up to date\n'))
+            ui.write(_(b'fncache already up to date\n'))
 
 
 def deleteobsmarkers(obsstore, indices):
@@ -529,7 +529,7 @@
             continue
         left.append(m)
 
-    newobsstorefile = obsstore.svfs('obsstore', 'w', atomictemp=True)
+    newobsstorefile = obsstore.svfs(b'obsstore', b'w', atomictemp=True)
     for bytes in obsolete.encodemarkers(left, True, obsstore._version):
         newobsstorefile.write(bytes)
     newobsstorefile.close()
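
One repair.py hunk above is load-bearing rather than cosmetic:
hashlib.sha1() rejects str input on Python 3, so the hex hashes joined
in backupbundle() must be bytes end to end. A quick illustration
outside the patch, with fabricated digest values:

    import hashlib

    allhashes = [b'deadbeef', b'cafef00d']  # hex digests, as bytes
    totalhash = hashlib.sha1(b''.join(allhashes)).digest()
    assert len(totalhash) == 20
    # hashlib.sha1(''.join(...)) would raise TypeError on Python 3:
    # "Unicode-objects must be encoded before hashing"
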
--- a/mercurial/repocache.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/repocache.py	Sun Oct 06 09:48:39 2019 -0400
@@ -128,12 +128,12 @@
     """Copy cached attributes from srcrepo to destrepo"""
     destfilecache = destrepo._filecache
     srcfilecache = srcrepo._filecache
-    if 'changelog' in srcfilecache:
-        destfilecache['changelog'] = ce = srcfilecache['changelog']
+    if b'changelog' in srcfilecache:
+        destfilecache[b'changelog'] = ce = srcfilecache[b'changelog']
         ce.obj.opener = ce.obj._realopener = destrepo.svfs
-    if 'obsstore' in srcfilecache:
-        destfilecache['obsstore'] = ce = srcfilecache['obsstore']
+    if b'obsstore' in srcfilecache:
+        destfilecache[b'obsstore'] = ce = srcfilecache[b'obsstore']
         ce.obj.svfs = destrepo.svfs
-    if '_phasecache' in srcfilecache:
-        destfilecache['_phasecache'] = ce = srcfilecache['_phasecache']
+    if b'_phasecache' in srcfilecache:
+        destfilecache[b'_phasecache'] = ce = srcfilecache[b'_phasecache']
         ce.obj.opener = destrepo.svfs
--- a/mercurial/repoview.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/repoview.py	Sun Oct 06 09:48:39 2019 -0400
@@ -31,7 +31,7 @@
     branchmap (see mercurial.utils.repoviewutils.subsettable), you cannot set
     "public" changesets as "hideable". Doing so would break multiple code
     assertions and lead to crashes."""
-    obsoletes = obsolete.getrevs(repo, 'obsolete')
+    obsoletes = obsolete.getrevs(repo, b'obsolete')
     internals = repo._phasecache.getrevset(repo, phases.localhiddenphases)
     internals = frozenset(internals)
     return obsoletes | internals
@@ -107,8 +107,8 @@
     Secret and hidden changeset should not pretend to be here."""
     assert not repo.changelog.filteredrevs
     # fast path in simple case to avoid impact of non optimised code
-    hiddens = filterrevs(repo, 'visible')
-    secrets = filterrevs(repo, 'served.hidden')
+    hiddens = filterrevs(repo, b'visible')
+    secrets = filterrevs(repo, b'served.hidden')
     if secrets:
         return frozenset(hiddens | secrets)
     else:
@@ -120,7 +120,7 @@
     # fast check to avoid revset call on huge repo
     if any(repo._phasecache.phaseroots[1:]):
         getphase = repo._phasecache.phase
-        maymutable = filterrevs(repo, 'base')
+        maymutable = filterrevs(repo, b'base')
         return frozenset(r for r in maymutable if getphase(repo, r))
     return frozenset()
 
@@ -158,12 +158,12 @@
 # Otherwise your filter will have to recompute all its branches cache
 # from scratch (very slow).
 filtertable = {
-    'visible': computehidden,
-    'visible-hidden': computehidden,
-    'served.hidden': computesecret,
-    'served': computeunserved,
-    'immutable': computemutable,
-    'base': computeimpactable,
+    b'visible': computehidden,
+    b'visible-hidden': computehidden,
+    b'served.hidden': computesecret,
+    b'served': computeunserved,
+    b'immutable': computemutable,
+    b'base': computeimpactable,
 }
 
 _basefiltername = list(filtertable)
@@ -175,17 +175,17 @@
     If extra filtering is configured, we make sure the associated filtered view
     are declared and return the associated id.
     """
-    frevs = ui.config('experimental', 'extra-filter-revs')
+    frevs = ui.config(b'experimental', b'extra-filter-revs')
     if frevs is None:
         return None
 
-    fid = pycompat.sysbytes(util.DIGESTS['sha1'](frevs).hexdigest())[:12]
+    fid = pycompat.sysbytes(util.DIGESTS[b'sha1'](frevs).hexdigest())[:12]
 
-    combine = lambda fname: fname + '%' + fid
+    combine = lambda fname: fname + b'%' + fid
 
     subsettable = repoviewutil.subsettable
 
-    if combine('base') not in filtertable:
+    if combine(b'base') not in filtertable:
         for name in _basefiltername:
 
             def extrafilteredrevs(repo, *args, **kwargs):
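
Note the asymmetry in the extrafilter() hunk: the config value frevs is
bytes going into the digest, but hexdigest() returns str on Python 3,
hence the round trip through pycompat.sysbytes() before slicing. A
sketch of the same dance with plain stdlib calls (pycompat is
Mercurial-internal):

    import hashlib

    frevs = b'not public()'                  # illustrative config value
    hexid = hashlib.sha1(frevs).hexdigest()  # str on Python 3
    fid = hexid.encode('ascii')[:12]         # back to bytes, truncated
    assert isinstance(fid, bytes) and len(fid) == 12
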
--- a/mercurial/revlog.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/revlog.py	Sun Oct 06 09:48:39 2019 -0400
@@ -142,7 +142,7 @@
 
 def offset_type(offset, type):
     if (type & ~flagutil.REVIDX_KNOWN_FLAGS) != 0:
-        raise ValueError('unknown revlog index flags')
+        raise ValueError(b'unknown revlog index flags')
     return int(int(offset) << 16 | type)
 
 
@@ -197,7 +197,7 @@
 # 20 bytes: parent 1 nodeid
 # 20 bytes: parent 2 nodeid
 # 20 bytes: nodeid
-indexformatv0 = struct.Struct(">4l20s20s20s")
+indexformatv0 = struct.Struct(b">4l20s20s20s")
 indexformatv0_pack = indexformatv0.pack
 indexformatv0_unpack = indexformatv0.unpack
 
@@ -243,7 +243,7 @@
     def packentry(self, entry, node, version, rev):
         if gettype(entry[0]):
             raise error.RevlogError(
-                _('index entry flags need revlog ' 'version 1')
+                _(b'index entry flags need revlog ' b'version 1')
             )
         e2 = (
             getoffset(entry[0]),
@@ -267,9 +267,9 @@
 #  4 bytes: parent 1 rev
 #  4 bytes: parent 2 rev
 # 32 bytes: nodeid
-indexformatng = struct.Struct(">Qiiiiii20s12x")
+indexformatng = struct.Struct(b">Qiiiiii20s12x")
 indexformatng_pack = indexformatng.pack
-versionformat = struct.Struct(">I")
+versionformat = struct.Struct(b">I")
 versionformat_pack = versionformat.pack
 versionformat_unpack = versionformat.unpack
 
@@ -353,7 +353,7 @@
         """
         self.upperboundcomp = upperboundcomp
         self.indexfile = indexfile
-        self.datafile = datafile or (indexfile[:-2] + ".d")
+        self.datafile = datafile or (indexfile[:-2] + b".d")
         self.opener = opener
         #  When True, indexfile is opened with checkambig=True at writing, to
         #  avoid file stat ambiguity.
@@ -365,7 +365,7 @@
         # Maps rev to chain base rev.
         self._chainbasecache = util.lrucachedict(100)
         # 2-tuple of (offset, data) of raw data from the revlog at an offset.
-        self._chunkcache = (0, '')
+        self._chunkcache = (0, b'')
         # How much data to read and cache into the raw revlog data cache.
         self._chunkcachesize = 65536
         self._maxchainlen = None
@@ -376,7 +376,7 @@
         # Mapping of revision integer to full node.
         self._nodecache = {nullid: nullrev}
         self._nodepos = None
-        self._compengine = 'zlib'
+        self._compengine = b'zlib'
         self._compengineopts = {}
         self._maxdeltachainspan = -1
         self._withsparseread = False
@@ -397,49 +397,49 @@
         mmapindexthreshold = None
         opts = self.opener.options
 
-        if 'revlogv2' in opts:
+        if b'revlogv2' in opts:
             newversionflags = REVLOGV2 | FLAG_INLINE_DATA
-        elif 'revlogv1' in opts:
+        elif b'revlogv1' in opts:
             newversionflags = REVLOGV1 | FLAG_INLINE_DATA
-            if 'generaldelta' in opts:
+            if b'generaldelta' in opts:
                 newversionflags |= FLAG_GENERALDELTA
-        elif 'revlogv0' in self.opener.options:
+        elif b'revlogv0' in self.opener.options:
             newversionflags = REVLOGV0
         else:
             newversionflags = REVLOG_DEFAULT_VERSION
 
-        if 'chunkcachesize' in opts:
-            self._chunkcachesize = opts['chunkcachesize']
-        if 'maxchainlen' in opts:
-            self._maxchainlen = opts['maxchainlen']
-        if 'deltabothparents' in opts:
-            self._deltabothparents = opts['deltabothparents']
-        self._lazydelta = bool(opts.get('lazydelta', True))
+        if b'chunkcachesize' in opts:
+            self._chunkcachesize = opts[b'chunkcachesize']
+        if b'maxchainlen' in opts:
+            self._maxchainlen = opts[b'maxchainlen']
+        if b'deltabothparents' in opts:
+            self._deltabothparents = opts[b'deltabothparents']
+        self._lazydelta = bool(opts.get(b'lazydelta', True))
         self._lazydeltabase = False
         if self._lazydelta:
-            self._lazydeltabase = bool(opts.get('lazydeltabase', False))
-        if 'compengine' in opts:
-            self._compengine = opts['compengine']
-        if 'zlib.level' in opts:
-            self._compengineopts['zlib.level'] = opts['zlib.level']
-        if 'zstd.level' in opts:
-            self._compengineopts['zstd.level'] = opts['zstd.level']
-        if 'maxdeltachainspan' in opts:
-            self._maxdeltachainspan = opts['maxdeltachainspan']
-        if self._mmaplargeindex and 'mmapindexthreshold' in opts:
-            mmapindexthreshold = opts['mmapindexthreshold']
-        self.hassidedata = bool(opts.get('side-data', False))
+            self._lazydeltabase = bool(opts.get(b'lazydeltabase', False))
+        if b'compengine' in opts:
+            self._compengine = opts[b'compengine']
+        if b'zlib.level' in opts:
+            self._compengineopts[b'zlib.level'] = opts[b'zlib.level']
+        if b'zstd.level' in opts:
+            self._compengineopts[b'zstd.level'] = opts[b'zstd.level']
+        if b'maxdeltachainspan' in opts:
+            self._maxdeltachainspan = opts[b'maxdeltachainspan']
+        if self._mmaplargeindex and b'mmapindexthreshold' in opts:
+            mmapindexthreshold = opts[b'mmapindexthreshold']
+        self.hassidedata = bool(opts.get(b'side-data', False))
         if self.hassidedata:
             self._flagprocessors[REVIDX_SIDEDATA] = sidedatautil.processors
-        self._sparserevlog = bool(opts.get('sparse-revlog', False))
-        withsparseread = bool(opts.get('with-sparse-read', False))
+        self._sparserevlog = bool(opts.get(b'sparse-revlog', False))
+        withsparseread = bool(opts.get(b'with-sparse-read', False))
         # sparse-revlog forces sparse-read
         self._withsparseread = self._sparserevlog or withsparseread
-        if 'sparse-read-density-threshold' in opts:
-            self._srdensitythreshold = opts['sparse-read-density-threshold']
-        if 'sparse-read-min-gap-size' in opts:
-            self._srmingapsize = opts['sparse-read-min-gap-size']
-        if opts.get('enableellipsis'):
+        if b'sparse-read-density-threshold' in opts:
+            self._srdensitythreshold = opts[b'sparse-read-density-threshold']
+        if b'sparse-read-min-gap-size' in opts:
+            self._srmingapsize = opts[b'sparse-read-min-gap-size']
+        if opts.get(b'enableellipsis'):
             self._flagprocessors[REVIDX_ELLIPSIS] = ellipsisprocessor
 
         # revlog v0 doesn't have flag processors
@@ -448,16 +448,16 @@
 
         if self._chunkcachesize <= 0:
             raise error.RevlogError(
-                _('revlog chunk cache size %r is not ' 'greater than 0')
+                _(b'revlog chunk cache size %r is not ' b'greater than 0')
                 % self._chunkcachesize
             )
         elif self._chunkcachesize & (self._chunkcachesize - 1):
             raise error.RevlogError(
-                _('revlog chunk cache size %r is not a ' 'power of 2')
+                _(b'revlog chunk cache size %r is not a ' b'power of 2')
                 % self._chunkcachesize
             )
 
-        indexdata = ''
+        indexdata = b''
         self._initempty = True
         try:
             with self._indexfp() as f:
@@ -489,7 +489,7 @@
         if fmt == REVLOGV0:
             if flags:
                 raise error.RevlogError(
-                    _('unknown flags (%#04x) in version %d ' 'revlog %s')
+                    _(b'unknown flags (%#04x) in version %d ' b'revlog %s')
                     % (flags >> 16, fmt, self.indexfile)
                 )
 
@@ -499,7 +499,7 @@
         elif fmt == REVLOGV1:
             if flags & ~REVLOGV1_FLAGS:
                 raise error.RevlogError(
-                    _('unknown flags (%#04x) in version %d ' 'revlog %s')
+                    _(b'unknown flags (%#04x) in version %d ' b'revlog %s')
                     % (flags >> 16, fmt, self.indexfile)
                 )
 
@@ -509,7 +509,7 @@
         elif fmt == REVLOGV2:
             if flags & ~REVLOGV2_FLAGS:
                 raise error.RevlogError(
-                    _('unknown flags (%#04x) in version %d ' 'revlog %s')
+                    _(b'unknown flags (%#04x) in version %d ' b'revlog %s')
                     % (flags >> 16, fmt, self.indexfile)
                 )
 
@@ -519,7 +519,7 @@
 
         else:
             raise error.RevlogError(
-                _('unknown version (%d) in revlog %s') % (fmt, self.indexfile)
+                _(b'unknown version (%d) in revlog %s') % (fmt, self.indexfile)
             )
         # sparse-revlog can't be on without general-delta (issue6056)
         if not self._generaldelta:
@@ -533,7 +533,9 @@
         try:
             d = self._io.parseindex(indexdata, self._inline)
         except (ValueError, IndexError):
-            raise error.RevlogError(_("index %s is corrupted") % self.indexfile)
+            raise error.RevlogError(
+                _(b"index %s is corrupted") % self.indexfile
+            )
         self.index, nodemap, self._chunkcache = d
         if nodemap is not None:
             self.nodemap = self._nodecache = nodemap
@@ -549,16 +551,16 @@
         engine = util.compengines[self._compengine]
         return engine.revlogcompressor(self._compengineopts)
 
-    def _indexfp(self, mode='r'):
+    def _indexfp(self, mode=b'r'):
         """file object for the revlog's index file"""
         args = {r'mode': mode}
-        if mode != 'r':
+        if mode != b'r':
             args[r'checkambig'] = self._checkambig
-        if mode == 'w':
+        if mode == b'w':
             args[r'atomictemp'] = True
         return self.opener(self.indexfile, **args)
 
-    def _datafp(self, mode='r'):
+    def _datafp(self, mode=b'r'):
         """file object for the revlog's data file"""
         return self.opener(self.datafile, mode=mode)
 
@@ -635,7 +637,7 @@
     def clearcaches(self):
         self._revisioncache = None
         self._chainbasecache.clear()
-        self._chunkcache = (0, '')
+        self._chunkcache = (0, b'')
         self._pcache = {}
 
         try:
@@ -656,7 +658,7 @@
             # parsers.c radix tree lookup failed
             if node == wdirid or node in wdirfilenodeids:
                 raise error.WdirUnsupported
-            raise error.LookupError(node, self.indexfile, _('no node'))
+            raise error.LookupError(node, self.indexfile, _(b'no node'))
         except KeyError:
             # pure python cache lookup failed
             n = self._nodecache
@@ -674,7 +676,7 @@
                     return r
             if node == wdirid or node in wdirfilenodeids:
                 raise error.WdirUnsupported
-            raise error.LookupError(node, self.indexfile, _('no node'))
+            raise error.LookupError(node, self.indexfile, _(b'no node'))
 
     # Accessors for index entries.
 
@@ -848,7 +850,7 @@
         if rustancestor is not None:
             lazyancestors = rustancestor.LazyAncestors
             arg = self.index
-        elif util.safehasattr(parsers, 'rustlazyancestors'):
+        elif util.safehasattr(parsers, b'rustlazyancestors'):
             lazyancestors = ancestor.rustlazyancestors
             arg = self.index
         else:
@@ -1289,7 +1291,7 @@
         try:
             # str(rev)
             rev = int(id)
-            if "%d" % rev != id:
+            if b"%d" % rev != id:
                 raise ValueError
             if rev < 0:
                 rev = len(self) + rev
@@ -1326,7 +1328,7 @@
             # fast path: for unfiltered changelog, radix tree is accurate
             if not getattr(self, 'filteredrevs', None):
                 raise error.AmbiguousPrefixLookupError(
-                    id, self.indexfile, _('ambiguous identifier')
+                    id, self.indexfile, _(b'ambiguous identifier')
                 )
             # fall through to slow path that filters hidden revisions
         except (AttributeError, ValueError):
@@ -1352,7 +1354,7 @@
                         self._pcache[id] = nl[0]
                         return nl[0]
                     raise error.AmbiguousPrefixLookupError(
-                        id, self.indexfile, _('ambiguous identifier')
+                        id, self.indexfile, _(b'ambiguous identifier')
                     )
                 if maybewdir:
                     raise error.WdirUnsupported
@@ -1372,7 +1374,7 @@
         if n:
             return n
 
-        raise error.LookupError(id, self.indexfile, _('no match found'))
+        raise error.LookupError(id, self.indexfile, _(b'no match found'))
 
     def shortest(self, node, minlength=1):
         """Find the shortest unambiguous prefix that matches node."""
@@ -1386,11 +1388,11 @@
                 # single 'ff...' match
                 return True
             if matchednode is None:
-                raise error.LookupError(node, self.indexfile, _('no node'))
+                raise error.LookupError(node, self.indexfile, _(b'no node'))
             return True
 
         def maybewdir(prefix):
-            return all(c == 'f' for c in pycompat.iterbytestr(prefix))
+            return all(c == b'f' for c in pycompat.iterbytestr(prefix))
 
         hexnode = hex(node)
 
@@ -1407,7 +1409,7 @@
                 return disambiguate(hexnode, length)
             except error.RevlogError:
                 if node != wdirid:
-                    raise error.LookupError(node, self.indexfile, _('no node'))
+                    raise error.LookupError(node, self.indexfile, _(b'no node'))
             except AttributeError:
                 # Fall through to pure code
                 pass
@@ -1474,8 +1476,8 @@
             if len(d) - startoffset < length:
                 raise error.RevlogError(
                     _(
-                        'partial read of revlog %s; expected %d bytes from '
-                        'offset %d, got %d'
+                        b'partial read of revlog %s; expected %d bytes from '
+                        b'offset %d, got %d'
                     )
                     % (
                         self.indexfile if self._inline else self.datafile,
@@ -1490,8 +1492,8 @@
         if len(d) < length:
             raise error.RevlogError(
                 _(
-                    'partial read of revlog %s; expected %d bytes from offset '
-                    '%d, got %d'
+                    b'partial read of revlog %s; expected %d bytes from offset '
+                    b'%d, got %d'
                 )
                 % (
                     self.indexfile if self._inline else self.datafile,
@@ -1629,7 +1631,7 @@
 
     def _chunkclear(self):
         """Clear the raw chunk cache."""
-        self._chunkcache = (0, '')
+        self._chunkcache = (0, b'')
 
     def deltaparent(self, rev):
         """return deltaparent of the given revision"""
@@ -1646,7 +1648,7 @@
         """
         if not self._sparserevlog:
             return self.deltaparent(rev) == nullrev
-        elif util.safehasattr(self.index, 'issnapshot'):
+        elif util.safehasattr(self.index, b'issnapshot'):
             # directly assign the method to cache the testing and access
             self.issnapshot = self.index.issnapshot
             return self.issnapshot(rev)
@@ -1667,7 +1669,7 @@
     def snapshotdepth(self, rev):
         """number of snapshot in the chain before this one"""
         if not self.issnapshot(rev):
-            raise error.ProgrammingError('revision %d not a snapshot')
+            raise error.ProgrammingError(b'revision %d not a snapshot')
         return len(self._deltachain(rev)[0]) - 1
 
     def revdiff(self, rev1, rev2):
@@ -1683,11 +1685,11 @@
 
     def _processflags(self, text, flags, operation, raw=False):
         """deprecated entry point to access flag processors"""
-        msg = '_processflag(...) use the specialized variant'
-        util.nouideprecwarn(msg, '5.2', stacklevel=2)
+        msg = b'_processflag(...) use the specialized variant'
+        util.nouideprecwarn(msg, b'5.2', stacklevel=2)
         if raw:
             return text, flagutil.processflagsraw(self, text, flags)
-        elif operation == 'read':
+        elif operation == b'read':
             return flagutil.processflagsread(self, text, flags)
         else:  # write operation
             return flagutil.processflagswrite(self, text, flags)
@@ -1703,10 +1705,10 @@
         """
         if raw:
             msg = (
-                'revlog.revision(..., raw=True) is deprecated, '
-                'use revlog.rawdata(...)'
+                b'revlog.revision(..., raw=True) is deprecated, '
+                b'use revlog.rawdata(...)'
             )
-            util.nouideprecwarn(msg, '5.2', stacklevel=2)
+            util.nouideprecwarn(msg, b'5.2', stacklevel=2)
         return self._revisiondata(nodeorrev, _df, raw=raw)[0]
 
     def sidedata(self, nodeorrev, _df=None):
@@ -1729,7 +1731,7 @@
 
         # fast path the special `nullid` rev
         if node == nullid:
-            return "", {}
+            return b"", {}
 
         # The text as stored inside the revlog. Might be the revision or might
         # need to be processed to retrieve the revision.
@@ -1759,7 +1761,7 @@
             try:
                 r = flagutil.processflagsread(self, rawtext, flags)
             except error.SidedataHashError as exc:
-                msg = _("integrity check failed on %s:%s sidedata key %d")
+                msg = _(b"integrity check failed on %s:%s sidedata key %d")
                 msg %= (self.indexfile, pycompat.bytestr(rev), exc.sidedatakey)
                 raise error.RevlogError(msg)
             text, validatehash, sidedata = r
@@ -1851,7 +1853,7 @@
                 if revornode is None:
                     revornode = templatefilters.short(hex(node))
                 raise error.RevlogError(
-                    _("integrity check failed on %s:%s")
+                    _(b"integrity check failed on %s:%s")
                     % (self.indexfile, pycompat.bytestr(revornode))
                 )
         except error.RevlogError:
@@ -1876,7 +1878,7 @@
         trinfo = tr.find(self.indexfile)
         if trinfo is None:
             raise error.RevlogError(
-                _("%s not found in the transaction") % self.indexfile
+                _(b"%s not found in the transaction") % self.indexfile
             )
 
         trindex = trinfo[2]
@@ -1896,11 +1898,11 @@
             # its usage.
             self._writinghandles = None
 
-        with self._indexfp('r') as ifh, self._datafp('w') as dfh:
+        with self._indexfp(b'r') as ifh, self._datafp(b'w') as dfh:
             for r in self:
                 dfh.write(self._getsegmentforrevs(r, r, df=ifh)[1])
 
-        with self._indexfp('w') as fp:
+        with self._indexfp(b'w') as fp:
             self.version &= ~FLAG_INLINE_DATA
             self._inline = False
             io = self._io
@@ -1947,7 +1949,7 @@
         """
         if link == nullrev:
             raise error.RevlogError(
-                _("attempted to add linkrev -1 to %s") % self.indexfile
+                _(b"attempted to add linkrev -1 to %s") % self.indexfile
             )
 
         if sidedata is None:
@@ -1955,7 +1957,7 @@
             flags = flags & ~REVIDX_SIDEDATA
         elif not self.hassidedata:
             raise error.ProgrammingError(
-                _("trying to add sidedata to a revlog who don't support them")
+                _(b"trying to add sidedata to a revlog who don't support them")
             )
         else:
             flags |= REVIDX_SIDEDATA
@@ -1974,7 +1976,9 @@
 
         if len(rawtext) > _maxentrysize:
             raise error.RevlogError(
-                _("%s: size of %d bytes exceeds maximum revlog storage of 2GiB")
+                _(
+                    b"%s: size of %d bytes exceeds maximum revlog storage of 2GiB"
+                )
                 % (self.indexfile, len(rawtext))
             )
 
@@ -2015,8 +2019,8 @@
         """
         dfh = None
         if not self._inline:
-            dfh = self._datafp("a+")
-        ifh = self._indexfp("a+")
+            dfh = self._datafp(b"a+")
+        ifh = self._indexfp(b"a+")
         try:
             return self._addrevision(
                 node,
@@ -2039,17 +2043,17 @@
     def compress(self, data):
         """Generate a possibly-compressed representation of data."""
         if not data:
-            return '', data
+            return b'', data
 
         compressed = self._compressor.compress(data)
 
         if compressed:
             # The revlog compressor added the header in the returned data.
-            return '', compressed
-
-        if data[0:1] == '\0':
-            return '', data
-        return 'u', data
+            return b'', compressed
+
+        if data[0:1] == b'\0':
+            return b'', data
+        return b'u', data
 
     def decompress(self, data):
         """Decompress a revlog chunk.
@@ -2083,18 +2087,18 @@
         # compressed chunks. And this matters for changelog and manifest reads.
         t = data[0:1]
 
-        if t == 'x':
+        if t == b'x':
             try:
                 return _zlibdecompress(data)
             except zlib.error as e:
                 raise error.RevlogError(
-                    _('revlog decompress error: %s')
+                    _(b'revlog decompress error: %s')
                     % stringutil.forcebytestr(e)
                 )
         # '\0' is more common than 'u' so it goes first.
-        elif t == '\0':
+        elif t == b'\0':
             return data
-        elif t == 'u':
+        elif t == b'u':
             return util.buffer(data, 1)
 
         try:
@@ -2105,7 +2109,7 @@
                 compressor = engine.revlogcompressor(self._compengineopts)
                 self._decompressors[t] = compressor
             except KeyError:
-                raise error.RevlogError(_('unknown compression type %r') % t)
+                raise error.RevlogError(_(b'unknown compression type %r') % t)
 
         return compressor.decompress(data)
 
@@ -2139,11 +2143,11 @@
         """
         if node == nullid:
             raise error.RevlogError(
-                _("%s: attempt to add null revision") % self.indexfile
+                _(b"%s: attempt to add null revision") % self.indexfile
             )
         if node == wdirid or node in wdirfilenodeids:
             raise error.RevlogError(
-                _("%s: attempt to add wdir revision") % self.indexfile
+                _(b"%s: attempt to add wdir revision") % self.indexfile
             )
 
         if self._inline:
@@ -2256,7 +2260,7 @@
         """
 
         if self._writinghandles:
-            raise error.ProgrammingError('cannot nest addgroup() calls')
+            raise error.ProgrammingError(b'cannot nest addgroup() calls')
 
         nodes = []
 
@@ -2264,7 +2268,7 @@
         end = 0
         if r:
             end = self.end(r - 1)
-        ifh = self._indexfp("a+")
+        ifh = self._indexfp(b"a+")
         isize = r * self._io.size
         if self._inline:
             transaction.add(self.indexfile, end + isize, r)
@@ -2272,7 +2276,7 @@
         else:
             transaction.add(self.indexfile, isize, r)
             transaction.add(self.datafile, end)
-            dfh = self._datafp("a+")
+            dfh = self._datafp(b"a+")
 
         def flush():
             if dfh:
@@ -2299,12 +2303,12 @@
                 for p in (p1, p2):
                     if p not in self.nodemap:
                         raise error.LookupError(
-                            p, self.indexfile, _('unknown parent')
+                            p, self.indexfile, _(b'unknown parent')
                         )
 
                 if deltabase not in self.nodemap:
                     raise error.LookupError(
-                        deltabase, self.indexfile, _('unknown delta base')
+                        deltabase, self.indexfile, _(b'unknown delta base')
                     )
 
                 baserev = self.rev(deltabase)
@@ -2312,7 +2316,7 @@
                 if baserev != nullrev and self.iscensored(baserev):
                     # if base is censored, delta must be full replacement in a
                     # single patch operation
-                    hlen = struct.calcsize(">lll")
+                    hlen = struct.calcsize(b">lll")
                     oldlen = self.rawsize(baserev)
                     newlen = len(delta) - hlen
                     if delta[:hlen] != mdiff.replacediffheader(oldlen, newlen):
@@ -2352,8 +2356,8 @@
                     # addrevision switched from inline to conventional
                     # reopen the index
                     ifh.close()
-                    dfh = self._datafp("a+")
-                    ifh = self._indexfp("a+")
+                    dfh = self._datafp(b"a+")
+                    ifh = self._indexfp(b"a+")
                     self._writinghandles = (ifh, dfh)
         finally:
             self._writinghandles = None
@@ -2491,13 +2495,13 @@
         assumehaveparentrevisions=False,
         deltamode=repository.CG_DELTAMODE_STD,
     ):
-        if nodesorder not in ('nodes', 'storage', 'linear', None):
+        if nodesorder not in (b'nodes', b'storage', b'linear', None):
             raise error.ProgrammingError(
-                'unhandled value for nodesorder: %s' % nodesorder
+                b'unhandled value for nodesorder: %s' % nodesorder
             )
 
         if nodesorder is None and not self._generaldelta:
-            nodesorder = 'storage'
+            nodesorder = b'storage'
 
         if (
             not self._storedeltachains
@@ -2520,13 +2524,13 @@
             assumehaveparentrevisions=assumehaveparentrevisions,
         )
 
-    DELTAREUSEALWAYS = 'always'
-    DELTAREUSESAMEREVS = 'samerevs'
-    DELTAREUSENEVER = 'never'
-
-    DELTAREUSEFULLADD = 'fulladd'
-
-    DELTAREUSEALL = {'always', 'samerevs', 'never', 'fulladd'}
+    DELTAREUSEALWAYS = b'always'
+    DELTAREUSESAMEREVS = b'samerevs'
+    DELTAREUSENEVER = b'never'
+
+    DELTAREUSEFULLADD = b'fulladd'
+
+    DELTAREUSEALL = {b'always', b'samerevs', b'never', b'fulladd'}
 
     def clone(
         self,
@@ -2578,15 +2582,17 @@
         for merges. By default, the current default is used.
         """
         if deltareuse not in self.DELTAREUSEALL:
-            raise ValueError(_('value for deltareuse invalid: %s') % deltareuse)
+            raise ValueError(
+                _(b'value for deltareuse invalid: %s') % deltareuse
+            )
 
         if len(destrevlog):
-            raise ValueError(_('destination revlog is not empty'))
+            raise ValueError(_(b'destination revlog is not empty'))
 
         if getattr(self, 'filteredrevs', None):
-            raise ValueError(_('source revlog has filtered revisions'))
+            raise ValueError(_(b'source revlog has filtered revisions'))
         if getattr(destrevlog, 'filteredrevs', None):
-            raise ValueError(_('destination revlog has filtered revisions'))
+            raise ValueError(_(b'destination revlog has filtered revisions'))
 
         # lazydelta and lazydeltabase controls whether to reuse a cached delta,
         # if possible.
@@ -2660,11 +2666,11 @@
                     rawtext = self.rawdata(rev)
 
                 ifh = destrevlog.opener(
-                    destrevlog.indexfile, 'a+', checkambig=False
+                    destrevlog.indexfile, b'a+', checkambig=False
                 )
                 dfh = None
                 if not destrevlog._inline:
-                    dfh = destrevlog.opener(destrevlog.datafile, 'a+')
+                    dfh = destrevlog.opener(destrevlog.datafile, b'a+')
                 try:
                     destrevlog._addrevision(
                         node,
@@ -2690,7 +2696,7 @@
     def censorrevision(self, tr, censornode, tombstone=b''):
         if (self.version & 0xFFFF) == REVLOGV0:
             raise error.RevlogError(
-                _('cannot censor with version %d revlogs') % self.version
+                _(b'cannot censor with version %d revlogs') % self.version
             )
 
         censorrev = self.rev(censornode)
@@ -2698,7 +2704,7 @@
 
         if len(tombstone) > self.rawsize(censorrev):
             raise error.Abort(
-                _('censor tombstone must be no longer than ' 'censored data')
+                _(b'censor tombstone must be no longer than ' b'censored data')
             )
 
         # Rewriting the revlog in place is hard. Our strategy for censoring is
@@ -2732,13 +2738,13 @@
                 if newrl.deltaparent(rev) != nullrev:
                     raise error.Abort(
                         _(
-                            'censored revision stored as delta; '
-                            'cannot censor'
+                            b'censored revision stored as delta; '
+                            b'cannot censor'
                         ),
                         hint=_(
-                            'censoring of revlogs is not '
-                            'fully implemented; please report '
-                            'this bug'
+                            b'censoring of revlogs is not '
+                            b'fully implemented; please report '
+                            b'this bug'
                         ),
                     )
                 continue
@@ -2747,8 +2753,8 @@
                 if self.deltaparent(rev) != nullrev:
                     raise error.Abort(
                         _(
-                            'cannot censor due to censored '
-                            'revision having delta stored'
+                            b'cannot censor due to censored '
+                            b'revision having delta stored'
                         )
                     )
                 rawtext = self._chunk(rev)
@@ -2759,9 +2765,9 @@
                 rawtext, tr, self.linkrev(rev), p1, p2, node, self.flags(rev)
             )
 
-        tr.addbackup(self.indexfile, location='store')
+        tr.addbackup(self.indexfile, location=b'store')
         if not self._inline:
-            tr.addbackup(self.datafile, location='store')
+            tr.addbackup(self.datafile, location=b'store')
 
         self.opener.rename(newrl.indexfile, self.indexfile)
         if not self._inline:
@@ -2778,20 +2784,20 @@
         """
         dd, di = self.checksize()
         if dd:
-            yield revlogproblem(error=_('data length off by %d bytes') % dd)
+            yield revlogproblem(error=_(b'data length off by %d bytes') % dd)
         if di:
-            yield revlogproblem(error=_('index contains %d extra bytes') % di)
+            yield revlogproblem(error=_(b'index contains %d extra bytes') % di)
 
         version = self.version & 0xFFFF
 
         # The verifier tells us what version revlog we should be.
-        if version != state['expectedversion']:
+        if version != state[b'expectedversion']:
             yield revlogproblem(
-                warning=_("warning: '%s' uses revlog format %d; expected %d")
-                % (self.indexfile, version, state['expectedversion'])
+                warning=_(b"warning: '%s' uses revlog format %d; expected %d")
+                % (self.indexfile, version, state[b'expectedversion'])
             )
 
-        state['skipread'] = set()
+        state[b'skipread'] = set()
 
         for rev in self:
             node = self.node(rev)
@@ -2845,12 +2851,12 @@
             #     use either "text" (external), or "rawtext" (in revlog).
 
             try:
-                skipflags = state.get('skipflags', 0)
+                skipflags = state.get(b'skipflags', 0)
                 if skipflags:
                     skipflags &= self.flags(rev)
 
                 if skipflags:
-                    state['skipread'].add(node)
+                    state[b'skipread'].add(node)
                 else:
                     # Side-effect: read content and verify hash.
                     self.revision(node)
@@ -2860,23 +2866,23 @@
 
                 if l1 != l2:
                     yield revlogproblem(
-                        error=_('unpacked size is %d, %d expected') % (l2, l1),
+                        error=_(b'unpacked size is %d, %d expected') % (l2, l1),
                         node=node,
                     )
 
             except error.CensoredNodeError:
-                if state['erroroncensored']:
+                if state[b'erroroncensored']:
                     yield revlogproblem(
-                        error=_('censored file data'), node=node
+                        error=_(b'censored file data'), node=node
                     )
-                    state['skipread'].add(node)
+                    state[b'skipread'].add(node)
             except Exception as e:
                 yield revlogproblem(
-                    error=_('unpacking %s: %s')
+                    error=_(b'unpacking %s: %s')
                     % (short(node), stringutil.forcebytestr(e)),
                     node=node,
                 )
-                state['skipread'].add(node)
+                state[b'skipread'].add(node)
 
     def storageinfo(
         self,
@@ -2889,21 +2895,21 @@
         d = {}
 
         if exclusivefiles:
-            d['exclusivefiles'] = [(self.opener, self.indexfile)]
+            d[b'exclusivefiles'] = [(self.opener, self.indexfile)]
             if not self._inline:
-                d['exclusivefiles'].append((self.opener, self.datafile))
+                d[b'exclusivefiles'].append((self.opener, self.datafile))
 
         if sharedfiles:
-            d['sharedfiles'] = []
+            d[b'sharedfiles'] = []
 
         if revisionscount:
-            d['revisionscount'] = len(self)
+            d[b'revisionscount'] = len(self)
 
         if trackedsize:
-            d['trackedsize'] = sum(map(self.rawsize, iter(self)))
+            d[b'trackedsize'] = sum(map(self.rawsize, iter(self)))
 
         if storedsize:
-            d['storedsize'] = sum(
+            d[b'storedsize'] = sum(
                 self.opener.stat(path).st_size for path in self.files()
             )
 
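
Two details in the revlog.py hunks deserve a note. struct.Struct()
accepts a bytes format string on both Python 2 and 3, so prefixing
">Qiiiiii20s12x" and friends is safe, and the b"%d" % rev fast path in
the rev lookup relies on PEP 461 bytes formatting. For example:

    import struct

    indexformatng = struct.Struct(b">Qiiiiii20s12x")
    assert indexformatng.size == 64  # 8 + 6 * 4 + 20 + 12 pad bytes

    rev = 42
    assert b"%d" % rev == b"42"  # bytes %d needs Python >= 3.5
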
--- a/mercurial/revlogutils/deltas.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/revlogutils/deltas.py	Sun Oct 06 09:48:39 2019 -0400
@@ -514,7 +514,7 @@
     # special case deltas which replace entire base; no need to decode
     # base revision. this neatly avoids censored bases, which throw when
     # they're decoded.
-    hlen = struct.calcsize(">lll")
+    hlen = struct.calcsize(b">lll")
     if delta[:hlen] == mdiff.replacediffheader(
         revlog.rawsize(baserev), len(delta) - hlen
     ):
@@ -531,7 +531,7 @@
             revlog.checkhash(fulltext, expectednode, p1=p1, p2=p2)
         if flags & REVIDX_ISCENSORED:
             raise error.StorageError(
-                _('node %s is not censored') % expectednode
+                _(b'node %s is not censored') % expectednode
             )
     except error.CensoredNodeError:
         # must pass the censored index flag to add censored revisions
@@ -726,7 +726,7 @@
 
 def _findsnapshots(revlog, cache, start_rev):
     """find snapshot from start_rev to tip"""
-    if util.safehasattr(revlog.index, 'findsnapshots'):
+    if util.safehasattr(revlog.index, b'findsnapshots'):
         revlog.index.findsnapshots(cache, start_rev)
     else:
         deltaparent = revlog.deltaparent
--- a/mercurial/revlogutils/flagutil.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/revlogutils/flagutil.py	Sun Oct 06 09:48:39 2019 -0400
@@ -70,13 +70,13 @@
 
 def insertflagprocessor(flag, processor, flagprocessors):
     if not flag & REVIDX_KNOWN_FLAGS:
-        msg = _("cannot register processor on unknown flag '%#x'.") % flag
+        msg = _(b"cannot register processor on unknown flag '%#x'.") % flag
         raise error.ProgrammingError(msg)
     if flag not in REVIDX_FLAGS_ORDER:
-        msg = _("flag '%#x' undefined in REVIDX_FLAGS_ORDER.") % flag
+        msg = _(b"flag '%#x' undefined in REVIDX_FLAGS_ORDER.") % flag
         raise error.ProgrammingError(msg)
     if flag in flagprocessors:
-        msg = _("cannot register multiple processors on flag '%#x'.") % flag
+        msg = _(b"cannot register multiple processors on flag '%#x'.") % flag
         raise error.Abort(msg)
     flagprocessors[flag] = processor
 
@@ -97,7 +97,7 @@
     processed text and ``validatehash`` is a bool indicating whether the
     returned text should be checked for hash integrity.
     """
-    return _processflagsfunc(revlog, text, flags, 'write', sidedata=sidedata)[
+    return _processflagsfunc(revlog, text, flags, b'write', sidedata=sidedata)[
         :2
     ]
 
@@ -120,7 +120,7 @@
     processed text and ``validatehash`` is a bool indicating whether the
     returned text should be checked for hash integrity.
     """
-    return _processflagsfunc(revlog, text, flags, 'read')
+    return _processflagsfunc(revlog, text, flags, b'read')
 
 
 def processflagsraw(revlog, text, flags):
@@ -139,7 +139,7 @@
     processed text and ``validatehash`` is a bool indicating whether the
     returned text should be checked for hash integrity.
     """
-    return _processflagsfunc(revlog, text, flags, 'raw')[1]
+    return _processflagsfunc(revlog, text, flags, b'raw')[1]
 
 
 def _processflagsfunc(revlog, text, flags, operation, sidedata=None):
@@ -150,19 +150,19 @@
     # fast path: no flag processors will run
     if flags == 0:
         return text, True, {}
-    if operation not in ('read', 'write', 'raw'):
-        raise error.ProgrammingError(_("invalid '%s' operation") % operation)
+    if operation not in (b'read', b'write', b'raw'):
+        raise error.ProgrammingError(_(b"invalid '%s' operation") % operation)
     # Check all flags are known.
     if flags & ~REVIDX_KNOWN_FLAGS:
         raise revlog._flagserrorclass(
-            _("incompatible revision flag '%#x'")
+            _(b"incompatible revision flag '%#x'")
             % (flags & ~REVIDX_KNOWN_FLAGS)
         )
     validatehash = True
     # Depending on the operation (read or write), the order might be
     # reversed due to non-commutative transforms.
     orderedflags = REVIDX_FLAGS_ORDER
-    if operation == 'write':
+    if operation == b'write':
         orderedflags = reversed(orderedflags)
 
     outsidedata = {}
@@ -173,16 +173,16 @@
             vhash = True
 
             if flag not in revlog._flagprocessors:
-                message = _("missing processor for flag '%#x'") % flag
+                message = _(b"missing processor for flag '%#x'") % flag
                 raise revlog._flagserrorclass(message)
 
             processor = revlog._flagprocessors[flag]
             if processor is not None:
                 readtransform, writetransform, rawtransform = processor
 
-                if operation == 'raw':
+                if operation == b'raw':
                     vhash = rawtransform(revlog, text)
-                elif operation == 'read':
+                elif operation == b'read':
                     text, vhash, s = readtransform(revlog, text)
                     outsidedata.update(s)
                 else:  # write operation
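
The flagutil.py changes also show why the conversion has to be done
wholesale: b'read' == 'read' is simply False on Python 3 (no exception
is raised), so a half-converted caller would silently fall through
membership checks such as operation not in (b'read', b'write', b'raw').
Concretely:

    ops = (b'read', b'write', b'raw')
    assert (b'read' == 'read') is False  # cross-type compare: just False
    assert 'read' not in ops             # a str caller would be rejected
    assert b'read' in ops
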
--- a/mercurial/revlogutils/sidedata.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/revlogutils/sidedata.py	Sun Oct 06 09:48:39 2019 -0400
@@ -63,7 +63,7 @@
     for key, value in sidedata:
         rawtext.append(value)
     rawtext.append(bytes(text))
-    return ''.join(rawtext), False
+    return b''.join(rawtext), False
 
 
 def sidedatareadprocessor(rl, text):
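
The separator passed to join() must match the type of the chunks being joined, so once the revlog deals in bytes the empty separator has to be b'' as well. A minimal illustration of the Python 3 behavior being accommodated:

    chunks = [b'sidedata-value', b'revision text']
    b''.join(chunks)  # b'sidedata-valuerevision text'
    ''.join(chunks)   # TypeError: sequence item 0: expected str instance, bytes found
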
--- a/mercurial/revset.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/revset.py	Sun Oct 06 09:48:39 2019 -0400
@@ -93,22 +93,22 @@
 #
 # There are a few revsets that always redefine the order if 'define' is
 # specified: 'sort(X)', 'reverse(X)', 'x:y'.
-anyorder = 'any'  # don't care the order, could be even random-shuffled
-defineorder = 'define'  # ALWAYS redefine, or ALWAYS follow the current order
-followorder = 'follow'  # MUST follow the current order
+anyorder = b'any'  # don't care the order, could be even random-shuffled
+defineorder = b'define'  # ALWAYS redefine, or ALWAYS follow the current order
+followorder = b'follow'  # MUST follow the current order
 
 # helpers
 
 
 def getset(repo, subset, x, order=defineorder):
     if not x:
-        raise error.ParseError(_("missing argument"))
+        raise error.ParseError(_(b"missing argument"))
     return methods[x[0]](repo, subset, *x[1:], order=order)
 
 
 def _getrevsource(repo, r):
     extra = repo[r].extra()
-    for label in ('source', 'transplant_source', 'rebase_source'):
+    for label in (b'source', b'transplant_source', b'rebase_source'):
         if label in extra:
             try:
                 return repo[extra[label]].rev()
@@ -126,7 +126,7 @@
 
 def stringset(repo, subset, x, order):
     if not x:
-        raise error.ParseError(_("empty string is not a valid revision"))
+        raise error.ParseError(_(b"empty string is not a valid revision"))
     x = scmutil.intrev(scmutil.revsymbol(repo, x))
     if x in subset or x in _virtualrevs and isinstance(subset, fullreposet):
         return baseset([x])
@@ -246,7 +246,7 @@
 
 
 def relationset(repo, subset, x, y, order):
-    raise error.ParseError(_("can't use a relation in this context"))
+    raise error.ParseError(_(b"can't use a relation in this context"))
 
 
 def _splitrange(a, b):
@@ -285,8 +285,8 @@
     # descendants() predicates
     a, b = getintrange(
         z,
-        _('relation subscript must be an integer or a range'),
-        _('relation subscript bounds must be integers'),
+        _(b'relation subscript must be an integer or a range'),
+        _(b'relation subscript bounds must be integers'),
         deffirst=-(dagop.maxlogdepth - 1),
         deflast=+(dagop.maxlogdepth - 1),
     )
@@ -323,18 +323,18 @@
 
 
 def subscriptset(repo, subset, x, y, order):
-    raise error.ParseError(_("can't use a subscript in this context"))
+    raise error.ParseError(_(b"can't use a subscript in this context"))
 
 
 def listset(repo, subset, *xs, **opts):
     raise error.ParseError(
-        _("can't use a list in this context"),
-        hint=_('see \'hg help "revsets.x or y"\''),
+        _(b"can't use a list in this context"),
+        hint=_(b'see \'hg help "revsets.x or y"\''),
     )
 
 
 def keyvaluepair(repo, subset, k, v, order):
-    raise error.ParseError(_("can't use a key-value pair in this context"))
+    raise error.ParseError(_(b"can't use a key-value pair in this context"))
 
 
 def func(repo, subset, a, b, order):
@@ -369,16 +369,16 @@
 predicate = registrar.revsetpredicate()
 
 
-@predicate('_destupdate')
+@predicate(b'_destupdate')
 def _destupdate(repo, subset, x):
     # experimental revset for update destination
-    args = getargsdict(x, 'limit', 'clean')
+    args = getargsdict(x, b'limit', b'clean')
     return subset & baseset(
         [destutil.destupdate(repo, **pycompat.strkwargs(args))[0]]
     )
 
 
-@predicate('_destmerge')
+@predicate(b'_destmerge')
 def _destmerge(repo, subset, x):
     # experimental revset for merge destination
     sourceset = None
@@ -387,7 +387,7 @@
     return subset & baseset([destutil.destmerge(repo, sourceset=sourceset)])
 
 
-@predicate('adds(pattern)', safe=True, weight=30)
+@predicate(b'adds(pattern)', safe=True, weight=30)
 def adds(repo, subset, x):
     """Changesets that add a file matching pattern.
 
@@ -396,11 +396,11 @@
     directory.
     """
     # i18n: "adds" is a keyword
-    pat = getstring(x, _("adds requires a pattern"))
+    pat = getstring(x, _(b"adds requires a pattern"))
     return checkstatus(repo, subset, pat, 1)
 
 
-@predicate('ancestor(*changeset)', safe=True, weight=0.5)
+@predicate(b'ancestor(*changeset)', safe=True, weight=0.5)
 def ancestor(repo, subset, x):
     """A greatest common ancestor of the changesets.
 
@@ -432,7 +432,7 @@
     return subset & s
 
 
-@predicate('ancestors(set[, depth])', safe=True)
+@predicate(b'ancestors(set[, depth])', safe=True)
 def ancestors(repo, subset, x):
     """Changesets that are ancestors of changesets in set, including the
     given changesets themselves.
@@ -441,30 +441,30 @@
     the specified generation.
     """
     # startdepth is for internal use only until we can decide the UI
-    args = getargsdict(x, 'ancestors', 'set depth startdepth')
-    if 'set' not in args:
+    args = getargsdict(x, b'ancestors', b'set depth startdepth')
+    if b'set' not in args:
         # i18n: "ancestors" is a keyword
-        raise error.ParseError(_('ancestors takes at least 1 argument'))
+        raise error.ParseError(_(b'ancestors takes at least 1 argument'))
     startdepth = stopdepth = None
-    if 'startdepth' in args:
+    if b'startdepth' in args:
         n = getinteger(
-            args['startdepth'], "ancestors expects an integer startdepth"
+            args[b'startdepth'], b"ancestors expects an integer startdepth"
         )
         if n < 0:
-            raise error.ParseError("negative startdepth")
+            raise error.ParseError(b"negative startdepth")
         startdepth = n
-    if 'depth' in args:
+    if b'depth' in args:
         # i18n: "ancestors" is a keyword
-        n = getinteger(args['depth'], _("ancestors expects an integer depth"))
+        n = getinteger(args[b'depth'], _(b"ancestors expects an integer depth"))
         if n < 0:
-            raise error.ParseError(_("negative depth"))
+            raise error.ParseError(_(b"negative depth"))
         stopdepth = n + 1
     return _ancestors(
-        repo, subset, args['set'], startdepth=startdepth, stopdepth=stopdepth
+        repo, subset, args[b'set'], startdepth=startdepth, stopdepth=stopdepth
     )
 
 
-@predicate('_firstancestors', safe=True)
+@predicate(b'_firstancestors', safe=True)
 def _firstancestors(repo, subset, x):
     # ``_firstancestors(set)``
     # Like ``ancestors(set)`` but follows only the first parents.
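
The depth arguments parsed above surface in revset usage like the following; the examples are illustrative, and startdepth stays internal-only per the comment above:

    hg log -r 'ancestors(tip, depth=2)'   # tip, its parents, and their parents
    hg log -r 'ancestors(tip, depth=-1)'  # rejected with "negative depth"
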
@@ -483,7 +483,7 @@
                 break
             if len(c) > 1:
                 raise error.RepoLookupError(
-                    _("revision in set has more than one child")
+                    _(b"revision in set has more than one child")
                 )
             r = c[0].rev()
         else:
@@ -496,7 +496,7 @@
     Changesets that are the Nth ancestor (first parents only) of a changeset
     in set.
     """
-    n = getinteger(n, _("~ expects a number"))
+    n = getinteger(n, _(b"~ expects a number"))
     if n < 0:
         # children lookup
         return _childrenspec(repo, subset, x, -n, order)
@@ -512,19 +512,19 @@
     return subset & ps
 
 
-@predicate('author(string)', safe=True, weight=10)
+@predicate(b'author(string)', safe=True, weight=10)
 def author(repo, subset, x):
     """Alias for ``user(string)``.
     """
     # i18n: "author" is a keyword
-    n = getstring(x, _("author requires a string"))
+    n = getstring(x, _(b"author requires a string"))
     kind, pattern, matcher = _substringmatcher(n, casesensitive=False)
     return subset.filter(
-        lambda x: matcher(repo[x].user()), condrepr=('<user %r>', n)
+        lambda x: matcher(repo[x].user()), condrepr=(b'<user %r>', n)
     )
 
 
-@predicate('bisect(string)', safe=True)
+@predicate(b'bisect(string)', safe=True)
 def bisect(repo, subset, x):
     """Changesets marked in the specified bisect status:
 
@@ -537,41 +537,41 @@
     - ``current``            : the cset currently being bisected
     """
     # i18n: "bisect" is a keyword
-    status = getstring(x, _("bisect requires a string")).lower()
+    status = getstring(x, _(b"bisect requires a string")).lower()
     state = set(hbisect.get(repo, status))
     return subset & state
 
 
 # Backward-compatibility
 # - no help entry so that we do not advertise it any more
-@predicate('bisected', safe=True)
+@predicate(b'bisected', safe=True)
 def bisected(repo, subset, x):
     return bisect(repo, subset, x)
 
 
-@predicate('bookmark([name])', safe=True)
+@predicate(b'bookmark([name])', safe=True)
 def bookmark(repo, subset, x):
     """The named bookmark or all bookmarks.
 
     Pattern matching is supported for `name`. See :hg:`help revisions.patterns`.
     """
     # i18n: "bookmark" is a keyword
-    args = getargs(x, 0, 1, _('bookmark takes one or no arguments'))
+    args = getargs(x, 0, 1, _(b'bookmark takes one or no arguments'))
     if args:
         bm = getstring(
             args[0],
             # i18n: "bookmark" is a keyword
-            _('the argument to bookmark must be a string'),
+            _(b'the argument to bookmark must be a string'),
         )
         kind, pattern, matcher = stringutil.stringmatcher(bm)
         bms = set()
-        if kind == 'literal':
+        if kind == b'literal':
             if bm == pattern:
                 pattern = repo._bookmarks.expandname(pattern)
             bmrev = repo._bookmarks.get(pattern, None)
             if not bmrev:
                 raise error.RepoLookupError(
-                    _("bookmark '%s' does not exist") % pattern
+                    _(b"bookmark '%s' does not exist") % pattern
                 )
             bms.add(repo[bmrev].rev())
         else:
@@ -587,7 +587,7 @@
     return subset & bms
 
 
-@predicate('branch(string or set)', safe=True, weight=10)
+@predicate(b'branch(string or set)', safe=True, weight=10)
 def branch(repo, subset, x):
     """
     All changesets belonging to the given branch or the branches of the given
@@ -605,26 +605,27 @@
             return repo[r].branch()
 
     try:
-        b = getstring(x, '')
+        b = getstring(x, b'')
     except error.ParseError:
         # not a string, but another revspec, e.g. tip()
         pass
     else:
         kind, pattern, matcher = stringutil.stringmatcher(b)
-        if kind == 'literal':
+        if kind == b'literal':
             # note: falls through to the revspec case if no branch with
             # this name exists and pattern kind is not specified explicitly
             if repo.branchmap().hasbranch(pattern):
                 return subset.filter(
-                    lambda r: matcher(getbranch(r)), condrepr=('<branch %r>', b)
+                    lambda r: matcher(getbranch(r)),
+                    condrepr=(b'<branch %r>', b),
                 )
-            if b.startswith('literal:'):
+            if b.startswith(b'literal:'):
                 raise error.RepoLookupError(
-                    _("branch '%s' does not exist") % pattern
+                    _(b"branch '%s' does not exist") % pattern
                 )
         else:
             return subset.filter(
-                lambda r: matcher(getbranch(r)), condrepr=('<branch %r>', b)
+                lambda r: matcher(getbranch(r)), condrepr=(b'<branch %r>', b)
             )
 
     s = getset(repo, fullreposet(repo), x)
@@ -634,11 +635,11 @@
     c = s.__contains__
     return subset.filter(
         lambda r: c(r) or getbranch(r) in b,
-        condrepr=lambda: '<branch %r>' % _sortedb(b),
+        condrepr=lambda: b'<branch %r>' % _sortedb(b),
     )
 
 
-@predicate('phasedivergent()', safe=True)
+@predicate(b'phasedivergent()', safe=True)
 def phasedivergent(repo, subset, x):
     """Mutable changesets marked as successors of public changesets.
 
@@ -646,12 +647,12 @@
     (EXPERIMENTAL)
     """
     # i18n: "phasedivergent" is a keyword
-    getargs(x, 0, 0, _("phasedivergent takes no arguments"))
-    phasedivergent = obsmod.getrevs(repo, 'phasedivergent')
+    getargs(x, 0, 0, _(b"phasedivergent takes no arguments"))
+    phasedivergent = obsmod.getrevs(repo, b'phasedivergent')
     return subset & phasedivergent
 
 
-@predicate('bundle()', safe=True)
+@predicate(b'bundle()', safe=True)
 def bundle(repo, subset, x):
     """Changesets in the bundle.
 
@@ -660,7 +661,7 @@
     try:
         bundlerevs = repo.changelog.bundlerevs
     except AttributeError:
-        raise error.Abort(_("no bundle provided - specify with -R"))
+        raise error.Abort(_(b"no bundle provided - specify with -R"))
     return subset & bundlerevs
 
 
@@ -671,7 +672,7 @@
     1: added
     2: removed
     """
-    hasset = matchmod.patkind(pat) == 'set'
+    hasset = matchmod.patkind(pat) == b'set'
 
     mcache = [None]
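
The numeric field selects which component of the status triple checkstatus() tests, matching the predicates that call it elsewhere in this file:

    # field  status component  caller
    # 0      modified          modifies(pattern)
    # 1      added             adds(pattern)
    # 2      removed           removes(pattern)
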
 
@@ -701,7 +702,7 @@
                 if m(f):
                     return True
 
-    return subset.filter(matches, condrepr=('<status[%r] %r>', field, pat))
+    return subset.filter(matches, condrepr=(b'<status[%r] %r>', field, pat))
 
 
 def _children(repo, subset, parentset):
@@ -722,7 +723,7 @@
     return baseset(cs)
 
 
-@predicate('children(set)', safe=True)
+@predicate(b'children(set)', safe=True)
 def children(repo, subset, x):
     """Child changesets of changesets in set.
     """
@@ -731,19 +732,19 @@
     return subset & cs
 
 
-@predicate('closed()', safe=True, weight=10)
+@predicate(b'closed()', safe=True, weight=10)
 def closed(repo, subset, x):
     """Changeset is closed.
     """
     # i18n: "closed" is a keyword
-    getargs(x, 0, 0, _("closed takes no arguments"))
+    getargs(x, 0, 0, _(b"closed takes no arguments"))
     return subset.filter(
-        lambda r: repo[r].closesbranch(), condrepr='<branch closed>'
+        lambda r: repo[r].closesbranch(), condrepr=b'<branch closed>'
     )
 
 
 # for internal use
-@predicate('_commonancestorheads(set)', safe=True)
+@predicate(b'_commonancestorheads(set)', safe=True)
 def _commonancestorheads(repo, subset, x):
     # This is an internal method for quickly calculating "heads(::x and
     # ::y)"
@@ -756,7 +757,7 @@
     return subset & baseset(ancs)
 
 
-@predicate('commonancestors(set)', safe=True)
+@predicate(b'commonancestors(set)', safe=True)
 def commonancestors(repo, subset, x):
     """Changesets that are ancestors of every changeset in set.
     """
@@ -768,7 +769,7 @@
     return subset
 
 
-@predicate('contains(pattern)', weight=100)
+@predicate(b'contains(pattern)', weight=100)
 def contains(repo, subset, x):
     """The revision's manifest contains a file matching pattern (but might not
     modify it). See :hg:`help patterns` for information about file patterns.
@@ -778,7 +779,7 @@
     for efficiency.
     """
     # i18n: "contains" is a keyword
-    pat = getstring(x, _("contains requires a pattern"))
+    pat = getstring(x, _(b"contains requires a pattern"))
 
     def matches(x):
         if not matchmod.patkind(pat):
@@ -793,10 +794,10 @@
                     return True
         return False
 
-    return subset.filter(matches, condrepr=('<contains %r>', pat))
-
-
-@predicate('converted([id])', safe=True)
+    return subset.filter(matches, condrepr=(b'<contains %r>', pat))
+
+
+@predicate(b'converted([id])', safe=True)
 def converted(repo, subset, x):
     """Changesets converted from the given identifier in the old repository if
     present, or all converted changesets if no identifier is specified.
@@ -807,33 +808,33 @@
 
     rev = None
     # i18n: "converted" is a keyword
-    l = getargs(x, 0, 1, _('converted takes one or no arguments'))
+    l = getargs(x, 0, 1, _(b'converted takes one or no arguments'))
     if l:
         # i18n: "converted" is a keyword
-        rev = getstring(l[0], _('converted requires a revision'))
+        rev = getstring(l[0], _(b'converted requires a revision'))
 
     def _matchvalue(r):
-        source = repo[r].extra().get('convert_revision', None)
+        source = repo[r].extra().get(b'convert_revision', None)
         return source is not None and (rev is None or source.startswith(rev))
 
     return subset.filter(
-        lambda r: _matchvalue(r), condrepr=('<converted %r>', rev)
+        lambda r: _matchvalue(r), condrepr=(b'<converted %r>', rev)
     )
 
 
-@predicate('date(interval)', safe=True, weight=10)
+@predicate(b'date(interval)', safe=True, weight=10)
 def date(repo, subset, x):
     """Changesets within the interval, see :hg:`help dates`.
     """
     # i18n: "date" is a keyword
-    ds = getstring(x, _("date requires a string"))
+    ds = getstring(x, _(b"date requires a string"))
     dm = dateutil.matchdate(ds)
     return subset.filter(
-        lambda x: dm(repo[x].date()[0]), condrepr=('<date %r>', ds)
+        lambda x: dm(repo[x].date()[0]), condrepr=(b'<date %r>', ds)
     )
 
 
-@predicate('desc(string)', safe=True, weight=10)
+@predicate(b'desc(string)', safe=True, weight=10)
 def desc(repo, subset, x):
     """Search commit message for string. The match is case-insensitive.
 
@@ -841,12 +842,12 @@
     :hg:`help revisions.patterns`.
     """
     # i18n: "desc" is a keyword
-    ds = getstring(x, _("desc requires a string"))
+    ds = getstring(x, _(b"desc requires a string"))
 
     kind, pattern, matcher = _substringmatcher(ds, casesensitive=False)
 
     return subset.filter(
-        lambda r: matcher(repo[r].description()), condrepr=('<desc %r>', ds)
+        lambda r: matcher(repo[r].description()), condrepr=(b'<desc %r>', ds)
     )
 
 
@@ -860,7 +861,7 @@
     return subset & s
 
 
-@predicate('descendants(set[, depth])', safe=True)
+@predicate(b'descendants(set[, depth])', safe=True)
 def descendants(repo, subset, x):
     """Changesets which are descendants of changesets in set, including the
     given changesets themselves.
@@ -869,37 +870,39 @@
     the specified generation.
     """
     # startdepth is for internal use only until we can decide the UI
-    args = getargsdict(x, 'descendants', 'set depth startdepth')
-    if 'set' not in args:
+    args = getargsdict(x, b'descendants', b'set depth startdepth')
+    if b'set' not in args:
         # i18n: "descendants" is a keyword
-        raise error.ParseError(_('descendants takes at least 1 argument'))
+        raise error.ParseError(_(b'descendants takes at least 1 argument'))
     startdepth = stopdepth = None
-    if 'startdepth' in args:
+    if b'startdepth' in args:
         n = getinteger(
-            args['startdepth'], "descendants expects an integer startdepth"
+            args[b'startdepth'], b"descendants expects an integer startdepth"
         )
         if n < 0:
-            raise error.ParseError("negative startdepth")
+            raise error.ParseError(b"negative startdepth")
         startdepth = n
-    if 'depth' in args:
+    if b'depth' in args:
         # i18n: "descendants" is a keyword
-        n = getinteger(args['depth'], _("descendants expects an integer depth"))
+        n = getinteger(
+            args[b'depth'], _(b"descendants expects an integer depth")
+        )
         if n < 0:
-            raise error.ParseError(_("negative depth"))
+            raise error.ParseError(_(b"negative depth"))
         stopdepth = n + 1
     return _descendants(
-        repo, subset, args['set'], startdepth=startdepth, stopdepth=stopdepth
+        repo, subset, args[b'set'], startdepth=startdepth, stopdepth=stopdepth
     )
 
 
-@predicate('_firstdescendants', safe=True)
+@predicate(b'_firstdescendants', safe=True)
 def _firstdescendants(repo, subset, x):
     # ``_firstdescendants(set)``
     # Like ``descendants(set)`` but follows only the first parents.
     return _descendants(repo, subset, x, followfirst=True)
 
 
-@predicate('destination([set])', safe=True, weight=10)
+@predicate(b'destination([set])', safe=True, weight=10)
 def destination(repo, subset, x):
     """Changesets that were created by a graft, transplant or rebase operation,
     with the given revisions specified as the source.  Omitting the optional set
@@ -943,23 +946,23 @@
 
     return subset.filter(
         dests.__contains__,
-        condrepr=lambda: '<destination %r>' % _sortedb(dests),
+        condrepr=lambda: b'<destination %r>' % _sortedb(dests),
     )
 
 
-@predicate('contentdivergent()', safe=True)
+@predicate(b'contentdivergent()', safe=True)
 def contentdivergent(repo, subset, x):
     """
     Final successors of changesets with an alternative set of final
     successors. (EXPERIMENTAL)
     """
     # i18n: "contentdivergent" is a keyword
-    getargs(x, 0, 0, _("contentdivergent takes no arguments"))
-    contentdivergent = obsmod.getrevs(repo, 'contentdivergent')
+    getargs(x, 0, 0, _(b"contentdivergent takes no arguments"))
+    contentdivergent = obsmod.getrevs(repo, b'contentdivergent')
     return subset & contentdivergent
 
 
-@predicate('expectsize(set[, size])', safe=True, takeorder=True)
+@predicate(b'expectsize(set[, size])', safe=True, takeorder=True)
 def expectsize(repo, subset, x, order):
     """Return the given revset if size matches the revset size.
     Abort if the revset size doesn't match the given size.
@@ -968,28 +971,28 @@
     For example, ``expectsize(0:1, 3:5)`` will abort as revset size is 2 and
     2 is not between 3 and 5 inclusive."""
 
-    args = getargsdict(x, 'expectsize', 'set size')
+    args = getargsdict(x, b'expectsize', b'set size')
     minsize = 0
     maxsize = len(repo) + 1
-    err = ''
-    if 'size' not in args or 'set' not in args:
-        raise error.ParseError(_('invalid set of arguments'))
+    err = b''
+    if b'size' not in args or b'set' not in args:
+        raise error.ParseError(_(b'invalid set of arguments'))
     minsize, maxsize = getintrange(
-        args['size'],
-        _('expectsize requires a size range' ' or a positive integer'),
-        _('size range bounds must be integers'),
+        args[b'size'],
+        _(b'expectsize requires a size range' b' or a positive integer'),
+        _(b'size range bounds must be integers'),
         minsize,
         maxsize,
     )
     if minsize < 0 or maxsize < 0:
-        raise error.ParseError(_('negative size'))
-    rev = getset(repo, fullreposet(repo), args['set'], order=order)
+        raise error.ParseError(_(b'negative size'))
+    rev = getset(repo, fullreposet(repo), args[b'set'], order=order)
     if minsize != maxsize and (len(rev) < minsize or len(rev) > maxsize):
         err = _(
-            'revset size mismatch.' ' expected between %d and %d, got %d'
+            b'revset size mismatch.' b' expected between %d and %d, got %d'
         ) % (minsize, maxsize, len(rev))
     elif minsize == maxsize and len(rev) != minsize:
-        err = _('revset size mismatch.' ' expected %d, got %d') % (
+        err = _(b'revset size mismatch.' b' expected %d, got %d') % (
             minsize,
             len(rev),
         )
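
In revset form the size check above behaves as follows (usage sketch, assuming a repository in which revisions 0 and 1 exist):

    hg log -r 'expectsize(0:1, 2)'    # passes: the set has exactly 2 members
    hg log -r 'expectsize(0:1, 3:5)'  # aborts: size 2 is not between 3 and 5
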
@@ -1001,31 +1004,31 @@
         return rev & subset
 
 
-@predicate('extdata(source)', safe=False, weight=100)
+@predicate(b'extdata(source)', safe=False, weight=100)
 def extdata(repo, subset, x):
     """Changesets in the specified extdata source. (EXPERIMENTAL)"""
     # i18n: "extdata" is a keyword
-    args = getargsdict(x, 'extdata', 'source')
+    args = getargsdict(x, b'extdata', b'source')
     source = getstring(
-        args.get('source'),
+        args.get(b'source'),
         # i18n: "extdata" is a keyword
-        _('extdata takes at least 1 string argument'),
+        _(b'extdata takes at least 1 string argument'),
     )
     data = scmutil.extdatasource(repo, source)
     return subset & baseset(data)
 
 
-@predicate('extinct()', safe=True)
+@predicate(b'extinct()', safe=True)
 def extinct(repo, subset, x):
     """Obsolete changesets with obsolete descendants only.
     """
     # i18n: "extinct" is a keyword
-    getargs(x, 0, 0, _("extinct takes no arguments"))
-    extincts = obsmod.getrevs(repo, 'extinct')
+    getargs(x, 0, 0, _(b"extinct takes no arguments"))
+    extincts = obsmod.getrevs(repo, b'extinct')
     return subset & extincts
 
 
-@predicate('extra(label, [value])', safe=True)
+@predicate(b'extra(label, [value])', safe=True)
 def extra(repo, subset, x):
     """Changesets with the given label in the extra metadata, with the given
     optional value.
@@ -1033,20 +1036,20 @@
     Pattern matching is supported for `value`. See
     :hg:`help revisions.patterns`.
     """
-    args = getargsdict(x, 'extra', 'label value')
-    if 'label' not in args:
+    args = getargsdict(x, b'extra', b'label value')
+    if b'label' not in args:
         # i18n: "extra" is a keyword
-        raise error.ParseError(_('extra takes at least 1 argument'))
+        raise error.ParseError(_(b'extra takes at least 1 argument'))
     # i18n: "extra" is a keyword
     label = getstring(
-        args['label'], _('first argument to extra must be ' 'a string')
+        args[b'label'], _(b'first argument to extra must be ' b'a string')
     )
     value = None
 
-    if 'value' in args:
+    if b'value' in args:
         # i18n: "extra" is a keyword
         value = getstring(
-            args['value'], _('second argument to extra must be ' 'a string')
+            args[b'value'], _(b'second argument to extra must be ' b'a string')
         )
         kind, value, matcher = stringutil.stringmatcher(value)
 
@@ -1055,11 +1058,11 @@
         return label in extra and (value is None or matcher(extra[label]))
 
     return subset.filter(
-        lambda r: _matchvalue(r), condrepr=('<extra[%r] %r>', label, value)
+        lambda r: _matchvalue(r), condrepr=(b'<extra[%r] %r>', label, value)
     )
 
 
-@predicate('filelog(pattern)', safe=True)
+@predicate(b'filelog(pattern)', safe=True)
 def filelog(repo, subset, x):
     """Changesets connected to the specified filelog.
 
@@ -1074,7 +1077,7 @@
     """
 
     # i18n: "filelog" is a keyword
-    pat = getstring(x, _("filelog requires a pattern"))
+    pat = getstring(x, _(b"filelog requires a pattern"))
     s = set()
     cl = repo.changelog
 
@@ -1123,7 +1126,7 @@
     return subset & s
 
 
-@predicate('first(set, [n])', safe=True, takeorder=True, weight=0)
+@predicate(b'first(set, [n])', safe=True, takeorder=True, weight=0)
 def first(repo, subset, x, order):
     """An alias for limit().
     """
@@ -1131,52 +1134,53 @@
 
 
 def _follow(repo, subset, x, name, followfirst=False):
-    args = getargsdict(x, name, 'file startrev')
+    args = getargsdict(x, name, b'file startrev')
     revs = None
-    if 'startrev' in args:
-        revs = getset(repo, fullreposet(repo), args['startrev'])
-    if 'file' in args:
-        x = getstring(args['file'], _("%s expected a pattern") % name)
+    if b'startrev' in args:
+        revs = getset(repo, fullreposet(repo), args[b'startrev'])
+    if b'file' in args:
+        x = getstring(args[b'file'], _(b"%s expected a pattern") % name)
         if revs is None:
             revs = [None]
         fctxs = []
         for r in revs:
             ctx = mctx = repo[r]
             if r is None:
-                ctx = repo['.']
+                ctx = repo[b'.']
             m = matchmod.match(
-                repo.root, repo.getcwd(), [x], ctx=mctx, default='path'
+                repo.root, repo.getcwd(), [x], ctx=mctx, default=b'path'
             )
             fctxs.extend(ctx[f].introfilectx() for f in ctx.manifest().walk(m))
         s = dagop.filerevancestors(fctxs, followfirst)
     else:
         if revs is None:
-            revs = baseset([repo['.'].rev()])
+            revs = baseset([repo[b'.'].rev()])
         s = dagop.revancestors(repo, revs, followfirst)
 
     return subset & s
 
 
-@predicate('follow([file[, startrev]])', safe=True)
+@predicate(b'follow([file[, startrev]])', safe=True)
 def follow(repo, subset, x):
     """
     An alias for ``::.`` (ancestors of the working directory's first parent).
     If file pattern is specified, the histories of files matching given
     pattern in the revision given by startrev are followed, including copies.
     """
-    return _follow(repo, subset, x, 'follow')
-
-
-@predicate('_followfirst', safe=True)
+    return _follow(repo, subset, x, b'follow')
+
+
+@predicate(b'_followfirst', safe=True)
 def _followfirst(repo, subset, x):
     # ``followfirst([file[, startrev]])``
     # Like ``follow([file[, startrev]])`` but follows only the first parent
     # of every revisions or files revisions.
-    return _follow(repo, subset, x, '_followfirst', followfirst=True)
+    return _follow(repo, subset, x, b'_followfirst', followfirst=True)
 
 
 @predicate(
-    'followlines(file, fromline:toline[, startrev=., descend=False])', safe=True
+    b'followlines(file, fromline:toline[, startrev=., descend=False])',
+    safe=True,
 )
 def followlines(repo, subset, x):
     """Changesets modifying `file` in line range ('fromline', 'toline').
@@ -1189,40 +1193,40 @@
     descendants of 'startrev' are returned though renames are (currently) not
     followed in this direction.
     """
-    args = getargsdict(x, 'followlines', 'file *lines startrev descend')
-    if len(args['lines']) != 1:
-        raise error.ParseError(_("followlines requires a line range"))
-
-    rev = '.'
-    if 'startrev' in args:
-        revs = getset(repo, fullreposet(repo), args['startrev'])
+    args = getargsdict(x, b'followlines', b'file *lines startrev descend')
+    if len(args[b'lines']) != 1:
+        raise error.ParseError(_(b"followlines requires a line range"))
+
+    rev = b'.'
+    if b'startrev' in args:
+        revs = getset(repo, fullreposet(repo), args[b'startrev'])
         if len(revs) != 1:
             raise error.ParseError(
                 # i18n: "followlines" is a keyword
-                _("followlines expects exactly one revision")
+                _(b"followlines expects exactly one revision")
             )
         rev = revs.last()
 
-    pat = getstring(args['file'], _("followlines requires a pattern"))
+    pat = getstring(args[b'file'], _(b"followlines requires a pattern"))
     # i18n: "followlines" is a keyword
-    msg = _("followlines expects exactly one file")
+    msg = _(b"followlines expects exactly one file")
     fname = scmutil.parsefollowlinespattern(repo, rev, pat, msg)
     fromline, toline = util.processlinerange(
         *getintrange(
-            args['lines'][0],
+            args[b'lines'][0],
             # i18n: "followlines" is a keyword
-            _("followlines expects a line number or a range"),
-            _("line range bounds must be integers"),
+            _(b"followlines expects a line number or a range"),
+            _(b"line range bounds must be integers"),
         )
     )
 
     fctx = repo[rev].filectx(fname)
     descend = False
-    if 'descend' in args:
+    if b'descend' in args:
         descend = getboolean(
-            args['descend'],
+            args[b'descend'],
             # i18n: "descend" is a keyword
-            _("descend argument must be a boolean"),
+            _(b"descend argument must be a boolean"),
         )
     if descend:
         rs = generatorset(
@@ -1247,16 +1251,16 @@
     return subset & rs
 
 
-@predicate('all()', safe=True)
+@predicate(b'all()', safe=True)
 def getall(repo, subset, x):
     """All changesets, the same as ``0:tip``.
     """
     # i18n: "all" is a keyword
-    getargs(x, 0, 0, _("all takes no arguments"))
+    getargs(x, 0, 0, _(b"all takes no arguments"))
     return subset & spanset(repo)  # drop "null" if any
 
 
-@predicate('grep(regex)', weight=10)
+@predicate(b'grep(regex)', weight=10)
 def grep(repo, subset, x):
     """Like ``keyword(string)`` but accepts a regex. Use ``grep(r'...')``
     to ensure special escape characters are handled correctly. Unlike
@@ -1264,10 +1268,10 @@
     """
     try:
         # i18n: "grep" is a keyword
-        gr = re.compile(getstring(x, _("grep requires a string")))
+        gr = re.compile(getstring(x, _(b"grep requires a string")))
     except re.error as e:
         raise error.ParseError(
-            _('invalid match pattern: %s') % stringutil.forcebytestr(e)
+            _(b'invalid match pattern: %s') % stringutil.forcebytestr(e)
         )
 
     def matches(x):
@@ -1277,10 +1281,10 @@
                 return True
         return False
 
-    return subset.filter(matches, condrepr=('<grep %r>', gr.pattern))
-
-
-@predicate('_matchfiles', safe=True)
+    return subset.filter(matches, condrepr=(b'<grep %r>', gr.pattern))
+
+
+@predicate(b'_matchfiles', safe=True)
 def _matchfiles(repo, subset, x):
     # _matchfiles takes a revset list of prefixed arguments:
     #
@@ -1294,38 +1298,38 @@
     # initialized. Use 'd:' to set the default matching mode, default
     # to 'glob'. At most one 'r:' and 'd:' argument can be passed.
 
-    l = getargs(x, 1, -1, "_matchfiles requires at least one argument")
+    l = getargs(x, 1, -1, b"_matchfiles requires at least one argument")
     pats, inc, exc = [], [], []
     rev, default = None, None
     for arg in l:
-        s = getstring(arg, "_matchfiles requires string arguments")
+        s = getstring(arg, b"_matchfiles requires string arguments")
         prefix, value = s[:2], s[2:]
-        if prefix == 'p:':
+        if prefix == b'p:':
             pats.append(value)
-        elif prefix == 'i:':
+        elif prefix == b'i:':
             inc.append(value)
-        elif prefix == 'x:':
+        elif prefix == b'x:':
             exc.append(value)
-        elif prefix == 'r:':
+        elif prefix == b'r:':
             if rev is not None:
                 raise error.ParseError(
-                    '_matchfiles expected at most one ' 'revision'
+                    b'_matchfiles expected at most one ' b'revision'
                 )
-            if value == '':  # empty means working directory
+            if value == b'':  # empty means working directory
                 rev = node.wdirrev
             else:
                 rev = value
-        elif prefix == 'd:':
+        elif prefix == b'd:':
             if default is not None:
                 raise error.ParseError(
-                    '_matchfiles expected at most one ' 'default mode'
+                    b'_matchfiles expected at most one ' b'default mode'
                 )
             default = value
         else:
-            raise error.ParseError('invalid _matchfiles prefix: %s' % prefix)
+            raise error.ParseError(b'invalid _matchfiles prefix: %s' % prefix)
     if not default:
-        default = 'glob'
-    hasset = any(matchmod.patkind(p) == 'set' for p in pats + inc + exc)
+        default = b'glob'
+    hasset = any(matchmod.patkind(p) == b'set' for p in pats + inc + exc)
 
     mcache = [None]
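
The prefix protocol parsed above defines the argument forms _matchfiles accepts; file(pattern) further down builds exactly one of them, (b'string', b'p:' + pat). Schematically:

    # b'p:<pat>'   pattern to match
    # b'i:<pat>'   include pattern
    # b'x:<pat>'   exclude pattern
    # b'r:<rev>'   revision (at most one; an empty value means the working directory)
    # b'd:<kind>'  default match kind (at most one; falls back to b'glob')
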
 
@@ -1361,8 +1365,8 @@
     return subset.filter(
         matches,
         condrepr=(
-            '<matchfiles patterns=%r, include=%r '
-            'exclude=%r, default=%r, rev=%r>',
+            b'<matchfiles patterns=%r, include=%r '
+            b'exclude=%r, default=%r, rev=%r>',
             pats,
             inc,
             exc,
@@ -1372,7 +1376,7 @@
     )
 
 
-@predicate('file(pattern)', safe=True, weight=10)
+@predicate(b'file(pattern)', safe=True, weight=10)
 def hasfile(repo, subset, x):
     """Changesets affecting files matched by pattern.
 
@@ -1382,16 +1386,16 @@
     This predicate uses ``glob:`` as the default kind of pattern.
     """
     # i18n: "file" is a keyword
-    pat = getstring(x, _("file requires a pattern"))
-    return _matchfiles(repo, subset, ('string', 'p:' + pat))
-
-
-@predicate('head()', safe=True)
+    pat = getstring(x, _(b"file requires a pattern"))
+    return _matchfiles(repo, subset, (b'string', b'p:' + pat))
+
+
+@predicate(b'head()', safe=True)
 def head(repo, subset, x):
     """Changeset is a named branch head.
     """
     # i18n: "head" is a keyword
-    getargs(x, 0, 0, _("head takes no arguments"))
+    getargs(x, 0, 0, _(b"head takes no arguments"))
     hs = set()
     cl = repo.changelog
     for ls in repo.branchmap().iterheads():
@@ -1399,7 +1403,7 @@
     return subset & baseset(hs)
 
 
-@predicate('heads(set)', safe=True, takeorder=True)
+@predicate(b'heads(set)', safe=True, takeorder=True)
 def heads(repo, subset, x, order):
     """Members of set with no children in set.
     """
@@ -1421,17 +1425,17 @@
     return subset & heads
 
 
-@predicate('hidden()', safe=True)
+@predicate(b'hidden()', safe=True)
 def hidden(repo, subset, x):
     """Hidden changesets.
     """
     # i18n: "hidden" is a keyword
-    getargs(x, 0, 0, _("hidden takes no arguments"))
-    hiddenrevs = repoview.filterrevs(repo, 'visible')
+    getargs(x, 0, 0, _(b"hidden takes no arguments"))
+    hiddenrevs = repoview.filterrevs(repo, b'visible')
     return subset & hiddenrevs
 
 
-@predicate('keyword(string)', safe=True, weight=10)
+@predicate(b'keyword(string)', safe=True, weight=10)
 def keyword(repo, subset, x):
     """Search commit message, user name, and names of changed files for
     string. The match is case-insensitive.
@@ -1440,7 +1444,7 @@
     ``grep(regex)``.
     """
     # i18n: "keyword" is a keyword
-    kw = encoding.lower(getstring(x, _("keyword requires a string")))
+    kw = encoding.lower(getstring(x, _(b"keyword requires a string")))
 
     def matches(r):
         c = repo[r]
@@ -1449,44 +1453,46 @@
             for t in c.files() + [c.user(), c.description()]
         )
 
-    return subset.filter(matches, condrepr=('<keyword %r>', kw))
-
-
-@predicate('limit(set[, n[, offset]])', safe=True, takeorder=True, weight=0)
+    return subset.filter(matches, condrepr=(b'<keyword %r>', kw))
+
+
+@predicate(b'limit(set[, n[, offset]])', safe=True, takeorder=True, weight=0)
 def limit(repo, subset, x, order):
     """First n members of set, defaulting to 1, starting from offset.
     """
-    args = getargsdict(x, 'limit', 'set n offset')
-    if 'set' not in args:
+    args = getargsdict(x, b'limit', b'set n offset')
+    if b'set' not in args:
         # i18n: "limit" is a keyword
-        raise error.ParseError(_("limit requires one to three arguments"))
+        raise error.ParseError(_(b"limit requires one to three arguments"))
     # i18n: "limit" is a keyword
-    lim = getinteger(args.get('n'), _("limit expects a number"), default=1)
+    lim = getinteger(args.get(b'n'), _(b"limit expects a number"), default=1)
     if lim < 0:
-        raise error.ParseError(_("negative number to select"))
+        raise error.ParseError(_(b"negative number to select"))
     # i18n: "limit" is a keyword
-    ofs = getinteger(args.get('offset'), _("limit expects a number"), default=0)
+    ofs = getinteger(
+        args.get(b'offset'), _(b"limit expects a number"), default=0
+    )
     if ofs < 0:
-        raise error.ParseError(_("negative offset"))
-    os = getset(repo, fullreposet(repo), args['set'])
+        raise error.ParseError(_(b"negative offset"))
+    os = getset(repo, fullreposet(repo), args[b'set'])
     ls = os.slice(ofs, ofs + lim)
     if order == followorder and lim > 1:
         return subset & ls
     return ls & subset
 
 
-@predicate('last(set, [n])', safe=True, takeorder=True)
+@predicate(b'last(set, [n])', safe=True, takeorder=True)
 def last(repo, subset, x, order):
     """Last n members of set, defaulting to 1.
     """
     # i18n: "last" is a keyword
-    l = getargs(x, 1, 2, _("last requires one or two arguments"))
+    l = getargs(x, 1, 2, _(b"last requires one or two arguments"))
     lim = 1
     if len(l) == 2:
         # i18n: "last" is a keyword
-        lim = getinteger(l[1], _("last expects a number"))
+        lim = getinteger(l[1], _(b"last expects a number"))
     if lim < 0:
-        raise error.ParseError(_("negative number to select"))
+        raise error.ParseError(_(b"negative number to select"))
     os = getset(repo, fullreposet(repo), l[0])
     os.reverse()
     ls = os.slice(0, lim)
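
Both slicing predicates compose with any input set; usage sketches of the offset and limit semantics implemented above, assuming a repository with enough revisions:

    hg log -r 'limit(all(), 3, 10)'  # members at offsets 10, 11 and 12 of all()
    hg log -r 'last(all(), 2)'       # the final two members of all()
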
@@ -1496,7 +1502,7 @@
     return ls & subset
 
 
-@predicate('max(set)', safe=True)
+@predicate(b'max(set)', safe=True)
 def maxrev(repo, subset, x):
     """Changeset with highest revision number in set.
     """
@@ -1504,20 +1510,20 @@
     try:
         m = os.max()
         if m in subset:
-            return baseset([m], datarepr=('<max %r, %r>', subset, os))
+            return baseset([m], datarepr=(b'<max %r, %r>', subset, os))
     except ValueError:
         # os.max() throws a ValueError when the collection is empty.
         # Same as python's max().
         pass
-    return baseset(datarepr=('<max %r, %r>', subset, os))
-
-
-@predicate('merge()', safe=True)
+    return baseset(datarepr=(b'<max %r, %r>', subset, os))
+
+
+@predicate(b'merge()', safe=True)
 def merge(repo, subset, x):
     """Changeset is a merge changeset.
     """
     # i18n: "merge" is a keyword
-    getargs(x, 0, 0, _("merge takes no arguments"))
+    getargs(x, 0, 0, _(b"merge takes no arguments"))
     cl = repo.changelog
     nullrev = node.nullrev
 
@@ -1527,15 +1533,15 @@
         except error.WdirUnsupported:
             return bool(repo[r].p2())
 
-    return subset.filter(ismerge, condrepr='<merge>')
-
-
-@predicate('branchpoint()', safe=True)
+    return subset.filter(ismerge, condrepr=b'<merge>')
+
+
+@predicate(b'branchpoint()', safe=True)
 def branchpoint(repo, subset, x):
     """Changesets with more than one child.
     """
     # i18n: "branchpoint" is a keyword
-    getargs(x, 0, 0, _("branchpoint takes no arguments"))
+    getargs(x, 0, 0, _(b"branchpoint takes no arguments"))
     cl = repo.changelog
     if not subset:
         return baseset()
@@ -1548,11 +1554,11 @@
             if p >= baserev:
                 parentscount[p - baserev] += 1
     return subset.filter(
-        lambda r: parentscount[r - baserev] > 1, condrepr='<branchpoint>'
+        lambda r: parentscount[r - baserev] > 1, condrepr=b'<branchpoint>'
     )
 
 
-@predicate('min(set)', safe=True)
+@predicate(b'min(set)', safe=True)
 def minrev(repo, subset, x):
     """Changeset with lowest revision number in set.
     """
@@ -1560,15 +1566,15 @@
     try:
         m = os.min()
         if m in subset:
-            return baseset([m], datarepr=('<min %r, %r>', subset, os))
+            return baseset([m], datarepr=(b'<min %r, %r>', subset, os))
     except ValueError:
         # os.min() throws a ValueError when the collection is empty.
         # Same as python's min().
         pass
-    return baseset(datarepr=('<min %r, %r>', subset, os))
-
-
-@predicate('modifies(pattern)', safe=True, weight=30)
+    return baseset(datarepr=(b'<min %r, %r>', subset, os))
+
+
+@predicate(b'modifies(pattern)', safe=True, weight=30)
 def modifies(repo, subset, x):
     """Changesets modifying files matched by pattern.
 
@@ -1577,11 +1583,11 @@
     directory.
     """
     # i18n: "modifies" is a keyword
-    pat = getstring(x, _("modifies requires a pattern"))
+    pat = getstring(x, _(b"modifies requires a pattern"))
     return checkstatus(repo, subset, pat, 0)
 
 
-@predicate('named(namespace)')
+@predicate(b'named(namespace)')
 def named(repo, subset, x):
     """The changesets in a given namespace.
 
@@ -1589,18 +1595,20 @@
     :hg:`help revisions.patterns`.
     """
     # i18n: "named" is a keyword
-    args = getargs(x, 1, 1, _('named requires a namespace argument'))
+    args = getargs(x, 1, 1, _(b'named requires a namespace argument'))
 
     ns = getstring(
         args[0],
         # i18n: "named" is a keyword
-        _('the argument to named must be a string'),
+        _(b'the argument to named must be a string'),
     )
     kind, pattern, matcher = stringutil.stringmatcher(ns)
     namespaces = set()
-    if kind == 'literal':
+    if kind == b'literal':
         if pattern not in repo.names:
-            raise error.RepoLookupError(_("namespace '%s' does not exist") % ns)
+            raise error.RepoLookupError(
+                _(b"namespace '%s' does not exist") % ns
+            )
         namespaces.add(repo.names[pattern])
     else:
         for name, ns in repo.names.iteritems():
@@ -1617,14 +1625,14 @@
     return subset & names
 
 
-@predicate('id(string)', safe=True)
+@predicate(b'id(string)', safe=True)
 def node_(repo, subset, x):
     """Revision non-ambiguously specified by the given hex string prefix.
     """
     # i18n: "id" is a keyword
-    l = getargs(x, 1, 1, _("id requires one argument"))
+    l = getargs(x, 1, 1, _(b"id requires one argument"))
     # i18n: "id" is a keyword
-    n = getstring(l[0], _("id requires a string"))
+    n = getstring(l[0], _(b"id requires a string"))
     if len(n) == 40:
         try:
             rn = repo.changelog.rev(node.bin(n))
@@ -1649,25 +1657,25 @@
     return result & subset
 
 
-@predicate('none()', safe=True)
+@predicate(b'none()', safe=True)
 def none(repo, subset, x):
     """No changesets.
     """
     # i18n: "none" is a keyword
-    getargs(x, 0, 0, _("none takes no arguments"))
+    getargs(x, 0, 0, _(b"none takes no arguments"))
     return baseset()
 
 
-@predicate('obsolete()', safe=True)
+@predicate(b'obsolete()', safe=True)
 def obsolete(repo, subset, x):
     """Mutable changeset with a newer version."""
     # i18n: "obsolete" is a keyword
-    getargs(x, 0, 0, _("obsolete takes no arguments"))
-    obsoletes = obsmod.getrevs(repo, 'obsolete')
+    getargs(x, 0, 0, _(b"obsolete takes no arguments"))
+    obsoletes = obsmod.getrevs(repo, b'obsolete')
     return subset & obsoletes
 
 
-@predicate('only(set, [set])', safe=True)
+@predicate(b'only(set, [set])', safe=True)
 def only(repo, subset, x):
     """Changesets that are ancestors of the first set that are not ancestors
     of any other head in the repo. If a second set is specified, the result
@@ -1676,7 +1684,7 @@
     """
     cl = repo.changelog
     # i18n: "only" is a keyword
-    args = getargs(x, 1, 2, _('only takes one or two arguments'))
+    args = getargs(x, 1, 2, _(b'only takes one or two arguments'))
     include = getset(repo, fullreposet(repo), args[0])
     if len(args) == 1:
         if not include:
@@ -1697,7 +1705,7 @@
     return subset & results
 
 
-@predicate('origin([set])', safe=True)
+@predicate(b'origin([set])', safe=True)
 def origin(repo, subset, x):
     """
     Changesets that were specified as a source for the grafts, transplants or
@@ -1730,7 +1738,7 @@
     return subset & o
 
 
-@predicate('outgoing([path])', safe=False, weight=10)
+@predicate(b'outgoing([path])', safe=False, weight=10)
 def outgoing(repo, subset, x):
     """Changesets not found in the specified destination repository, or the
     default push location.
@@ -1742,17 +1750,19 @@
     )
 
     # i18n: "outgoing" is a keyword
-    l = getargs(x, 0, 1, _("outgoing takes one or no arguments"))
+    l = getargs(x, 0, 1, _(b"outgoing takes one or no arguments"))
     # i18n: "outgoing" is a keyword
-    dest = l and getstring(l[0], _("outgoing requires a repository path")) or ''
+    dest = (
+        l and getstring(l[0], _(b"outgoing requires a repository path")) or b''
+    )
     if not dest:
         # ui.paths.getpath() explicitly tests for None, not just a boolean
         dest = None
-    path = repo.ui.paths.getpath(dest, default=('default-push', 'default'))
+    path = repo.ui.paths.getpath(dest, default=(b'default-push', b'default'))
     if not path:
         raise error.Abort(
-            _('default repository not configured!'),
-            hint=_("see 'hg help config.paths'"),
+            _(b'default repository not configured!'),
+            hint=_(b"see 'hg help config.paths'"),
         )
     dest = path.pushloc or path.loc
     branches = path.branch, []
@@ -1769,7 +1779,7 @@
     return subset & o
 
 
-@predicate('p1([set])', safe=True)
+@predicate(b'p1([set])', safe=True)
 def p1(repo, subset, x):
     """First parent of changesets in set, or the working directory.
     """
@@ -1792,7 +1802,7 @@
     return subset & ps
 
 
-@predicate('p2([set])', safe=True)
+@predicate(b'p2([set])', safe=True)
 def p2(repo, subset, x):
     """Second parent of changesets in set, or the working directory.
     """
@@ -1825,7 +1835,7 @@
     return p1(repo, subset, x)
 
 
-@predicate('parents([set])', safe=True)
+@predicate(b'parents([set])', safe=True)
 def parents(repo, subset, x):
     """
     The set of all parents for all changesets in set, or the working directory.
@@ -1851,32 +1861,32 @@
     return repo._phasecache.getrevset(repo, targets, subset)
 
 
-@predicate('_phase(idx)', safe=True)
+@predicate(b'_phase(idx)', safe=True)
 def phase(repo, subset, x):
-    l = getargs(x, 1, 1, "_phase requires one argument")
-    target = getinteger(l[0], "_phase expects a number")
+    l = getargs(x, 1, 1, b"_phase requires one argument")
+    target = getinteger(l[0], b"_phase expects a number")
     return _phase(repo, subset, target)
 
 
-@predicate('draft()', safe=True)
+@predicate(b'draft()', safe=True)
 def draft(repo, subset, x):
     """Changeset in draft phase."""
     # i18n: "draft" is a keyword
-    getargs(x, 0, 0, _("draft takes no arguments"))
+    getargs(x, 0, 0, _(b"draft takes no arguments"))
     target = phases.draft
     return _phase(repo, subset, target)
 
 
-@predicate('secret()', safe=True)
+@predicate(b'secret()', safe=True)
 def secret(repo, subset, x):
     """Changeset in secret phase."""
     # i18n: "secret" is a keyword
-    getargs(x, 0, 0, _("secret takes no arguments"))
+    getargs(x, 0, 0, _(b"secret takes no arguments"))
     target = phases.secret
     return _phase(repo, subset, target)
 
 
-@predicate('stack([revs])', safe=True)
+@predicate(b'stack([revs])', safe=True)
 def stack(repo, subset, x):
     """Experimental revset for the stack of changesets or working directory
     parent. (EXPERIMENTAL)
@@ -1903,7 +1913,7 @@
         if n not in (0, 1, 2):
             raise ValueError
     except (TypeError, ValueError):
-        raise error.ParseError(_("^ expects a number 0, 1, or 2"))
+        raise error.ParseError(_(b"^ expects a number 0, 1, or 2"))
     ps = set()
     cl = repo.changelog
     for r in getset(repo, fullreposet(repo), x):
@@ -1926,7 +1936,7 @@
     return subset & ps
 
 
-@predicate('present(set)', safe=True, takeorder=True)
+@predicate(b'present(set)', safe=True, takeorder=True)
 def present(repo, subset, x, order):
     """An empty set, if any revision in set isn't found; otherwise,
     all revisions in set.
@@ -1942,30 +1952,30 @@
 
 
 # for internal use
-@predicate('_notpublic', safe=True)
+@predicate(b'_notpublic', safe=True)
 def _notpublic(repo, subset, x):
-    getargs(x, 0, 0, "_notpublic takes no arguments")
+    getargs(x, 0, 0, b"_notpublic takes no arguments")
     return _phase(repo, subset, phases.draft, phases.secret)
 
 
 # for internal use
-@predicate('_phaseandancestors(phasename, set)', safe=True)
+@predicate(b'_phaseandancestors(phasename, set)', safe=True)
 def _phaseandancestors(repo, subset, x):
     # equivalent to (phasename() & ancestors(set)) but more efficient
     # phasename could be one of 'draft', 'secret', or '_notpublic'
-    args = getargs(x, 2, 2, "_phaseandancestors requires two arguments")
+    args = getargs(x, 2, 2, b"_phaseandancestors requires two arguments")
     phasename = getsymbol(args[0])
     s = getset(repo, fullreposet(repo), args[1])
 
     draft = phases.draft
     secret = phases.secret
     phasenamemap = {
-        '_notpublic': draft,
-        'draft': draft,  # follow secret's ancestors
-        'secret': secret,
+        b'_notpublic': draft,
+        b'draft': draft,  # follow secret's ancestors
+        b'secret': secret,
     }
     if phasename not in phasenamemap:
-        raise error.ParseError('%r is not a valid phasename' % phasename)
+        raise error.ParseError(b'%r is not a valid phasename' % phasename)
 
     minimalphase = phasenamemap[phasename]
     getphase = repo._phasecache.phase
@@ -1975,20 +1985,20 @@
 
     revs = dagop.revancestors(repo, s, cutfunc=cutfunc)
 
-    if phasename == 'draft':  # need to remove secret changesets
+    if phasename == b'draft':  # need to remove secret changesets
         revs = revs.filter(lambda r: getphase(repo, r) == draft)
     return subset & revs
 
 
-@predicate('public()', safe=True)
+@predicate(b'public()', safe=True)
 def public(repo, subset, x):
     """Changeset in public phase."""
     # i18n: "public" is a keyword
-    getargs(x, 0, 0, _("public takes no arguments"))
+    getargs(x, 0, 0, _(b"public takes no arguments"))
     return _phase(repo, subset, phases.public)
 
 
-@predicate('remote([id [,path]])', safe=False)
+@predicate(b'remote([id [,path]])', safe=False)
 def remote(repo, subset, x):
     """Local revision that corresponds to the given identifier in a
     remote repository, if present. Here, the '.' identifier is a
@@ -1998,20 +2008,20 @@
     from . import hg  # avoid start-up nasties
 
     # i18n: "remote" is a keyword
-    l = getargs(x, 0, 2, _("remote takes zero, one, or two arguments"))
-
-    q = '.'
+    l = getargs(x, 0, 2, _(b"remote takes zero, one, or two arguments"))
+
+    q = b'.'
     if len(l) > 0:
         # i18n: "remote" is a keyword
-        q = getstring(l[0], _("remote requires a string id"))
-    if q == '.':
-        q = repo['.'].branch()
-
-    dest = ''
+        q = getstring(l[0], _(b"remote requires a string id"))
+    if q == b'.':
+        q = repo[b'.'].branch()
+
+    dest = b''
     if len(l) > 1:
         # i18n: "remote" is a keyword
-        dest = getstring(l[1], _("remote requires a repository path"))
-    dest = repo.ui.expandpath(dest or 'default')
+        dest = getstring(l[1], _(b"remote requires a repository path"))
+    dest = repo.ui.expandpath(dest or b'default')
     dest, branches = hg.parseurl(dest)
     revs, checkout = hg.addbranchrevs(repo, repo, branches, [])
     if revs:
@@ -2025,7 +2035,7 @@
     return baseset()
 
 
-@predicate('removes(pattern)', safe=True, weight=30)
+@predicate(b'removes(pattern)', safe=True, weight=30)
 def removes(repo, subset, x):
     """Changesets which remove files matching pattern.
 
@@ -2034,43 +2044,43 @@
     directory.
     """
     # i18n: "removes" is a keyword
-    pat = getstring(x, _("removes requires a pattern"))
+    pat = getstring(x, _(b"removes requires a pattern"))
     return checkstatus(repo, subset, pat, 2)
 
 
-@predicate('rev(number)', safe=True)
+@predicate(b'rev(number)', safe=True)
 def rev(repo, subset, x):
     """Revision with the given numeric identifier.
     """
     # i18n: "rev" is a keyword
-    l = getargs(x, 1, 1, _("rev requires one argument"))
+    l = getargs(x, 1, 1, _(b"rev requires one argument"))
     try:
         # i18n: "rev" is a keyword
-        l = int(getstring(l[0], _("rev requires a number")))
+        l = int(getstring(l[0], _(b"rev requires a number")))
     except (TypeError, ValueError):
         # i18n: "rev" is a keyword
-        raise error.ParseError(_("rev expects a number"))
+        raise error.ParseError(_(b"rev expects a number"))
     if l not in repo.changelog and l not in _virtualrevs:
         return baseset()
     return subset & baseset([l])
 
 
-@predicate('_rev(number)', safe=True)
+@predicate(b'_rev(number)', safe=True)
 def _rev(repo, subset, x):
     # internal version of "rev(x)" that raise error if "x" is invalid
     # i18n: "rev" is a keyword
-    l = getargs(x, 1, 1, _("rev requires one argument"))
+    l = getargs(x, 1, 1, _(b"rev requires one argument"))
     try:
         # i18n: "rev" is a keyword
-        l = int(getstring(l[0], _("rev requires a number")))
+        l = int(getstring(l[0], _(b"rev requires a number")))
     except (TypeError, ValueError):
         # i18n: "rev" is a keyword
-        raise error.ParseError(_("rev expects a number"))
+        raise error.ParseError(_(b"rev expects a number"))
     repo.changelog.node(l)  # check that the rev exists
     return subset & baseset([l])
 
 
-@predicate('revset(set)', safe=True, takeorder=True)
+@predicate(b'revset(set)', safe=True, takeorder=True)
 def revsetpredicate(repo, subset, x, order):
     """Strictly interpret the content as a revset.
 
@@ -2081,7 +2091,7 @@
     return getset(repo, subset, x, order)
 
 
-@predicate('matching(revision [, field])', safe=True)
+@predicate(b'matching(revision [, field])', safe=True)
 def matching(repo, subset, x):
     """Changesets in which a given set of fields match the set of fields in the
     selected revision or set.
@@ -2107,16 +2117,16 @@
     specified. You can match more than one field at a time.
     """
     # i18n: "matching" is a keyword
-    l = getargs(x, 1, 2, _("matching takes 1 or 2 arguments"))
+    l = getargs(x, 1, 2, _(b"matching takes 1 or 2 arguments"))
 
     revs = getset(repo, fullreposet(repo), l[0])
 
-    fieldlist = ['metadata']
+    fieldlist = [b'metadata']
     if len(l) > 1:
         fieldlist = getstring(
             l[1],
             # i18n: "matching" is a keyword
-            _("matching requires a string " "as its second argument"),
+            _(b"matching requires a string " b"as its second argument"),
         ).split()
 
     # Make sure that there are no repeated fields,
@@ -2124,36 +2134,36 @@
     # and check the 'files' whenever we check the 'diff'
     fields = []
     for field in fieldlist:
-        if field == 'metadata':
-            fields += ['user', 'description', 'date']
-        elif field == 'diff':
+        if field == b'metadata':
+            fields += [b'user', b'description', b'date']
+        elif field == b'diff':
             # a revision matching the diff must also match the files
             # since matching the diff is very costly, make sure to
             # also match the files first
-            fields += ['files', 'diff']
+            fields += [b'files', b'diff']
         else:
-            if field == 'author':
-                field = 'user'
+            if field == b'author':
+                field = b'user'
             fields.append(field)
     fields = set(fields)
-    if 'summary' in fields and 'description' in fields:
+    if b'summary' in fields and b'description' in fields:
         # If a revision matches its description it also matches its summary
-        fields.discard('summary')
+        fields.discard(b'summary')
 
     # We may want to match more than one field
     # Not all fields take the same amount of time to be matched
     # Sort the selected fields in order of increasing matching cost
     fieldorder = [
-        'phase',
-        'parents',
-        'user',
-        'date',
-        'branch',
-        'summary',
-        'files',
-        'description',
-        'substate',
-        'diff',
+        b'phase',
+        b'parents',
+        b'user',
+        b'date',
+        b'branch',
+        b'summary',
+        b'files',
+        b'description',
+        b'substate',
+        b'diff',
     ]
 
     def fieldkeyfunc(f):
@@ -2170,17 +2180,17 @@
     # which will be added to the getfieldfuncs array of functions
     getfieldfuncs = []
     _funcs = {
-        'user': lambda r: repo[r].user(),
-        'branch': lambda r: repo[r].branch(),
-        'date': lambda r: repo[r].date(),
-        'description': lambda r: repo[r].description(),
-        'files': lambda r: repo[r].files(),
-        'parents': lambda r: repo[r].parents(),
-        'phase': lambda r: repo[r].phase(),
-        'substate': lambda r: repo[r].substate,
-        'summary': lambda r: repo[r].description().splitlines()[0],
-        'diff': lambda r: list(
-            repo[r].diff(opts=diffutil.diffallopts(repo.ui, {'git': True}))
+        b'user': lambda r: repo[r].user(),
+        b'branch': lambda r: repo[r].branch(),
+        b'date': lambda r: repo[r].date(),
+        b'description': lambda r: repo[r].description(),
+        b'files': lambda r: repo[r].files(),
+        b'parents': lambda r: repo[r].parents(),
+        b'phase': lambda r: repo[r].phase(),
+        b'substate': lambda r: repo[r].substate,
+        b'summary': lambda r: repo[r].description().splitlines()[0],
+        b'diff': lambda r: list(
+            repo[r].diff(opts=diffutil.diffallopts(repo.ui, {b'git': True}))
         ),
     }
     for info in fields:
@@ -2188,7 +2198,7 @@
         if getfield is None:
             raise error.ParseError(
                 # i18n: "matching" is a keyword
-                _("unexpected field name passed to matching: %s")
+                _(b"unexpected field name passed to matching: %s")
                 % info
             )
         getfieldfuncs.append(getfield)
@@ -2208,10 +2218,10 @@
                 return True
         return False
 
-    return subset.filter(matches, condrepr=('<matching%r %r>', fields, revs))
-
-
-@predicate('reverse(set)', safe=True, takeorder=True, weight=0)
+    return subset.filter(matches, condrepr=(b'<matching%r %r>', fields, revs))
+
+
+@predicate(b'reverse(set)', safe=True, takeorder=True, weight=0)
 def reverse(repo, subset, x, order):
     """Reverse order of set.
     """
@@ -2221,7 +2231,7 @@
     return l
 
 
-@predicate('roots(set)', safe=True)
+@predicate(b'roots(set)', safe=True)
 def roots(repo, subset, x):
     """Changesets in set with no parent changeset in set.
     """
@@ -2234,66 +2244,66 @@
                 return False
         return True
 
-    return subset & s.filter(filter, condrepr='<roots>')
+    return subset & s.filter(filter, condrepr=b'<roots>')
 
 
 _sortkeyfuncs = {
-    'rev': lambda c: c.rev(),
-    'branch': lambda c: c.branch(),
-    'desc': lambda c: c.description(),
-    'user': lambda c: c.user(),
-    'author': lambda c: c.user(),
-    'date': lambda c: c.date()[0],
+    b'rev': lambda c: c.rev(),
+    b'branch': lambda c: c.branch(),
+    b'desc': lambda c: c.description(),
+    b'user': lambda c: c.user(),
+    b'author': lambda c: c.user(),
+    b'date': lambda c: c.date()[0],
 }
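
The _sortkeyfuncs table above supplies the comparison key for each sort()
spec; a leading '-' (parsed by _getsortargs below) reverses that key. A
hedged usage sketch, assuming a loaded localrepository object `repo`:

    # newest commit date first; b'date' maps to lambda c: c.date()[0]
    newest_first = repo.revs(b'sort(all(), -date)')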
 
 
 def _getsortargs(x):
     """Parse sort options into (set, [(key, reverse)], opts)"""
-    args = getargsdict(x, 'sort', 'set keys topo.firstbranch')
-    if 'set' not in args:
+    args = getargsdict(x, b'sort', b'set keys topo.firstbranch')
+    if b'set' not in args:
         # i18n: "sort" is a keyword
-        raise error.ParseError(_('sort requires one or two arguments'))
-    keys = "rev"
-    if 'keys' in args:
+        raise error.ParseError(_(b'sort requires one or two arguments'))
+    keys = b"rev"
+    if b'keys' in args:
         # i18n: "sort" is a keyword
-        keys = getstring(args['keys'], _("sort spec must be a string"))
+        keys = getstring(args[b'keys'], _(b"sort spec must be a string"))
 
     keyflags = []
     for k in keys.split():
         fk = k
-        reverse = k.startswith('-')
+        reverse = k.startswith(b'-')
         if reverse:
             k = k[1:]
-        if k not in _sortkeyfuncs and k != 'topo':
+        if k not in _sortkeyfuncs and k != b'topo':
             raise error.ParseError(
-                _("unknown sort key %r") % pycompat.bytestr(fk)
+                _(b"unknown sort key %r") % pycompat.bytestr(fk)
             )
         keyflags.append((k, reverse))
 
-    if len(keyflags) > 1 and any(k == 'topo' for k, reverse in keyflags):
+    if len(keyflags) > 1 and any(k == b'topo' for k, reverse in keyflags):
         # i18n: "topo" is a keyword
         raise error.ParseError(
-            _('topo sort order cannot be combined ' 'with other sort keys')
+            _(b'topo sort order cannot be combined ' b'with other sort keys')
         )
 
     opts = {}
-    if 'topo.firstbranch' in args:
-        if any(k == 'topo' for k, reverse in keyflags):
-            opts['topo.firstbranch'] = args['topo.firstbranch']
+    if b'topo.firstbranch' in args:
+        if any(k == b'topo' for k, reverse in keyflags):
+            opts[b'topo.firstbranch'] = args[b'topo.firstbranch']
         else:
             # i18n: "topo" and "topo.firstbranch" are keywords
             raise error.ParseError(
                 _(
-                    'topo.firstbranch can only be used '
-                    'when using the topo sort key'
+                    b'topo.firstbranch can only be used '
+                    b'when using the topo sort key'
                 )
             )
 
-    return args['set'], keyflags, opts
+    return args[b'set'], keyflags, opts
 
 
 @predicate(
-    'sort(set[, [-]key... [, ...]])', safe=True, takeorder=True, weight=10
+    b'sort(set[, [-]key... [, ...]])', safe=True, takeorder=True, weight=10
 )
 def sort(repo, subset, x, order):
     """Sort set by keys. The default sort order is ascending, specify a key
@@ -2318,13 +2328,13 @@
 
     if not keyflags or order != defineorder:
         return revs
-    if len(keyflags) == 1 and keyflags[0][0] == "rev":
+    if len(keyflags) == 1 and keyflags[0][0] == b"rev":
         revs.sort(reverse=keyflags[0][1])
         return revs
-    elif keyflags[0][0] == "topo":
+    elif keyflags[0][0] == b"topo":
         firstbranch = ()
-        if 'topo.firstbranch' in opts:
-            firstbranch = getset(repo, subset, opts['topo.firstbranch'])
+        if b'topo.firstbranch' in opts:
+            firstbranch = getset(repo, subset, opts[b'topo.firstbranch'])
         revs = baseset(
             dagop.toposort(revs, repo.changelog.parentrevs, firstbranch),
             istopo=True,
@@ -2340,18 +2350,18 @@
     return baseset([c.rev() for c in ctxs])
 
 
-@predicate('subrepo([pattern])')
+@predicate(b'subrepo([pattern])')
 def subrepo(repo, subset, x):
     """Changesets that add, modify or remove the given subrepo.  If no subrepo
     pattern is named, any subrepo changes are returned.
     """
     # i18n: "subrepo" is a keyword
-    args = getargs(x, 0, 1, _('subrepo takes at most one argument'))
+    args = getargs(x, 0, 1, _(b'subrepo takes at most one argument'))
     pat = None
     if len(args) != 0:
-        pat = getstring(args[0], _("subrepo requires a pattern"))
-
-    m = matchmod.exact(['.hgsubstate'])
+        pat = getstring(args[0], _(b"subrepo requires a pattern"))
+
+    m = matchmod.exact([b'.hgsubstate'])
 
     def submatches(names):
         k, p, m = stringutil.stringmatcher(pat)
@@ -2382,7 +2392,7 @@
 
         return False
 
-    return subset.filter(matches, condrepr=('<subrepo %r>', pat))
+    return subset.filter(matches, condrepr=(b'<subrepo %r>', pat))
 
 
 def _mapbynodefunc(repo, s, f):
@@ -2400,7 +2410,7 @@
     return smartset.baseset(result - repo.changelog.filteredrevs)
 
 
-@predicate('successors(set)', safe=True)
+@predicate(b'successors(set)', safe=True)
 def successors(repo, subset, x):
     """All successors for set, including the given set themselves"""
     s = getset(repo, fullreposet(repo), x)
@@ -2413,7 +2423,7 @@
     kind, pattern, matcher = stringutil.stringmatcher(
         pattern, casesensitive=casesensitive
     )
-    if kind == 'literal':
+    if kind == b'literal':
         if not casesensitive:
             pattern = encoding.lower(pattern)
             matcher = lambda s: pattern in encoding.lower(s)
@@ -2422,7 +2432,7 @@
     return kind, pattern, matcher
 
 
-@predicate('tag([name])', safe=True)
+@predicate(b'tag([name])', safe=True)
 def tag(repo, subset, x):
     """The specified tag by name, or all tagged revisions if no name is given.
 
@@ -2430,46 +2440,46 @@
     :hg:`help revisions.patterns`.
     """
     # i18n: "tag" is a keyword
-    args = getargs(x, 0, 1, _("tag takes one or no arguments"))
+    args = getargs(x, 0, 1, _(b"tag takes one or no arguments"))
     cl = repo.changelog
     if args:
         pattern = getstring(
             args[0],
             # i18n: "tag" is a keyword
-            _('the argument to tag must be a string'),
+            _(b'the argument to tag must be a string'),
         )
         kind, pattern, matcher = stringutil.stringmatcher(pattern)
-        if kind == 'literal':
+        if kind == b'literal':
             # avoid resolving all tags
             tn = repo._tagscache.tags.get(pattern, None)
             if tn is None:
                 raise error.RepoLookupError(
-                    _("tag '%s' does not exist") % pattern
+                    _(b"tag '%s' does not exist") % pattern
                 )
             s = {repo[tn].rev()}
         else:
             s = {cl.rev(n) for t, n in repo.tagslist() if matcher(t)}
     else:
-        s = {cl.rev(n) for t, n in repo.tagslist() if t != 'tip'}
+        s = {cl.rev(n) for t, n in repo.tagslist() if t != b'tip'}
     return subset & s
 
 
-@predicate('tagged', safe=True)
+@predicate(b'tagged', safe=True)
 def tagged(repo, subset, x):
     return tag(repo, subset, x)
 
 
-@predicate('orphan()', safe=True)
+@predicate(b'orphan()', safe=True)
 def orphan(repo, subset, x):
     """Non-obsolete changesets with obsolete ancestors. (EXPERIMENTAL)
     """
     # i18n: "orphan" is a keyword
-    getargs(x, 0, 0, _("orphan takes no arguments"))
-    orphan = obsmod.getrevs(repo, 'orphan')
+    getargs(x, 0, 0, _(b"orphan takes no arguments"))
+    orphan = obsmod.getrevs(repo, b'orphan')
     return subset & orphan
 
 
-@predicate('user(string)', safe=True, weight=10)
+@predicate(b'user(string)', safe=True, weight=10)
 def user(repo, subset, x):
     """User name contains string. The match is case-insensitive.
 
@@ -2479,18 +2489,18 @@
     return author(repo, subset, x)
 
 
-@predicate('wdir()', safe=True, weight=0)
+@predicate(b'wdir()', safe=True, weight=0)
 def wdir(repo, subset, x):
     """Working directory. (EXPERIMENTAL)"""
     # i18n: "wdir" is a keyword
-    getargs(x, 0, 0, _("wdir takes no arguments"))
+    getargs(x, 0, 0, _(b"wdir takes no arguments"))
     if node.wdirrev in subset or isinstance(subset, fullreposet):
         return baseset([node.wdirrev])
     return baseset()
 
 
 def _orderedlist(repo, subset, x):
-    s = getstring(x, "internal error")
+    s = getstring(x, b"internal error")
     if not s:
         return baseset()
     # remove duplicates here. it's difficult for caller to deduplicate sets
@@ -2498,11 +2508,11 @@
     cl = repo.changelog
     ls = []
     seen = set()
-    for t in s.split('\0'):
+    for t in s.split(b'\0'):
         try:
             # fast path for integer revision
             r = int(t)
-            if ('%d' % r) != t or r not in cl:
+            if (b'%d' % r) != t or r not in cl:
                 raise ValueError
             revs = [r]
         except ValueError:
@@ -2522,7 +2532,7 @@
 
 
 # for internal use
-@predicate('_list', safe=True, takeorder=True)
+@predicate(b'_list', safe=True, takeorder=True)
 def _list(repo, subset, x, order):
     if order == followorder:
         # slow path to take the subset order
@@ -2532,16 +2542,16 @@
 
 
 def _orderedintlist(repo, subset, x):
-    s = getstring(x, "internal error")
+    s = getstring(x, b"internal error")
     if not s:
         return baseset()
-    ls = [int(r) for r in s.split('\0')]
+    ls = [int(r) for r in s.split(b'\0')]
     s = subset
     return baseset([r for r in ls if r in s])
 
 
 # for internal use
-@predicate('_intlist', safe=True, takeorder=True, weight=0)
+@predicate(b'_intlist', safe=True, takeorder=True, weight=0)
 def _intlist(repo, subset, x, order):
     if order == followorder:
         # slow path to take the subset order
@@ -2551,17 +2561,17 @@
 
 
 def _orderedhexlist(repo, subset, x):
-    s = getstring(x, "internal error")
+    s = getstring(x, b"internal error")
     if not s:
         return baseset()
     cl = repo.changelog
-    ls = [cl.rev(node.bin(r)) for r in s.split('\0')]
+    ls = [cl.rev(node.bin(r)) for r in s.split(b'\0')]
     s = subset
     return baseset([r for r in ls if r in s])
 
 
 # for internal use
-@predicate('_hexlist', safe=True, takeorder=True)
+@predicate(b'_hexlist', safe=True, takeorder=True)
 def _hexlist(repo, subset, x, order):
     if order == followorder:
         # slow path to take the subset order
@@ -2571,33 +2581,33 @@
 
 
 methods = {
-    "range": rangeset,
-    "rangeall": rangeall,
-    "rangepre": rangepre,
-    "rangepost": rangepost,
-    "dagrange": dagrange,
-    "string": stringset,
-    "symbol": stringset,
-    "and": andset,
-    "andsmally": andsmallyset,
-    "or": orset,
-    "not": notset,
-    "difference": differenceset,
-    "relation": relationset,
-    "relsubscript": relsubscriptset,
-    "subscript": subscriptset,
-    "list": listset,
-    "keyvalue": keyvaluepair,
-    "func": func,
-    "ancestor": ancestorspec,
-    "parent": parentspec,
-    "parentpost": parentpost,
-    "smartset": rawsmartset,
+    b"range": rangeset,
+    b"rangeall": rangeall,
+    b"rangepre": rangepre,
+    b"rangepost": rangepost,
+    b"dagrange": dagrange,
+    b"string": stringset,
+    b"symbol": stringset,
+    b"and": andset,
+    b"andsmally": andsmallyset,
+    b"or": orset,
+    b"not": notset,
+    b"difference": differenceset,
+    b"relation": relationset,
+    b"relsubscript": relsubscriptset,
+    b"subscript": subscriptset,
+    b"list": listset,
+    b"keyvalue": keyvaluepair,
+    b"func": func,
+    b"ancestor": ancestorspec,
+    b"parent": parentspec,
+    b"parentpost": parentpost,
+    b"smartset": rawsmartset,
 }
 
 subscriptrelations = {
-    "g": generationsrel,
-    "generations": generationsrel,
+    b"g": generationsrel,
+    b"generations": generationsrel,
 }
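
The subscriptrelations table above names the relations allowed after '#',
as in 'x#generations[n]'. A minimal sketch of the two equivalent
spellings, assuming a loaded localrepository object `repo`:

    # b'g' and b'generations' resolve to the same generationsrel entry
    children = repo.revs(b'.#generations[1]')  # one generation forward
    same = repo.revs(b'.#g[1]')                # short-form alias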
 
 
@@ -2627,19 +2637,19 @@
 
         return mfunc
     if not all(specs):
-        raise error.ParseError(_("empty query"))
+        raise error.ParseError(_(b"empty query"))
     if len(specs) == 1:
         tree = revsetlang.parse(specs[0], lookup)
     else:
         tree = (
-            'or',
-            ('list',) + tuple(revsetlang.parse(s, lookup) for s in specs),
+            b'or',
+            (b'list',) + tuple(revsetlang.parse(s, lookup) for s in specs),
         )
 
     aliases = []
     warn = None
     if ui:
-        aliases.extend(ui.configitems('revsetalias'))
+        aliases.extend(ui.configitems(b'revsetalias'))
         warn = ui.warn
     if localalias:
         aliases.extend(localalias.items())
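
The hunk above parses each spec and, for multiple specs, joins the parsed
trees under a single b'or'/b'list' node before alias expansion. A sketch
of that combination step, assuming the byteified API (specs are bytes):

    from mercurial import revsetlang

    t1 = revsetlang.parse(b'draft()')
    t2 = revsetlang.parse(b'head()')
    # mirrors the tree built for len(specs) > 1 above
    tree = (b'or', (b'list', t1, t2))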
--- a/mercurial/revsetlang.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/revsetlang.py	Sun Oct 06 09:48:39 2019 -0400
@@ -22,63 +22,63 @@
 
 elements = {
     # token-type: binding-strength, primary, prefix, infix, suffix
-    "(": (21, None, ("group", 1, ")"), ("func", 1, ")"), None),
-    "[": (21, None, None, ("subscript", 1, "]"), None),
-    "#": (21, None, None, ("relation", 21), None),
-    "##": (20, None, None, ("_concat", 20), None),
-    "~": (18, None, None, ("ancestor", 18), None),
-    "^": (18, None, None, ("parent", 18), "parentpost"),
-    "-": (5, None, ("negate", 19), ("minus", 5), None),
-    "::": (
+    b"(": (21, None, (b"group", 1, b")"), (b"func", 1, b")"), None),
+    b"[": (21, None, None, (b"subscript", 1, b"]"), None),
+    b"#": (21, None, None, (b"relation", 21), None),
+    b"##": (20, None, None, (b"_concat", 20), None),
+    b"~": (18, None, None, (b"ancestor", 18), None),
+    b"^": (18, None, None, (b"parent", 18), b"parentpost"),
+    b"-": (5, None, (b"negate", 19), (b"minus", 5), None),
+    b"::": (
         17,
-        "dagrangeall",
-        ("dagrangepre", 17),
-        ("dagrange", 17),
-        "dagrangepost",
+        b"dagrangeall",
+        (b"dagrangepre", 17),
+        (b"dagrange", 17),
+        b"dagrangepost",
     ),
-    "..": (
+    b"..": (
         17,
-        "dagrangeall",
-        ("dagrangepre", 17),
-        ("dagrange", 17),
-        "dagrangepost",
+        b"dagrangeall",
+        (b"dagrangepre", 17),
+        (b"dagrange", 17),
+        b"dagrangepost",
     ),
-    ":": (15, "rangeall", ("rangepre", 15), ("range", 15), "rangepost"),
-    "not": (10, None, ("not", 10), None, None),
-    "!": (10, None, ("not", 10), None, None),
-    "and": (5, None, None, ("and", 5), None),
-    "&": (5, None, None, ("and", 5), None),
-    "%": (5, None, None, ("only", 5), "onlypost"),
-    "or": (4, None, None, ("or", 4), None),
-    "|": (4, None, None, ("or", 4), None),
-    "+": (4, None, None, ("or", 4), None),
-    "=": (3, None, None, ("keyvalue", 3), None),
-    ",": (2, None, None, ("list", 2), None),
-    ")": (0, None, None, None, None),
-    "]": (0, None, None, None, None),
-    "symbol": (0, "symbol", None, None, None),
-    "string": (0, "string", None, None, None),
-    "end": (0, None, None, None, None),
+    b":": (15, b"rangeall", (b"rangepre", 15), (b"range", 15), b"rangepost"),
+    b"not": (10, None, (b"not", 10), None, None),
+    b"!": (10, None, (b"not", 10), None, None),
+    b"and": (5, None, None, (b"and", 5), None),
+    b"&": (5, None, None, (b"and", 5), None),
+    b"%": (5, None, None, (b"only", 5), b"onlypost"),
+    b"or": (4, None, None, (b"or", 4), None),
+    b"|": (4, None, None, (b"or", 4), None),
+    b"+": (4, None, None, (b"or", 4), None),
+    b"=": (3, None, None, (b"keyvalue", 3), None),
+    b",": (2, None, None, (b"list", 2), None),
+    b")": (0, None, None, None, None),
+    b"]": (0, None, None, None, None),
+    b"symbol": (0, b"symbol", None, None, None),
+    b"string": (0, b"string", None, None, None),
+    b"end": (0, None, None, None, None),
 }
 
-keywords = {'and', 'or', 'not'}
+keywords = {b'and', b'or', b'not'}
 
 symbols = {}
 
-_quoteletters = {'"', "'"}
-_simpleopletters = set(pycompat.iterbytestr("()[]#:=,-|&+!~^%"))
+_quoteletters = {b'"', b"'"}
+_simpleopletters = set(pycompat.iterbytestr(b"()[]#:=,-|&+!~^%"))
 
 # default set of valid characters for the initial letter of symbols
 _syminitletters = set(
     pycompat.iterbytestr(
         pycompat.sysbytes(string.ascii_letters)
         + pycompat.sysbytes(string.digits)
-        + '._@'
+        + b'._@'
     )
 ) | set(map(pycompat.bytechr, pycompat.xrange(128, 256)))
 
 # default set of valid characters for non-initial letters of symbols
-_symletters = _syminitletters | set(pycompat.iterbytestr('-/'))
+_symletters = _syminitletters | set(pycompat.iterbytestr(b'-/'))
 
 
 def tokenize(program, lookup=None, syminitletters=None, symletters=None):
@@ -104,7 +104,7 @@
     '''
     if not isinstance(program, bytes):
         raise error.ProgrammingError(
-            'revset statement must be bytes, got %r' % program
+            b'revset statement must be bytes, got %r' % program
         )
     program = pycompat.bytestr(program)
     if syminitletters is None:
@@ -115,16 +115,16 @@
     if program and lookup:
         # attempt to parse old-style ranges first to deal with
         # things like old-tag which contain query metacharacters
-        parts = program.split(':', 1)
+        parts = program.split(b':', 1)
         if all(lookup(sym) for sym in parts if sym):
             if parts[0]:
-                yield ('symbol', parts[0], 0)
+                yield (b'symbol', parts[0], 0)
             if len(parts) > 1:
                 s = len(parts[0])
-                yield (':', None, s)
+                yield (b':', None, s)
                 if parts[1]:
-                    yield ('symbol', parts[1], s + 1)
-            yield ('end', None, len(program))
+                    yield (b'symbol', parts[1], s + 1)
+            yield (b'end', None, len(program))
             return
 
     pos, l = 0, len(program)
@@ -133,28 +133,28 @@
         if c.isspace():  # skip inter-token whitespace
             pass
         elif (
-            c == ':' and program[pos : pos + 2] == '::'
+            c == b':' and program[pos : pos + 2] == b'::'
         ):  # look ahead carefully
-            yield ('::', None, pos)
+            yield (b'::', None, pos)
             pos += 1  # skip ahead
         elif (
-            c == '.' and program[pos : pos + 2] == '..'
+            c == b'.' and program[pos : pos + 2] == b'..'
         ):  # look ahead carefully
-            yield ('..', None, pos)
+            yield (b'..', None, pos)
             pos += 1  # skip ahead
         elif (
-            c == '#' and program[pos : pos + 2] == '##'
+            c == b'#' and program[pos : pos + 2] == b'##'
         ):  # look ahead carefully
-            yield ('##', None, pos)
+            yield (b'##', None, pos)
             pos += 1  # skip ahead
         elif c in _simpleopletters:  # handle simple operators
             yield (c, None, pos)
         elif (
             c in _quoteletters
-            or c == 'r'
-            and program[pos : pos + 2] in ("r'", 'r"')
+            or c == b'r'
+            and program[pos : pos + 2] in (b"r'", b'r"')
         ):  # handle quoted strings
-            if c == 'r':
+            if c == b'r':
                 pos += 1
                 c = program[pos]
                 decode = lambda x: x
@@ -164,15 +164,15 @@
             s = pos
             while pos < l:  # find closing quote
                 d = program[pos]
-                if d == '\\':  # skip over escaped characters
+                if d == b'\\':  # skip over escaped characters
                     pos += 2
                     continue
                 if d == c:
-                    yield ('string', decode(program[s:pos]), s)
+                    yield (b'string', decode(program[s:pos]), s)
                     break
                 pos += 1
             else:
-                raise error.ParseError(_("unterminated string"), s)
+                raise error.ParseError(_(b"unterminated string"), s)
         # gather up a symbol/keyword
         elif c in syminitletters:
             s = pos
@@ -181,38 +181,40 @@
                 d = program[pos]
                 if d not in symletters:
                     break
-                if d == '.' and program[pos - 1] == '.':  # special case for ..
+                if (
+                    d == b'.' and program[pos - 1] == b'.'
+                ):  # special case for ..
                     pos -= 1
                     break
                 pos += 1
             sym = program[s:pos]
             if sym in keywords:  # operator keywords
                 yield (sym, None, s)
-            elif '-' in sym:
+            elif b'-' in sym:
                 # some jerk gave us foo-bar-baz, try to check if it's a symbol
                 if lookup and lookup(sym):
                     # looks like a real symbol
-                    yield ('symbol', sym, s)
+                    yield (b'symbol', sym, s)
                 else:
                     # looks like an expression
-                    parts = sym.split('-')
+                    parts = sym.split(b'-')
                     for p in parts[:-1]:
                         if p:  # possible consecutive -
-                            yield ('symbol', p, s)
+                            yield (b'symbol', p, s)
                         s += len(p)
-                        yield ('-', None, s)
+                        yield (b'-', None, s)
                         s += 1
                     if parts[-1]:  # possible trailing -
-                        yield ('symbol', parts[-1], s)
+                        yield (b'symbol', parts[-1], s)
             else:
-                yield ('symbol', sym, s)
+                yield (b'symbol', sym, s)
             pos -= 1
         else:
             raise error.ParseError(
-                _("syntax error in revset '%s'") % program, pos
+                _(b"syntax error in revset '%s'") % program, pos
             )
         pos += 1
-    yield ('end', None, pos)
+    yield (b'end', None, pos)
 
 
 # helpers
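
With the tokenizer above byteified, token types and values are both byte
strings. A sketch of its output at this revision:

    from mercurial import revsetlang

    for tok in revsetlang.tokenize(b'heads(default)'):
        print(tok)
    # (b'symbol', b'heads', 0)
    # (b'(', None, 5)
    # (b'symbol', b'default', 6)
    # (b')', None, 13)
    # (b'end', None, 14)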
@@ -221,13 +223,13 @@
 
 
 def getsymbol(x):
-    if x and x[0] == 'symbol':
+    if x and x[0] == b'symbol':
         return x[1]
-    raise error.ParseError(_('not a symbol'))
+    raise error.ParseError(_(b'not a symbol'))
 
 
 def getstring(x, err):
-    if x and (x[0] == 'string' or x[0] == 'symbol'):
+    if x and (x[0] == b'string' or x[0] == b'symbol'):
         return x[1]
     raise error.ParseError(err)
 
@@ -251,7 +253,7 @@
 def getlist(x):
     if not x:
         return []
-    if x[0] == 'list':
+    if x[0] == b'list':
         return list(x[1:])
     return [x]
 
@@ -260,13 +262,13 @@
     if not x:
         raise error.ParseError(err)
     op = x[0]
-    if op == 'range':
+    if op == b'range':
         return x[1], x[2]
-    elif op == 'rangepre':
+    elif op == b'rangepre':
         return None, x[1]
-    elif op == 'rangepost':
+    elif op == b'rangepost':
         return x[1], None
-    elif op == 'rangeall':
+    elif op == b'rangeall':
         return None, None
     raise error.ParseError(err)
 
@@ -277,7 +279,7 @@
     If any of the sides omitted, and if no default provided, ParseError will
     be raised.
     """
-    if x and (x[0] == 'string' or x[0] == 'symbol'):
+    if x and (x[0] == b'string' or x[0] == b'symbol'):
         n = getinteger(x, err1)
         return n, n
     a, b = getrange(x, err1)
@@ -296,8 +298,8 @@
         getlist(x),
         funcname,
         parser.splitargspec(keys),
-        keyvaluenode='keyvalue',
-        keynode='symbol',
+        keyvaluenode=b'keyvalue',
+        keynode=b'symbol',
     )
 
 
@@ -320,7 +322,7 @@
     ('and', ('func', ('symbol', 'f'), ('string', '1')), ('symbol', '2'))
     """
     template = _cachedtree(tmplspec)
-    return parser.buildtree(template, ('symbol', '_'), *repls)
+    return parser.buildtree(template, (b'symbol', b'_'), *repls)
 
 
 def _match(patspec, tree):
@@ -333,12 +335,12 @@
     """
     pattern = _cachedtree(patspec)
     return parser.matchtree(
-        pattern, tree, ('symbol', '_'), {'keyvalue', 'list'}
+        pattern, tree, (b'symbol', b'_'), {b'keyvalue', b'list'}
     )
 
 
 def _matchonly(revs, bases):
-    return _match('ancestors(_) and not ancestors(_)', ('and', revs, bases))
+    return _match(b'ancestors(_) and not ancestors(_)', (b'and', revs, bases))
 
 
 def _fixops(x):
@@ -348,25 +350,25 @@
         return x
 
     op = x[0]
-    if op == 'parent':
+    if op == b'parent':
         # x^:y means (x^) : y, not x ^ (:y)
         # x^:  means (x^) :,   not x ^ (:)
-        post = ('parentpost', x[1])
-        if x[2][0] == 'dagrangepre':
-            return _fixops(('dagrange', post, x[2][1]))
-        elif x[2][0] == 'dagrangeall':
-            return _fixops(('dagrangepost', post))
-        elif x[2][0] == 'rangepre':
-            return _fixops(('range', post, x[2][1]))
-        elif x[2][0] == 'rangeall':
-            return _fixops(('rangepost', post))
-    elif op == 'or':
+        post = (b'parentpost', x[1])
+        if x[2][0] == b'dagrangepre':
+            return _fixops((b'dagrange', post, x[2][1]))
+        elif x[2][0] == b'dagrangeall':
+            return _fixops((b'dagrangepost', post))
+        elif x[2][0] == b'rangepre':
+            return _fixops((b'range', post, x[2][1]))
+        elif x[2][0] == b'rangeall':
+            return _fixops((b'rangepost', post))
+    elif op == b'or':
         # make number of arguments deterministic:
         # x + y + z -> (or x y z) -> (or (list x y z))
-        return (op, _fixops(('list',) + x[1:]))
-    elif op == 'subscript' and x[1][0] == 'relation':
+        return (op, _fixops((b'list',) + x[1:]))
+    elif op == b'subscript' and x[1][0] == b'relation':
         # x#y[z] ternary
-        return _fixops(('relsubscript', x[1][1], x[1][2], x[2]))
+        return _fixops((b'relsubscript', x[1][1], x[1][2], x[2]))
 
     return (op,) + tuple(_fixops(y) for y in x[1:])
 
@@ -376,53 +378,53 @@
         return x
 
     op = x[0]
-    if op == 'minus':
-        return _analyze(_build('_ and not _', *x[1:]))
-    elif op == 'only':
-        return _analyze(_build('only(_, _)', *x[1:]))
-    elif op == 'onlypost':
-        return _analyze(_build('only(_)', x[1]))
-    elif op == 'dagrangeall':
-        raise error.ParseError(_("can't use '::' in this context"))
-    elif op == 'dagrangepre':
-        return _analyze(_build('ancestors(_)', x[1]))
-    elif op == 'dagrangepost':
-        return _analyze(_build('descendants(_)', x[1]))
-    elif op == 'negate':
-        s = getstring(x[1], _("can't negate that"))
-        return _analyze(('string', '-' + s))
-    elif op in ('string', 'symbol', 'smartset'):
+    if op == b'minus':
+        return _analyze(_build(b'_ and not _', *x[1:]))
+    elif op == b'only':
+        return _analyze(_build(b'only(_, _)', *x[1:]))
+    elif op == b'onlypost':
+        return _analyze(_build(b'only(_)', x[1]))
+    elif op == b'dagrangeall':
+        raise error.ParseError(_(b"can't use '::' in this context"))
+    elif op == b'dagrangepre':
+        return _analyze(_build(b'ancestors(_)', x[1]))
+    elif op == b'dagrangepost':
+        return _analyze(_build(b'descendants(_)', x[1]))
+    elif op == b'negate':
+        s = getstring(x[1], _(b"can't negate that"))
+        return _analyze((b'string', b'-' + s))
+    elif op in (b'string', b'symbol', b'smartset'):
         return x
-    elif op == 'rangeall':
+    elif op == b'rangeall':
         return (op, None)
-    elif op in {'or', 'not', 'rangepre', 'rangepost', 'parentpost'}:
+    elif op in {b'or', b'not', b'rangepre', b'rangepost', b'parentpost'}:
         return (op, _analyze(x[1]))
-    elif op == 'group':
+    elif op == b'group':
         return _analyze(x[1])
     elif op in {
-        'and',
-        'dagrange',
-        'range',
-        'parent',
-        'ancestor',
-        'relation',
-        'subscript',
+        b'and',
+        b'dagrange',
+        b'range',
+        b'parent',
+        b'ancestor',
+        b'relation',
+        b'subscript',
     }:
         ta = _analyze(x[1])
         tb = _analyze(x[2])
         return (op, ta, tb)
-    elif op == 'relsubscript':
+    elif op == b'relsubscript':
         ta = _analyze(x[1])
         tb = _analyze(x[2])
         tc = _analyze(x[3])
         return (op, ta, tb, tc)
-    elif op == 'list':
+    elif op == b'list':
         return (op,) + tuple(_analyze(y) for y in x[1:])
-    elif op == 'keyvalue':
+    elif op == b'keyvalue':
         return (op, x[1], _analyze(x[2]))
-    elif op == 'func':
+    elif op == b'func':
         return (op, x[1], _analyze(x[2]))
-    raise ValueError('invalid operator %r' % op)
+    raise ValueError(b'invalid operator %r' % op)
 
 
 def analyze(x):
@@ -440,30 +442,30 @@
         return 0, x
 
     op = x[0]
-    if op in ('string', 'symbol', 'smartset'):
+    if op in (b'string', b'symbol', b'smartset'):
         return 0.5, x  # single revisions are small
-    elif op == 'and':
+    elif op == b'and':
         wa, ta = _optimize(x[1])
         wb, tb = _optimize(x[2])
         w = min(wa, wb)
 
         # (draft/secret/_notpublic() & ::x) have a fast path
-        m = _match('_() & ancestors(_)', ('and', ta, tb))
-        if m and getsymbol(m[1]) in {'draft', 'secret', '_notpublic'}:
-            return w, _build('_phaseandancestors(_, _)', m[1], m[2])
+        m = _match(b'_() & ancestors(_)', (b'and', ta, tb))
+        if m and getsymbol(m[1]) in {b'draft', b'secret', b'_notpublic'}:
+            return w, _build(b'_phaseandancestors(_, _)', m[1], m[2])
 
         # (::x and not ::y)/(not ::y and ::x) have a fast path
         m = _matchonly(ta, tb) or _matchonly(tb, ta)
         if m:
-            return w, _build('only(_, _)', *m[1:])
+            return w, _build(b'only(_, _)', *m[1:])
 
-        m = _match('not _', tb)
+        m = _match(b'not _', tb)
         if m:
-            return wa, ('difference', ta, m[1])
+            return wa, (b'difference', ta, m[1])
         if wa > wb:
-            op = 'andsmally'
+            op = b'andsmally'
         return w, (op, ta, tb)
-    elif op == 'or':
+    elif op == b'or':
         # fast path for machine-generated expressions, which are likely to
         # have lots of trivial revisions: 'a + b + c()' to '_list(a b) + c()'
         ws, ts, ss = [], [], []
@@ -474,8 +476,8 @@
             if len(ss) == 1:
                 w, t = ss[0]
             else:
-                s = '\0'.join(t[1] for w, t in ss)
-                y = _build('_list(_)', ('string', s))
+                s = b'\0'.join(t[1] for w, t in ss)
+                y = _build(b'_list(_)', (b'string', s))
                 w, t = _optimize(y)
             ws.append(w)
             ts.append(t)
@@ -483,7 +485,7 @@
 
         for y in getlist(x[1]):
             w, t = _optimize(y)
-            if t is not None and (t[0] == 'string' or t[0] == 'symbol'):
+            if t is not None and (t[0] == b'string' or t[0] == b'symbol'):
                 ss.append((w, t))
                 continue
             flushss()
@@ -492,48 +494,48 @@
         flushss()
         if len(ts) == 1:
             return ws[0], ts[0]  # 'or' operation is fully optimized out
-        return max(ws), (op, ('list',) + tuple(ts))
-    elif op == 'not':
+        return max(ws), (op, (b'list',) + tuple(ts))
+    elif op == b'not':
         # Optimize not public() to _notpublic() because we have a fast version
-        if _match('public()', x[1]):
-            o = _optimize(_build('_notpublic()'))
+        if _match(b'public()', x[1]):
+            o = _optimize(_build(b'_notpublic()'))
             return o[0], o[1]
         else:
             o = _optimize(x[1])
             return o[0], (op, o[1])
-    elif op == 'rangeall':
+    elif op == b'rangeall':
         return 1, x
-    elif op in ('rangepre', 'rangepost', 'parentpost'):
+    elif op in (b'rangepre', b'rangepost', b'parentpost'):
         o = _optimize(x[1])
         return o[0], (op, o[1])
-    elif op in ('dagrange', 'range'):
+    elif op in (b'dagrange', b'range'):
         wa, ta = _optimize(x[1])
         wb, tb = _optimize(x[2])
         return wa + wb, (op, ta, tb)
-    elif op in ('parent', 'ancestor', 'relation', 'subscript'):
+    elif op in (b'parent', b'ancestor', b'relation', b'subscript'):
         w, t = _optimize(x[1])
         return w, (op, t, x[2])
-    elif op == 'relsubscript':
+    elif op == b'relsubscript':
         w, t = _optimize(x[1])
         return w, (op, t, x[2], x[3])
-    elif op == 'list':
+    elif op == b'list':
         ws, ts = zip(*(_optimize(y) for y in x[1:]))
         return sum(ws), (op,) + ts
-    elif op == 'keyvalue':
+    elif op == b'keyvalue':
         w, t = _optimize(x[2])
         return w, (op, x[1], t)
-    elif op == 'func':
+    elif op == b'func':
         f = getsymbol(x[1])
         wa, ta = _optimize(x[2])
         w = getattr(symbols.get(f), '_weight', 1)
-        m = _match('commonancestors(_)', ta)
+        m = _match(b'commonancestors(_)', ta)
 
         # Optimize heads(commonancestors(_)) because we have a fast version
-        if f == 'heads' and m:
-            return w + wa, _build('_commonancestorheads(_)', m[1])
+        if f == b'heads' and m:
+            return w + wa, _build(b'_commonancestorheads(_)', m[1])
 
         return w + wa, (op, x[1], ta)
-    raise ValueError('invalid operator %r' % op)
+    raise ValueError(b'invalid operator %r' % op)
 
 
 def optimize(tree):
@@ -547,7 +549,7 @@
 
 # the set of valid characters for the initial letter of symbols in
 # alias declarations and definitions
-_aliassyminitletters = _syminitletters | {'$'}
+_aliassyminitletters = _syminitletters | {b'$'}
 
 
 def _parsewith(spec, lookup=None, syminitletters=None):
@@ -564,21 +566,21 @@
       ...
     ParseError: ('invalid token', 4)
     """
-    if lookup and spec.startswith('revset(') and spec.endswith(')'):
+    if lookup and spec.startswith(b'revset(') and spec.endswith(b')'):
         lookup = None
     p = parser.parser(elements)
     tree, pos = p.parse(
         tokenize(spec, lookup=lookup, syminitletters=syminitletters)
     )
     if pos != len(spec):
-        raise error.ParseError(_('invalid token'), pos)
-    return _fixops(parser.simplifyinfixops(tree, ('list', 'or')))
+        raise error.ParseError(_(b'invalid token'), pos)
+    return _fixops(parser.simplifyinfixops(tree, (b'list', b'or')))
 
 
 class _aliasrules(parser.basealiasrules):
     """Parsing and expansion rule set of revset aliases"""
 
-    _section = _('revset alias')
+    _section = _(b'revset alias')
 
     @staticmethod
     def _parse(spec):
@@ -592,7 +594,7 @@
 
     @staticmethod
     def _trygetfunc(tree):
-        if tree[0] == 'func' and tree[1][0] == 'symbol':
+        if tree[0] == b'func' and tree[1][0] == b'symbol':
             return tree[1][1], getlist(tree[2])
 
 
@@ -604,7 +606,7 @@
     if warn is not None:
         for name, alias in sorted(aliases.iteritems()):
             if alias.error and not alias.warned:
-                warn(_('warning: %s\n') % (alias.error))
+                warn(_(b'warning: %s\n') % (alias.error))
                 alias.warned = True
     return tree
 
@@ -613,24 +615,24 @@
     """Fold elements to be concatenated by `##`
     """
     if not isinstance(tree, tuple) or tree[0] in (
-        'string',
-        'symbol',
-        'smartset',
+        b'string',
+        b'symbol',
+        b'smartset',
     ):
         return tree
-    if tree[0] == '_concat':
+    if tree[0] == b'_concat':
         pending = [tree]
         l = []
         while pending:
             e = pending.pop()
-            if e[0] == '_concat':
+            if e[0] == b'_concat':
                 pending.extend(reversed(e[1:]))
-            elif e[0] in ('string', 'symbol'):
+            elif e[0] in (b'string', b'symbol'):
                 l.append(e[1])
             else:
-                msg = _("\"##\" can't concatenate \"%s\" element") % (e[0])
+                msg = _(b"\"##\" can't concatenate \"%s\" element") % (e[0])
                 raise error.ParseError(msg)
-        return ('string', ''.join(l))
+        return (b'string', b''.join(l))
     else:
         return tuple(foldconcat(t) for t in tree)
 
@@ -642,12 +644,12 @@
         if len(inst.args) > 1:  # has location
             loc = inst.args[1]
             # Remove newlines -- spaces are equivalent whitespace.
-            spec = spec.replace('\n', ' ')
+            spec = spec.replace(b'\n', b' ')
             # We want the caret to point to the place in the template that
             # failed to parse, but in a hint we get an open paren at the
             # start. Therefore, we print "loc + 1" spaces (instead of "loc")
             # to line up the caret with the location of the error.
-            inst.hint = spec + '\n' + ' ' * (loc + 1) + '^ ' + _('here')
+            inst.hint = spec + b'\n' + b' ' * (loc + 1) + b'^ ' + _(b'here')
         raise
 
 
@@ -663,70 +665,70 @@
     >>> _quote(1)
     "'1'"
     """
-    return "'%s'" % stringutil.escapestr(pycompat.bytestr(s))
+    return b"'%s'" % stringutil.escapestr(pycompat.bytestr(s))
 
 
 def _formatargtype(c, arg):
-    if c == 'd':
-        return '_rev(%d)' % int(arg)
-    elif c == 's':
+    if c == b'd':
+        return b'_rev(%d)' % int(arg)
+    elif c == b's':
         return _quote(arg)
-    elif c == 'r':
+    elif c == b'r':
         if not isinstance(arg, bytes):
             raise TypeError
         parse(arg)  # make sure syntax errors are confined
-        return '(%s)' % arg
-    elif c == 'n':
+        return b'(%s)' % arg
+    elif c == b'n':
         return _quote(node.hex(arg))
-    elif c == 'b':
+    elif c == b'b':
         try:
             return _quote(arg.branch())
         except AttributeError:
             raise TypeError
-    raise error.ParseError(_('unexpected revspec format character %s') % c)
+    raise error.ParseError(_(b'unexpected revspec format character %s') % c)
 
 
 def _formatlistexp(s, t):
     l = len(s)
     if l == 0:
-        return "_list('')"
+        return b"_list('')"
     elif l == 1:
         return _formatargtype(t, s[0])
-    elif t == 'd':
+    elif t == b'd':
         return _formatintlist(s)
-    elif t == 's':
-        return "_list(%s)" % _quote("\0".join(s))
-    elif t == 'n':
-        return "_hexlist('%s')" % "\0".join(node.hex(a) for a in s)
-    elif t == 'b':
+    elif t == b's':
+        return b"_list(%s)" % _quote(b"\0".join(s))
+    elif t == b'n':
+        return b"_hexlist('%s')" % b"\0".join(node.hex(a) for a in s)
+    elif t == b'b':
         try:
-            return "_list('%s')" % "\0".join(a.branch() for a in s)
+            return b"_list('%s')" % b"\0".join(a.branch() for a in s)
         except AttributeError:
             raise TypeError
 
     m = l // 2
-    return '(%s or %s)' % (_formatlistexp(s[:m], t), _formatlistexp(s[m:], t))
+    return b'(%s or %s)' % (_formatlistexp(s[:m], t), _formatlistexp(s[m:], t))
 
 
 def _formatintlist(data):
     try:
         l = len(data)
         if l == 0:
-            return "_list('')"
+            return b"_list('')"
         elif l == 1:
-            return _formatargtype('d', data[0])
-        return "_intlist('%s')" % "\0".join('%d' % int(a) for a in data)
+            return _formatargtype(b'd', data[0])
+        return b"_intlist('%s')" % b"\0".join(b'%d' % int(a) for a in data)
     except (TypeError, ValueError):
-        raise error.ParseError(_('invalid argument for revspec'))
+        raise error.ParseError(_(b'invalid argument for revspec'))
 
 
 def _formatparamexp(args, t):
-    return ', '.join(_formatargtype(t, a) for a in args)
+    return b', '.join(_formatargtype(t, a) for a in args)
 
 
 _formatlistfuncs = {
-    'l': _formatlistexp,
-    'p': _formatparamexp,
+    b'l': _formatlistexp,
+    b'p': _formatparamexp,
 }
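
These helpers back revsetlang.formatspec, which quotes each argument
according to its %-type; after byteification both the template and the
result are bytes. A sketch:

    from mercurial import revsetlang

    revsetlang.formatspec(b'%s::%s', b'1.0', b'tip')
    # -> b"'1.0'::'tip'"
    revsetlang.formatspec(b'%ld', [1, 2, 3])
    # -> b"_intlist('1\x002\x003')"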
 
 
@@ -772,12 +774,12 @@
     for t, arg in parsed:
         if t is None:
             ret.append(arg)
-        elif t == 'baseset':
+        elif t == b'baseset':
             if isinstance(arg, set):
                 arg = sorted(arg)
             ret.append(_formatintlist(list(arg)))
         else:
-            raise error.ProgrammingError("unknown revspec item type: %r" % t)
+            raise error.ProgrammingError(b"unknown revspec item type: %r" % t)
     return b''.join(ret)
 
 
@@ -789,15 +791,15 @@
     for t, arg in parsed:
         if t is None:
             ret.append(arg)
-        elif t == 'baseset':
-            newtree = ('smartset', smartset.baseset(arg))
+        elif t == b'baseset':
+            newtree = (b'smartset', smartset.baseset(arg))
             inputs.append(newtree)
-            ret.append("$")
+            ret.append(b"$")
         else:
-            raise error.ProgrammingError("unknown revspec item type: %r" % t)
+            raise error.ProgrammingError(b"unknown revspec item type: %r" % t)
     expr = b''.join(ret)
     tree = _parsewith(expr, syminitletters=_aliassyminitletters)
-    tree = parser.buildtree(tree, ('symbol', '$'), *inputs)
+    tree = parser.buildtree(tree, (b'symbol', b'$'), *inputs)
     tree = foldconcat(tree)
     tree = analyze(tree)
     tree = optimize(tree)
@@ -818,7 +820,7 @@
     ret = []
     pos = 0
     while pos < len(expr):
-        q = expr.find('%', pos)
+        q = expr.find(b'%', pos)
         if q < 0:
             ret.append((None, expr[pos:]))
             break
@@ -827,8 +829,8 @@
         try:
             d = expr[pos]
         except IndexError:
-            raise error.ParseError(_('incomplete revspec format character'))
-        if d == '%':
+            raise error.ParseError(_(b'incomplete revspec format character'))
+        if d == b'%':
             ret.append((None, d))
             pos += 1
             continue
@@ -836,45 +838,47 @@
         try:
             arg = next(argiter)
         except StopIteration:
-            raise error.ParseError(_('missing argument for revspec'))
+            raise error.ParseError(_(b'missing argument for revspec'))
         f = _formatlistfuncs.get(d)
         if f:
             # a list of some type, might be expensive, do not replace
             pos += 1
-            islist = d == 'l'
+            islist = d == b'l'
             try:
                 d = expr[pos]
             except IndexError:
-                raise error.ParseError(_('incomplete revspec format character'))
-            if islist and d == 'd' and arg:
+                raise error.ParseError(
+                    _(b'incomplete revspec format character')
+                )
+            if islist and d == b'd' and arg:
                 # we don't create a baseset yet, because it comes with an
                 # extra cost. If we are going to serialize it, we had better
                 # skip it.
-                ret.append(('baseset', arg))
+                ret.append((b'baseset', arg))
                 pos += 1
                 continue
             try:
                 ret.append((None, f(list(arg), d)))
             except (TypeError, ValueError):
-                raise error.ParseError(_('invalid argument for revspec'))
+                raise error.ParseError(_(b'invalid argument for revspec'))
         else:
             # a single entry, not expensive, replace
             try:
                 ret.append((None, _formatargtype(d, arg)))
             except (TypeError, ValueError):
-                raise error.ParseError(_('invalid argument for revspec'))
+                raise error.ParseError(_(b'invalid argument for revspec'))
         pos += 1
 
     try:
         next(argiter)
-        raise error.ParseError(_('too many revspec arguments specified'))
+        raise error.ParseError(_(b'too many revspec arguments specified'))
     except StopIteration:
         pass
     return ret
 
 
 def prettyformat(tree):
-    return parser.prettyformat(tree, ('string', 'symbol'))
+    return parser.prettyformat(tree, (b'string', b'symbol'))
 
 
 def depth(tree):
@@ -885,18 +889,18 @@
 
 
 def funcsused(tree):
-    if not isinstance(tree, tuple) or tree[0] in ('string', 'symbol'):
+    if not isinstance(tree, tuple) or tree[0] in (b'string', b'symbol'):
         return set()
     else:
         funcs = set()
         for s in tree[1:]:
             funcs |= funcsused(s)
-        if tree[0] == 'func':
+        if tree[0] == b'func':
             funcs.add(tree[1][1])
         return funcs
 
 
-_hashre = util.re.compile('[0-9a-fA-F]{1,40}$')
+_hashre = util.re.compile(b'[0-9a-fA-F]{1,40}$')
 
 
 def _ishashlikesymbol(symbol):
@@ -919,7 +923,7 @@
     if not tree:
         return []
 
-    if tree[0] == "symbol":
+    if tree[0] == b"symbol":
         if _ishashlikesymbol(tree[1]):
             return [tree[1]]
     elif len(tree) >= 3:
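
Across revsetlang, the byteified _hashre pattern classifies symbols that
look like hexadecimal node prefixes. A standalone sketch of the same
pattern using the stdlib re module rather than util.re:

    import re

    _hashre = re.compile(b'[0-9a-fA-F]{1,40}$')
    assert _hashre.match(b'a9c1ff')          # plausible hex prefix
    assert not _hashre.match(b'not-a-hash')  # '-' is outside the class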
--- a/mercurial/rewriteutil.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/rewriteutil.py	Sun Oct 06 09:48:39 2019 -0400
@@ -17,29 +17,29 @@
 )
 
 
-def precheck(repo, revs, action='rewrite'):
+def precheck(repo, revs, action=b'rewrite'):
     """check if revs can be rewritten
     action is used to control the error message.
 
     Make sure this function is called after taking the lock.
     """
     if node.nullrev in revs:
-        msg = _("cannot %s null changeset") % action
-        hint = _("no changeset checked out")
+        msg = _(b"cannot %s null changeset") % action
+        hint = _(b"no changeset checked out")
         raise error.Abort(msg, hint=hint)
 
     if len(repo[None].parents()) > 1:
-        raise error.Abort(_("cannot %s while merging") % action)
+        raise error.Abort(_(b"cannot %s while merging") % action)
 
-    publicrevs = repo.revs('%ld and public()', revs)
+    publicrevs = repo.revs(b'%ld and public()', revs)
     if publicrevs:
-        msg = _("cannot %s public changesets") % action
-        hint = _("see 'hg help phases' for details")
+        msg = _(b"cannot %s public changesets") % action
+        hint = _(b"see 'hg help phases' for details")
         raise error.Abort(msg, hint=hint)
 
     newunstable = disallowednewunstable(repo, revs)
     if newunstable:
-        raise error.Abort(_("cannot %s changeset with children") % action)
+        raise error.Abort(_(b"cannot %s changeset with children") % action)
 
 
 def disallowednewunstable(repo, revs):
@@ -52,4 +52,4 @@
     allowunstable = obsolete.isenabled(repo, obsolete.allowunstableopt)
     if allowunstable:
         return revset.baseset()
-    return repo.revs("(%ld::) - %ld", revs, revs)
+    return repo.revs(b"(%ld::) - %ld", revs, revs)
--- a/mercurial/scmposix.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/scmposix.py	Sun Oct 06 09:48:39 2019 -0400
@@ -16,18 +16,18 @@
 # $MORE variable, but there's no compatible option with Linux 'more'. Given
 # OS X is widely used and most modern Unix systems would have 'less', setting
 # 'less' as the default seems reasonable.
-fallbackpager = 'less'
+fallbackpager = b'less'
 
 
 def _rcfiles(path):
-    rcs = [os.path.join(path, 'hgrc')]
-    rcdir = os.path.join(path, 'hgrc.d')
+    rcs = [os.path.join(path, b'hgrc')]
+    rcdir = os.path.join(path, b'hgrc.d')
     try:
         rcs.extend(
             [
                 os.path.join(rcdir, f)
                 for f, kind in util.listdir(rcdir)
-                if f.endswith(".rc")
+                if f.endswith(b".rc")
             ]
         )
     except OSError:
@@ -37,32 +37,32 @@
 
 def systemrcpath():
     path = []
-    if pycompat.sysplatform == 'plan9':
-        root = 'lib/mercurial'
+    if pycompat.sysplatform == b'plan9':
+        root = b'lib/mercurial'
     else:
-        root = 'etc/mercurial'
+        root = b'etc/mercurial'
     # old mod_python does not set sys.argv
     if len(getattr(sys, 'argv', [])) > 0:
         p = os.path.dirname(os.path.dirname(pycompat.sysargv[0]))
-        if p != '/':
+        if p != b'/':
             path.extend(_rcfiles(os.path.join(p, root)))
-    path.extend(_rcfiles('/' + root))
+    path.extend(_rcfiles(b'/' + root))
     return path
 
 
 def userrcpath():
-    if pycompat.sysplatform == 'plan9':
-        return [encoding.environ['home'] + '/lib/hgrc']
+    if pycompat.sysplatform == b'plan9':
+        return [encoding.environ[b'home'] + b'/lib/hgrc']
     elif pycompat.isdarwin:
-        return [os.path.expanduser('~/.hgrc')]
+        return [os.path.expanduser(b'~/.hgrc')]
     else:
-        confighome = encoding.environ.get('XDG_CONFIG_HOME')
+        confighome = encoding.environ.get(b'XDG_CONFIG_HOME')
         if confighome is None or not os.path.isabs(confighome):
-            confighome = os.path.expanduser('~/.config')
+            confighome = os.path.expanduser(b'~/.config')
 
         return [
-            os.path.expanduser('~/.hgrc'),
-            os.path.join(confighome, 'hg', 'hgrc'),
+            os.path.expanduser(b'~/.hgrc'),
+            os.path.join(confighome, b'hg', b'hgrc'),
         ]
 
 
@@ -82,7 +82,7 @@
                 continue
             if not os.isatty(fd):
                 continue
-            arri = fcntl.ioctl(fd, TIOCGWINSZ, '\0' * 8)
+            arri = fcntl.ioctl(fd, TIOCGWINSZ, b'\0' * 8)
             height, width = array.array(r'h', arri)[:2]
             if width > 0 and height > 0:
                 return width, height
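
The ioctl probe above unpacks a struct winsize into (rows, cols) shorts;
note the request buffer is now the byte string b'\0' * 8. A standalone
sketch, assuming stdin is a POSIX terminal:

    import array
    import fcntl
    import sys
    import termios

    arri = fcntl.ioctl(sys.stdin.fileno(), termios.TIOCGWINSZ, b'\0' * 8)
    height, width = array.array('h', arri)[:2]  # ws_row, ws_col
    print(width, height)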
--- a/mercurial/scmutil.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/scmutil.py	Sun Oct 06 09:48:39 2019 -0400
@@ -158,11 +158,11 @@
 
     if secretlist:
         ui.status(
-            _("no changes found (ignored %d secret changesets)\n")
+            _(b"no changes found (ignored %d secret changesets)\n")
             % len(secretlist)
         )
     else:
-        ui.status(_("no changes found\n"))
+        ui.status(_(b"no changes found\n"))
 
 
 def callcatch(ui, func):
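
callcatch, whose handlers are byteified in the hunks below, runs func()
and converts known Mercurial errors into byte-string diagnostics on ui,
returning an exit code instead of propagating. A hedged sketch (`ui` is
assumed to be an existing ui instance):

    from mercurial import error, scmutil

    def badcmd():
        raise error.Abort(b'boom', hint=b'try --force')

    # prints "abort: boom" and the hint via ui.error; returns an error code
    ret = scmutil.callcatch(ui, badcmd)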
@@ -181,20 +181,20 @@
     # Mercurial-specific first, followed by built-in and library exceptions
     except error.LockHeld as inst:
         if inst.errno == errno.ETIMEDOUT:
-            reason = _('timed out waiting for lock held by %r') % (
+            reason = _(b'timed out waiting for lock held by %r') % (
                 pycompat.bytestr(inst.locker)
             )
         else:
-            reason = _('lock held by %r') % inst.locker
+            reason = _(b'lock held by %r') % inst.locker
         ui.error(
-            _("abort: %s: %s\n")
+            _(b"abort: %s: %s\n")
             % (inst.desc or stringutil.forcebytestr(inst.filename), reason)
         )
         if not inst.locker:
-            ui.error(_("(lock might be very busy)\n"))
+            ui.error(_(b"(lock might be very busy)\n"))
     except error.LockUnavailable as inst:
         ui.error(
-            _("abort: could not lock %s: %s\n")
+            _(b"abort: could not lock %s: %s\n")
             % (
                 inst.desc or stringutil.forcebytestr(inst.filename),
                 encoding.strtolocal(inst.strerror),
@@ -202,57 +202,57 @@
         )
     except error.OutOfBandError as inst:
         if inst.args:
-            msg = _("abort: remote error:\n")
+            msg = _(b"abort: remote error:\n")
         else:
-            msg = _("abort: remote error\n")
+            msg = _(b"abort: remote error\n")
         ui.error(msg)
         if inst.args:
-            ui.error(''.join(inst.args))
+            ui.error(b''.join(inst.args))
         if inst.hint:
-            ui.error('(%s)\n' % inst.hint)
+            ui.error(b'(%s)\n' % inst.hint)
     except error.RepoError as inst:
-        ui.error(_("abort: %s!\n") % inst)
+        ui.error(_(b"abort: %s!\n") % inst)
         if inst.hint:
-            ui.error(_("(%s)\n") % inst.hint)
+            ui.error(_(b"(%s)\n") % inst.hint)
     except error.ResponseError as inst:
-        ui.error(_("abort: %s") % inst.args[0])
+        ui.error(_(b"abort: %s") % inst.args[0])
         msg = inst.args[1]
         if isinstance(msg, type(u'')):
             msg = pycompat.sysbytes(msg)
         if not isinstance(msg, bytes):
-            ui.error(" %r\n" % (msg,))
+            ui.error(b" %r\n" % (msg,))
         elif not msg:
-            ui.error(_(" empty string\n"))
+            ui.error(_(b" empty string\n"))
         else:
-            ui.error("\n%r\n" % pycompat.bytestr(stringutil.ellipsis(msg)))
+            ui.error(b"\n%r\n" % pycompat.bytestr(stringutil.ellipsis(msg)))
     except error.CensoredNodeError as inst:
-        ui.error(_("abort: file censored %s!\n") % inst)
+        ui.error(_(b"abort: file censored %s!\n") % inst)
     except error.StorageError as inst:
-        ui.error(_("abort: %s!\n") % inst)
+        ui.error(_(b"abort: %s!\n") % inst)
         if inst.hint:
-            ui.error(_("(%s)\n") % inst.hint)
+            ui.error(_(b"(%s)\n") % inst.hint)
     except error.InterventionRequired as inst:
-        ui.error("%s\n" % inst)
+        ui.error(b"%s\n" % inst)
         if inst.hint:
-            ui.error(_("(%s)\n") % inst.hint)
+            ui.error(_(b"(%s)\n") % inst.hint)
         return 1
     except error.WdirUnsupported:
-        ui.error(_("abort: working directory revision cannot be specified\n"))
+        ui.error(_(b"abort: working directory revision cannot be specified\n"))
     except error.Abort as inst:
-        ui.error(_("abort: %s\n") % inst)
+        ui.error(_(b"abort: %s\n") % inst)
         if inst.hint:
-            ui.error(_("(%s)\n") % inst.hint)
+            ui.error(_(b"(%s)\n") % inst.hint)
     except ImportError as inst:
-        ui.error(_("abort: %s!\n") % stringutil.forcebytestr(inst))
+        ui.error(_(b"abort: %s!\n") % stringutil.forcebytestr(inst))
         m = stringutil.forcebytestr(inst).split()[-1]
-        if m in "mpatch bdiff".split():
-            ui.error(_("(did you forget to compile extensions?)\n"))
-        elif m in "zlib".split():
-            ui.error(_("(is your Python install correct?)\n"))
+        if m in b"mpatch bdiff".split():
+            ui.error(_(b"(did you forget to compile extensions?)\n"))
+        elif m in b"zlib".split():
+            ui.error(_(b"(is your Python install correct?)\n"))
     except (IOError, OSError) as inst:
-        if util.safehasattr(inst, "code"):  # HTTPError
-            ui.error(_("abort: %s\n") % stringutil.forcebytestr(inst))
-        elif util.safehasattr(inst, "reason"):  # URLError or SSLError
+        if util.safehasattr(inst, b"code"):  # HTTPError
+            ui.error(_(b"abort: %s\n") % stringutil.forcebytestr(inst))
+        elif util.safehasattr(inst, b"reason"):  # URLError or SSLError
             try:  # usually it is in the form (errno, strerror)
                 reason = inst.reason.args[1]
             except (AttributeError, IndexError):
@@ -261,9 +261,9 @@
             if isinstance(reason, pycompat.unicode):
                 # SSLError of Python 2.7.9 contains a unicode
                 reason = encoding.unitolocal(reason)
-            ui.error(_("abort: error: %s\n") % reason)
+            ui.error(_(b"abort: error: %s\n") % reason)
         elif (
-            util.safehasattr(inst, "args")
+            util.safehasattr(inst, b"args")
             and inst.args
             and inst.args[0] == errno.EPIPE
         ):
@@ -271,18 +271,18 @@
         elif getattr(inst, "strerror", None):  # common IOError or OSError
             if getattr(inst, "filename", None) is not None:
                 ui.error(
-                    _("abort: %s: '%s'\n")
+                    _(b"abort: %s: '%s'\n")
                     % (
                         encoding.strtolocal(inst.strerror),
                         stringutil.forcebytestr(inst.filename),
                     )
                 )
             else:
-                ui.error(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
+                ui.error(_(b"abort: %s\n") % encoding.strtolocal(inst.strerror))
         else:  # suspicious IOError
             raise
     except MemoryError:
-        ui.error(_("abort: out of memory\n"))
+        ui.error(_(b"abort: out of memory\n"))
     except SystemExit as inst:
         # Commands shouldn't sys.exit directly, but give a return code.
         # Just in case, catch this and pass the exit code to the caller.
@@ -294,27 +294,27 @@
 def checknewlabel(repo, lbl, kind):
     # Do not use the "kind" parameter in ui output.
     # It makes strings difficult to translate.
-    if lbl in ['tip', '.', 'null']:
-        raise error.Abort(_("the name '%s' is reserved") % lbl)
-    for c in (':', '\0', '\n', '\r'):
+    if lbl in [b'tip', b'.', b'null']:
+        raise error.Abort(_(b"the name '%s' is reserved") % lbl)
+    for c in (b':', b'\0', b'\n', b'\r'):
         if c in lbl:
             raise error.Abort(
-                _("%r cannot be used in a name") % pycompat.bytestr(c)
+                _(b"%r cannot be used in a name") % pycompat.bytestr(c)
             )
     try:
         int(lbl)
-        raise error.Abort(_("cannot use an integer as a name"))
+        raise error.Abort(_(b"cannot use an integer as a name"))
     except ValueError:
         pass
     if lbl.strip() != lbl:
-        raise error.Abort(_("leading or trailing whitespace in name %r") % lbl)
+        raise error.Abort(_(b"leading or trailing whitespace in name %r") % lbl)
 
 
 def checkfilename(f):
     '''Check that the filename f is an acceptable filename for a tracked file'''
-    if '\r' in f or '\n' in f:
+    if b'\r' in f or b'\n' in f:
         raise error.Abort(
-            _("'\\n' and '\\r' disallowed in filenames: %r")
+            _(b"'\\n' and '\\r' disallowed in filenames: %r")
             % pycompat.bytestr(f)
         )
 
@@ -326,23 +326,23 @@
     if abort or warn:
         msg = util.checkwinfilename(f)
         if msg:
-            msg = "%s: %s" % (msg, procutil.shellquote(f))
+            msg = b"%s: %s" % (msg, procutil.shellquote(f))
             if abort:
                 raise error.Abort(msg)
-            ui.warn(_("warning: %s\n") % msg)
+            ui.warn(_(b"warning: %s\n") % msg)
 
 
 def checkportabilityalert(ui):
     '''check if the user's config requests nothing, a warning, or abort for
     non-portable filenames'''
-    val = ui.config('ui', 'portablefilenames')
+    val = ui.config(b'ui', b'portablefilenames')
     lval = val.lower()
     bval = stringutil.parsebool(val)
-    abort = pycompat.iswindows or lval == 'abort'
-    warn = bval or lval == 'warn'
-    if bval is None and not (warn or abort or lval == 'ignore'):
+    abort = pycompat.iswindows or lval == b'abort'
+    warn = bval or lval == b'warn'
+    if bval is None and not (warn or abort or lval == b'ignore'):
         raise error.ConfigError(
-            _("ui.portablefilenames value is invalid ('%s')") % val
+            _(b"ui.portablefilenames value is invalid ('%s')") % val
         )
     return abort, warn
 
@@ -351,8 +351,8 @@
     def __init__(self, ui, abort, dirstate):
         self._ui = ui
         self._abort = abort
-        allfiles = '\0'.join(dirstate)
-        self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
+        allfiles = b'\0'.join(dirstate)
+        self._loweredfiles = set(encoding.lower(allfiles).split(b'\0'))
         self._dirstate = dirstate
         # The purpose of _newfiles is so that we don't complain about
         # case collisions if someone were to call this object with the
@@ -364,10 +364,10 @@
             return
         fl = encoding.lower(f)
         if fl in self._loweredfiles and f not in self._dirstate:
-            msg = _('possible case-folding collision for %s') % f
+            msg = _(b'possible case-folding collision for %s') % f
             if self._abort:
                 raise error.Abort(msg)
-            self._ui.warn(_("warning: %s\n") % msg)
+            self._ui.warn(_(b"warning: %s\n") % msg)
         self._loweredfiles.add(fl)
         self._newfiles.add(f)
 
@@ -392,7 +392,7 @@
     if revs:
         s = hashlib.sha1()
         for rev in revs:
-            s.update('%d;' % rev)
+            s.update(b'%d;' % rev)
         key = s.digest()
     return key
 
@@ -423,14 +423,14 @@
         adddir(seen_dirs, path)
     for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
         dirs.sort()
-        if '.hg' in dirs:
+        if b'.hg' in dirs:
             yield root  # found a repository
-            qroot = os.path.join(root, '.hg', 'patches')
-            if os.path.isdir(os.path.join(qroot, '.hg')):
+            qroot = os.path.join(root, b'.hg', b'patches')
+            if os.path.isdir(os.path.join(qroot, b'.hg')):
                 yield qroot  # we have a patch queue repo here
             if recurse:
                 # avoid recursing inside the .hg directory
-                dirs.remove('.hg')
+                dirs.remove(b'.hg')
             else:
                 dirs[:] = []  # don't descend further
         elif followsym:
@@ -476,12 +476,12 @@
         hexfunc = hex
     else:
         hexfunc = short
-    return '%d:%s' % (rev, hexfunc(node))
+    return b'%d:%s' % (rev, hexfunc(node))
 
 
 def resolvehexnodeidprefix(repo, prefix):
-    if prefix.startswith('x') and repo.ui.configbool(
-        'experimental', 'revisions.prefixhexnode'
+    if prefix.startswith(b'x') and repo.ui.configbool(
+        b'experimental', b'revisions.prefixhexnode'
     ):
         prefix = prefix[1:]
     try:
@@ -489,11 +489,13 @@
         # This matches the shortesthexnodeidprefix() function below.
         node = repo.unfiltered().changelog._partialmatch(prefix)
     except error.AmbiguousPrefixLookupError:
-        revset = repo.ui.config('experimental', 'revisions.disambiguatewithin')
+        revset = repo.ui.config(
+            b'experimental', b'revisions.disambiguatewithin'
+        )
         if revset:
             # Clear config to avoid infinite recursion
             configoverrides = {
-                ('experimental', 'revisions.disambiguatewithin'): None
+                (b'experimental', b'revisions.disambiguatewithin'): None
             }
             with repo.ui.configoverride(configoverrides):
                 revs = repo.anyrevs([revset], user=True)
@@ -540,9 +542,9 @@
 
     def disambiguate(prefix):
         """Disambiguate against revnums."""
-        if repo.ui.configbool('experimental', 'revisions.prefixhexnode'):
+        if repo.ui.configbool(b'experimental', b'revisions.prefixhexnode'):
             if mayberevnum(repo, prefix):
-                return 'x' + prefix
+                return b'x' + prefix
             else:
                 return prefix
 
@@ -553,20 +555,20 @@
                 return prefix
 
     cl = repo.unfiltered().changelog
-    revset = repo.ui.config('experimental', 'revisions.disambiguatewithin')
+    revset = repo.ui.config(b'experimental', b'revisions.disambiguatewithin')
     if revset:
         revs = None
         if cache is not None:
-            revs = cache.get('disambiguationrevset')
+            revs = cache.get(b'disambiguationrevset')
         if revs is None:
             revs = repo.anyrevs([revset], user=True)
             if cache is not None:
-                cache['disambiguationrevset'] = revs
+                cache[b'disambiguationrevset'] = revs
         if cl.rev(node) in revs:
             hexnode = hex(node)
             nodetree = None
             if cache is not None:
-                nodetree = cache.get('disambiguationnodetree')
+                nodetree = cache.get(b'disambiguationnodetree')
             if not nodetree:
                 try:
                     nodetree = parsers.nodetree(cl.index, len(revs))
@@ -577,7 +579,7 @@
                     for r in revs:
                         nodetree.insert(r)
                     if cache is not None:
-                        cache['disambiguationnodetree'] = nodetree
+                        cache[b'disambiguationnodetree'] = nodetree
             if nodetree is not None:
                 length = max(nodetree.shortest(node), minlength)
                 prefix = hexnode[:length]
@@ -620,17 +622,17 @@
     """
     if not isinstance(symbol, bytes):
         msg = (
-            "symbol (%s of type %s) was not a string, did you mean "
-            "repo[symbol]?" % (symbol, type(symbol))
+            b"symbol (%s of type %s) was not a string, did you mean "
+            b"repo[symbol]?" % (symbol, type(symbol))
         )
         raise error.ProgrammingError(msg)
     try:
-        if symbol in ('.', 'tip', 'null'):
+        if symbol in (b'.', b'tip', b'null'):
             return repo[symbol]
 
         try:
             r = int(symbol)
-            if '%d' % r != symbol:
+            if b'%d' % r != symbol:
                 raise ValueError
             l = len(repo.changelog)
             if r < 0:
@@ -666,7 +668,7 @@
             rev = repo.changelog.rev(node)
             return repo[rev]
 
-        raise error.RepoLookupError(_("unknown revision '%s'") % symbol)
+        raise error.RepoLookupError(_(b"unknown revision '%s'") % symbol)
 
     except error.WdirUnsupported:
         return repo[None]
@@ -683,7 +685,7 @@
 
     This is extracted into a function to help extensions (eg: evolve)
     experiment with various message variants."""
-    if repo.filtername.startswith('visible'):
+    if repo.filtername.startswith(b'visible'):
 
         # Check if the changeset is obsolete
         unfilteredrepo = repo.unfiltered()
@@ -694,39 +696,44 @@
         if ctx.obsolete():
             msg = obsutil._getfilteredreason(repo, changeid, ctx)
         else:
-            msg = _("hidden revision '%s'") % changeid
+            msg = _(b"hidden revision '%s'") % changeid
 
-        hint = _('use --hidden to access hidden revisions')
+        hint = _(b'use --hidden to access hidden revisions')
 
         return error.FilteredRepoLookupError(msg, hint=hint)
-    msg = _("filtered revision '%s' (not in '%s' subset)")
+    msg = _(b"filtered revision '%s' (not in '%s' subset)")
     msg %= (changeid, repo.filtername)
     return error.FilteredRepoLookupError(msg)
 
 
-def revsingle(repo, revspec, default='.', localalias=None):
+def revsingle(repo, revspec, default=b'.', localalias=None):
     if not revspec and revspec != 0:
         return repo[default]
 
     l = revrange(repo, [revspec], localalias=localalias)
     if not l:
-        raise error.Abort(_('empty revision set'))
+        raise error.Abort(_(b'empty revision set'))
     return repo[l.last()]
 
 
 def _pairspec(revspec):
     tree = revsetlang.parse(revspec)
-    return tree and tree[0] in ('range', 'rangepre', 'rangepost', 'rangeall')
+    return tree and tree[0] in (
+        b'range',
+        b'rangepre',
+        b'rangepost',
+        b'rangeall',
+    )
 
 
 def revpair(repo, revs):
     if not revs:
-        return repo['.'], repo[None]
+        return repo[b'.'], repo[None]
 
     l = revrange(repo, revs)
 
     if not l:
-        raise error.Abort(_('empty revision range'))
+        raise error.Abort(_(b'empty revision range'))
 
     first = l.first()
     second = l.last()
@@ -736,7 +743,7 @@
         and len(revs) >= 2
         and not all(revrange(repo, [r]) for r in revs)
     ):
-        raise error.Abort(_('empty revision on one side of range'))
+        raise error.Abort(_(b'empty revision on one side of range'))
 
     # if top-level is range expression, the result must always be a pair
     if first == second and len(revs) == 1 and not _pairspec(revs[0]):
@@ -769,7 +776,7 @@
     allspecs = []
     for spec in specs:
         if isinstance(spec, int):
-            spec = revsetlang.formatspec('%d', spec)
+            spec = revsetlang.formatspec(b'%d', spec)
         allspecs.append(spec)
     return repo.anyrevs(allspecs, user=True, localalias=localalias)
 
@@ -808,21 +815,21 @@
     if forcerelativevalue is not None:
         relative = forcerelativevalue
     else:
-        config = repo.ui.config('ui', 'relative-paths')
-        if config == 'legacy':
+        config = repo.ui.config(b'ui', b'relative-paths')
+        if config == b'legacy':
             relative = legacyrelativevalue
         else:
             relative = stringutil.parsebool(config)
             if relative is None:
                 raise error.ConfigError(
-                    _("ui.relative-paths is not a boolean ('%s')") % config
+                    _(b"ui.relative-paths is not a boolean ('%s')") % config
                 )
 
     if relative:
         cwd = repo.getcwd()
         pathto = repo.pathto
         return lambda f: pathto(f, cwd)
-    elif repo.ui.configbool('ui', 'slash'):
+    elif repo.ui.configbool(b'ui', b'slash'):
         return lambda f: f
     else:
         return util.localpath
@@ -839,7 +846,7 @@
     Some commands (e.g. addremove) use this condition for deciding whether to
     print absolute or relative paths.
     '''
-    return bool(pats or opts.get('include') or opts.get('exclude'))
+    return bool(pats or opts.get(b'include') or opts.get(b'exclude'))
 
 
 def expandpats(pats):
@@ -863,30 +870,30 @@
 
 
 def matchandpats(
-    ctx, pats=(), opts=None, globbed=False, default='relpath', badfn=None
+    ctx, pats=(), opts=None, globbed=False, default=b'relpath', badfn=None
 ):
     '''Return a matcher and the patterns that were used.
     The matcher will warn about bad matches, unless an alternate badfn callback
     is provided.'''
     if opts is None:
         opts = {}
-    if not globbed and default == 'relpath':
+    if not globbed and default == b'relpath':
         pats = expandpats(pats or [])
 
     uipathfn = getuipathfn(ctx.repo(), legacyrelativevalue=True)
 
     def bad(f, msg):
-        ctx.repo().ui.warn("%s: %s\n" % (uipathfn(f), msg))
+        ctx.repo().ui.warn(b"%s: %s\n" % (uipathfn(f), msg))
 
     if badfn is None:
         badfn = bad
 
     m = ctx.match(
         pats,
-        opts.get('include'),
-        opts.get('exclude'),
+        opts.get(b'include'),
+        opts.get(b'exclude'),
         default,
-        listsubrepos=opts.get('subrepos'),
+        listsubrepos=opts.get(b'subrepos'),
         badfn=badfn,
     )
 
@@ -896,7 +903,7 @@
 
 
 def match(
-    ctx, pats=(), opts=None, globbed=False, default='relpath', badfn=None
+    ctx, pats=(), opts=None, globbed=False, default=b'relpath', badfn=None
 ):
     '''Return a matcher that will warn about bad matches.'''
     return matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)[0]
@@ -931,7 +938,7 @@
     """return a vfs suitable to save 'orig' file
 
     return None if no special directory is configured"""
-    origbackuppath = ui.config('ui', 'origbackuppath')
+    origbackuppath = ui.config(b'ui', b'origbackuppath')
     if not origbackuppath:
         return None
     return vfs.vfs(repo.wvfs.join(origbackuppath))
@@ -949,16 +956,16 @@
     '''
     origvfs = getorigvfs(ui, repo)
     if origvfs is None:
-        return repo.wjoin(filepath + ".orig")
+        return repo.wjoin(filepath + b".orig")
 
     origbackupdir = origvfs.dirname(filepath)
     if not origvfs.isdir(origbackupdir) or origvfs.islink(origbackupdir):
-        ui.note(_('creating directory: %s\n') % origvfs.join(origbackupdir))
+        ui.note(_(b'creating directory: %s\n') % origvfs.join(origbackupdir))
 
         # Remove any files that conflict with the backup file's path
         for f in reversed(list(util.finddirs(filepath))):
             if origvfs.isfileorlink(f):
-                ui.note(_('removing conflicting file: %s\n') % origvfs.join(f))
+                ui.note(_(b'removing conflicting file: %s\n') % origvfs.join(f))
                 origvfs.unlink(f)
                 break
 
@@ -966,7 +973,7 @@
 
     if origvfs.isdir(filepath) and not origvfs.islink(filepath):
         ui.note(
-            _('removing conflicting directory: %s\n') % origvfs.join(filepath)
+            _(b'removing conflicting directory: %s\n') % origvfs.join(filepath)
         )
         origvfs.rmtree(filepath, forcibly=True)
 
@@ -1014,7 +1021,7 @@
         return
 
     # translate mapping's other forms
-    if not util.safehasattr(replacements, 'items'):
+    if not util.safehasattr(replacements, b'items'):
         replacements = {(n,): () for n in replacements}
     else:
         # upgrading non tuple "source" to tuple ones for BC
@@ -1037,14 +1044,14 @@
                     continue
                 if len(newnodes) > 1:
                     # usually a split, take the one with biggest rev number
-                    newnode = next(unfi.set('max(%ln)', newnodes)).node()
+                    newnode = next(unfi.set(b'max(%ln)', newnodes)).node()
                 elif len(newnodes) == 0:
                     # move bookmark backwards
                     allreplaced = []
                     for rep in replacements:
                         allreplaced.extend(rep)
                     roots = list(
-                        unfi.set('max((::%n) - %ln)', oldnode, allreplaced)
+                        unfi.set(b'max((::%n) - %ln)', oldnode, allreplaced)
                     )
                     if roots:
                         newnode = roots[0].node()
@@ -1086,7 +1093,7 @@
             elif newphase < ctx.phase():
                 toadvance.setdefault(newphase, []).append(newnode)
 
-    with repo.transaction('cleanup') as tr:
+    with repo.transaction(b'cleanup') as tr:
         # Move bookmarks
         bmarks = repo._bookmarks
         bmarkchanges = []
@@ -1097,7 +1104,7 @@
             from . import bookmarks  # avoid import cycle
 
             repo.ui.debug(
-                'moving bookmarks %r from %s to %s\n'
+                b'moving bookmarks %r from %s to %s\n'
                 % (
                     pycompat.rapply(pycompat.maybebytestr, oldbmarks),
                     hex(oldnode),
@@ -1106,7 +1113,7 @@
             )
             # Delete divergent bookmarks being parents of related newnodes
             deleterevs = repo.revs(
-                'parents(roots(%ln & (::%n))) - parents(%n)',
+                b'parents(roots(%ln & (::%n))) - parents(%n)',
                 allnewnodes,
                 newnode,
                 oldnode,
@@ -1125,7 +1132,7 @@
         for phase, nodes in toadvance.items():
             phases.advanceboundary(repo, tr, phase, nodes)
 
-        mayusearchived = repo.ui.config('experimental', 'cleanup-as-archived')
+        mayusearchived = repo.ui.config(b'experimental', b'cleanup-as-archived')
         # Obsolete or strip nodes
         if obsolete.isenabled(repo, obsolete.createmarkersopt):
             # If a node is already obsoleted, and we want to obsolete it
@@ -1171,13 +1178,13 @@
     if opts is None:
         opts = {}
     m = matcher
-    dry_run = opts.get('dry_run')
+    dry_run = opts.get(b'dry_run')
     try:
-        similarity = float(opts.get('similarity') or 0)
+        similarity = float(opts.get(b'similarity') or 0)
     except ValueError:
-        raise error.Abort(_('similarity must be a number'))
+        raise error.Abort(_(b'similarity must be a number'))
     if similarity < 0 or similarity > 100:
-        raise error.Abort(_('similarity must be between 0 and 100'))
+        raise error.Abort(_(b'similarity must be between 0 and 100'))
     similarity /= 100.0
 
     ret = 0
@@ -1185,7 +1192,7 @@
     wctx = repo[None]
     for subpath in sorted(wctx.substate):
         submatch = matchmod.subdirmatcher(subpath, m)
-        if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()):
+        if opts.get(b'subrepos') or m.exact(subpath) or any(submatch.files()):
             sub = wctx.sub(subpath)
             subprefix = repo.wvfs.reljoin(prefix, subpath)
             subuipathfn = subdiruipathfn(subpath, uipathfn)
@@ -1194,7 +1201,7 @@
                     ret = 1
             except error.LookupError:
                 repo.ui.status(
-                    _("skipping missing subrepository: %s\n")
+                    _(b"skipping missing subrepository: %s\n")
                     % uipathfn(subpath)
                 )
 
@@ -1216,11 +1223,11 @@
     for abs in sorted(toprint):
         if repo.ui.verbose or not m.exact(abs):
             if abs in unknownset:
-                status = _('adding %s\n') % uipathfn(abs)
-                label = 'ui.addremove.added'
+                status = _(b'adding %s\n') % uipathfn(abs)
+                label = b'ui.addremove.added'
             else:
-                status = _('removing %s\n') % uipathfn(abs)
-                label = 'ui.addremove.removed'
+                status = _(b'removing %s\n') % uipathfn(abs)
+                label = b'ui.addremove.removed'
             repo.ui.status(status, label=label)
 
     renames = _findrenames(
@@ -1250,9 +1257,9 @@
         toprint.update(deleted)
         for abs in sorted(toprint):
             if abs in unknownset:
-                status = _('adding %s\n') % abs
+                status = _(b'adding %s\n') % abs
             else:
-                status = _('removing %s\n') % abs
+                status = _(b'removing %s\n') % abs
             repo.ui.status(status)
 
     # TODO: We should probably have the caller pass in uipathfn and apply it to
@@ -1292,16 +1299,16 @@
     )
     for abs, st in walkresults.iteritems():
         dstate = dirstate[abs]
-        if dstate == '?' and audit_path.check(abs):
+        if dstate == b'?' and audit_path.check(abs):
             unknown.append(abs)
-        elif dstate != 'r' and not st:
+        elif dstate != b'r' and not st:
             deleted.append(abs)
-        elif dstate == 'r' and st:
+        elif dstate == b'r' and st:
             forgotten.append(abs)
         # for finding renames
-        elif dstate == 'r' and not st:
+        elif dstate == b'r' and not st:
             removed.append(abs)
-        elif dstate == 'a':
+        elif dstate == b'a':
             added.append(abs)
 
     return added, unknown, deleted, removed, forgotten
@@ -1321,8 +1328,8 @@
             ):
                 repo.ui.status(
                     _(
-                        'recording removal of %s as rename to %s '
-                        '(%d%% similar)\n'
+                        b'recording removal of %s as rename to %s '
+                        b'(%d%% similar)\n'
                     )
                     % (uipathfn(old), uipathfn(new), score * 100)
                 )
@@ -1419,19 +1426,19 @@
     """
     origsrc = repo.dirstate.copied(src) or src
     if dst == origsrc:  # copying back a copy?
-        if repo.dirstate[dst] not in 'mn' and not dryrun:
+        if repo.dirstate[dst] not in b'mn' and not dryrun:
             repo.dirstate.normallookup(dst)
     else:
-        if repo.dirstate[origsrc] == 'a' and origsrc == src:
+        if repo.dirstate[origsrc] == b'a' and origsrc == src:
             if not ui.quiet:
                 ui.warn(
                     _(
-                        "%s has not been committed yet, so no copy "
-                        "data will be stored for %s.\n"
+                        b"%s has not been committed yet, so no copy "
+                        b"data will be stored for %s.\n"
                     )
                     % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd))
                 )
-            if repo.dirstate[dst] in '?r' and not dryrun:
+            if repo.dirstate[dst] in b'?r' and not dryrun:
                 wctx.add([dst])
         elif not dryrun:
             wctx.copy(origsrc, dst)
@@ -1444,29 +1451,29 @@
     a matcher that doesn't match all the differences between the parent of the
     working copy and newctx.
     """
-    oldctx = repo['.']
+    oldctx = repo[b'.']
     ds = repo.dirstate
     ds.setparents(newctx.node(), nullid)
     copies = dict(ds.copies())
     s = newctx.status(oldctx, match=match)
     for f in s.modified:
-        if ds[f] == 'r':
+        if ds[f] == b'r':
             # modified + removed -> removed
             continue
         ds.normallookup(f)
 
     for f in s.added:
-        if ds[f] == 'r':
+        if ds[f] == b'r':
             # added + removed -> unknown
             ds.drop(f)
-        elif ds[f] != 'a':
+        elif ds[f] != b'a':
             ds.add(f)
 
     for f in s.removed:
-        if ds[f] == 'a':
+        if ds[f] == b'a':
             # removed + added -> normal
             ds.normallookup(f)
-        elif ds[f] != 'r':
+        elif ds[f] != b'r':
             ds.remove(f)
 
     # Merge old parent and old working dir copies
@@ -1477,15 +1484,15 @@
     )
     # Adjust the dirstate copies
     for dst, src in copies.iteritems():
-        if src not in newctx or dst in newctx or ds[dst] != 'a':
+        if src not in newctx or dst in newctx or ds[dst] != b'a':
             src = None
         ds.copy(src, dst)
 
 
 def writerequires(opener, requirements):
-    with opener('requires', 'w', atomictemp=True) as fp:
+    with opener(b'requires', b'w', atomictemp=True) as fp:
         for r in sorted(requirements):
-            fp.write("%s\n" % r)
+            fp.write(b"%s\n" % r)
 
 
 class filecachesubentry(object):
@@ -1664,14 +1671,14 @@
     remote data sources.
     """
 
-    spec = repo.ui.config("extdata", source)
+    spec = repo.ui.config(b"extdata", source)
     if not spec:
-        raise error.Abort(_("unknown extdata source '%s'") % source)
+        raise error.Abort(_(b"unknown extdata source '%s'") % source)
 
     data = {}
     src = proc = None
     try:
-        if spec.startswith("shell:"):
+        if spec.startswith(b"shell:"):
             # external commands should be run relative to the repo root
             cmd = spec[6:]
             proc = subprocess.Popen(
@@ -1687,10 +1694,10 @@
             # treat as a URL or file
             src = url.open(repo.ui, spec)
         for l in src:
-            if " " in l:
-                k, v = l.strip().split(" ", 1)
+            if b" " in l:
+                k, v = l.strip().split(b" ", 1)
             else:
-                k, v = l.strip(), ""
+                k, v = l.strip(), b""
 
             k = encoding.tolocal(k)
             try:
@@ -1709,7 +1716,7 @@
             src.close()
     if proc and proc.returncode != 0:
         raise error.Abort(
-            _("extdata command '%s' failed: %s")
+            _(b"extdata command '%s' failed: %s")
             % (cmd, procutil.explainexit(proc.returncode))
         )
 
@@ -1719,7 +1726,7 @@
 def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
     if lock is None:
         raise error.LockInheritanceContractViolation(
-            'lock can only be inherited while held'
+            b'lock can only be inherited while held'
         )
     if environ is None:
         environ = {}
@@ -1735,18 +1742,18 @@
     arguments that ui.system does, and returns the exit code of the
     subprocess."""
     return _locksub(
-        repo, repo.currentwlock(), 'HG_WLOCK_LOCKER', cmd, *args, **kwargs
+        repo, repo.currentwlock(), b'HG_WLOCK_LOCKER', cmd, *args, **kwargs
     )
 
 
 class progress(object):
-    def __init__(self, ui, updatebar, topic, unit="", total=None):
+    def __init__(self, ui, updatebar, topic, unit=b"", total=None):
         self.ui = ui
         self.pos = 0
         self.topic = topic
         self.unit = unit
         self.total = total
-        self.debug = ui.configbool('progress', 'debug')
+        self.debug = ui.configbool(b'progress', b'debug')
         self._updatebar = updatebar
 
     def __enter__(self):
@@ -1755,7 +1762,7 @@
     def __exit__(self, exc_type, exc_value, exc_tb):
         self.complete()
 
-    def update(self, pos, item="", total=None):
+    def update(self, pos, item=b"", total=None):
         assert pos is not None
         if total:
             self.total = total
@@ -1764,37 +1771,37 @@
         if self.debug:
             self._printdebug(item)
 
-    def increment(self, step=1, item="", total=None):
+    def increment(self, step=1, item=b"", total=None):
         self.update(self.pos + step, item, total)
 
     def complete(self):
         self.pos = None
-        self.unit = ""
+        self.unit = b""
         self.total = None
-        self._updatebar(self.topic, self.pos, "", self.unit, self.total)
+        self._updatebar(self.topic, self.pos, b"", self.unit, self.total)
 
     def _printdebug(self, item):
         if self.unit:
-            unit = ' ' + self.unit
+            unit = b' ' + self.unit
         if item:
-            item = ' ' + item
+            item = b' ' + item
 
         if self.total:
             pct = 100.0 * self.pos / self.total
             self.ui.debug(
-                '%s:%s %d/%d%s (%4.2f%%)\n'
+                b'%s:%s %d/%d%s (%4.2f%%)\n'
                 % (self.topic, item, self.pos, self.total, unit, pct)
             )
         else:
-            self.ui.debug('%s:%s %d%s\n' % (self.topic, item, self.pos, unit))
+            self.ui.debug(b'%s:%s %d%s\n' % (self.topic, item, self.pos, unit))
 
 
 def gdinitconfig(ui):
     """helper function to know if a repo should be created as general delta
     """
     # experimental config: format.generaldelta
-    return ui.configbool('format', 'generaldelta') or ui.configbool(
-        'format', 'usegeneraldelta'
+    return ui.configbool(b'format', b'generaldelta') or ui.configbool(
+        b'format', b'usegeneraldelta'
     )
 
 
@@ -1802,7 +1809,7 @@
     """helper function to know if incoming delta should be optimised
     """
     # experimental config: format.generaldelta
-    return ui.configbool('format', 'generaldelta')
+    return ui.configbool(b'format', b'generaldelta')
 
 
 class simplekeyvaluefile(object):
@@ -1811,7 +1818,7 @@
     Keys must be alphanumerics and start with a letter, values must not
     contain '\n' characters"""
 
-    firstlinekey = '__firstline'
+    firstlinekey = b'__firstline'
 
     def __init__(self, vfs, path, keys=None):
         self.vfs = vfs
@@ -1827,7 +1834,7 @@
         d = {}
         if firstlinenonkeyval:
             if not lines:
-                e = _("empty simplekeyvalue file")
+                e = _(b"empty simplekeyvalue file")
                 raise error.CorruptedState(e)
             # we don't want to include '\n' in the __firstline
             d[self.firstlinekey] = lines[0][:-1]
@@ -1838,10 +1845,10 @@
             # lines which only contain '\n' therefore are not skipped
             # by 'if line'
             updatedict = dict(
-                line[:-1].split('=', 1) for line in lines if line.strip()
+                line[:-1].split(b'=', 1) for line in lines if line.strip()
             )
             if self.firstlinekey in updatedict:
-                e = _("%r can't be used as a key")
+                e = _(b"%r can't be used as a key")
                 raise error.CorruptedState(e % self.firstlinekey)
             d.update(updatedict)
         except ValueError as e:
@@ -1857,37 +1864,37 @@
         everything else, as it is, not in a key=value form"""
         lines = []
         if firstline is not None:
-            lines.append('%s\n' % firstline)
+            lines.append(b'%s\n' % firstline)
 
         for k, v in data.items():
             if k == self.firstlinekey:
-                e = "key name '%s' is reserved" % self.firstlinekey
+                e = b"key name '%s' is reserved" % self.firstlinekey
                 raise error.ProgrammingError(e)
             if not k[0:1].isalpha():
-                e = "keys must start with a letter in a key-value file"
+                e = b"keys must start with a letter in a key-value file"
                 raise error.ProgrammingError(e)
             if not k.isalnum():
-                e = "invalid key name in a simple key-value file"
+                e = b"invalid key name in a simple key-value file"
                 raise error.ProgrammingError(e)
-            if '\n' in v:
-                e = "invalid value in a simple key-value file"
+            if b'\n' in v:
+                e = b"invalid value in a simple key-value file"
                 raise error.ProgrammingError(e)
-            lines.append("%s=%s\n" % (k, v))
-        with self.vfs(self.path, mode='wb', atomictemp=True) as fp:
-            fp.write(''.join(lines))
+            lines.append(b"%s=%s\n" % (k, v))
+        with self.vfs(self.path, mode=b'wb', atomictemp=True) as fp:
+            fp.write(b''.join(lines))
 
 
 _reportobsoletedsource = [
-    'debugobsolete',
-    'pull',
-    'push',
-    'serve',
-    'unbundle',
+    b'debugobsolete',
+    b'pull',
+    b'push',
+    b'serve',
+    b'unbundle',
 ]
 
 _reportnewcssource = [
-    'pull',
-    'unbundle',
+    b'pull',
+    b'unbundle',
 ]
 
 
@@ -1912,7 +1919,7 @@
 _reportstroubledchangesets = True
 
 
-def registersummarycallback(repo, otr, txnname=''):
+def registersummarycallback(repo, otr, txnname=b''):
     """register a callback to issue a summary after the transaction is closed
     """
 
@@ -1937,22 +1944,22 @@
                 repo = repo.filtered(filtername)
             func(repo, tr)
 
-        newcat = '%02i-txnreport' % len(categories)
+        newcat = b'%02i-txnreport' % len(categories)
         otr.addpostclose(newcat, wrapped)
         categories.append(newcat)
         return wrapped
 
     @reportsummary
     def reportchangegroup(repo, tr):
-        cgchangesets = tr.changes.get('changegroup-count-changesets', 0)
-        cgrevisions = tr.changes.get('changegroup-count-revisions', 0)
-        cgfiles = tr.changes.get('changegroup-count-files', 0)
-        cgheads = tr.changes.get('changegroup-count-heads', 0)
+        cgchangesets = tr.changes.get(b'changegroup-count-changesets', 0)
+        cgrevisions = tr.changes.get(b'changegroup-count-revisions', 0)
+        cgfiles = tr.changes.get(b'changegroup-count-files', 0)
+        cgheads = tr.changes.get(b'changegroup-count-heads', 0)
         if cgchangesets or cgrevisions or cgfiles:
-            htext = ""
+            htext = b""
             if cgheads:
-                htext = _(" (%+d heads)") % cgheads
-            msg = _("added %d changesets with %d changes to %d files%s\n")
+                htext = _(b" (%+d heads)") % cgheads
+            msg = _(b"added %d changesets with %d changes to %d files%s\n")
             repo.ui.status(msg % (cgchangesets, cgrevisions, cgfiles, htext))
 
     if txmatch(_reportobsoletedsource):
@@ -1960,19 +1967,21 @@
         @reportsummary
         def reportobsoleted(repo, tr):
             obsoleted = obsutil.getobsoleted(repo, tr)
-            newmarkers = len(tr.changes.get('obsmarkers', ()))
+            newmarkers = len(tr.changes.get(b'obsmarkers', ()))
             if newmarkers:
-                repo.ui.status(_('%i new obsolescence markers\n') % newmarkers)
+                repo.ui.status(_(b'%i new obsolescence markers\n') % newmarkers)
             if obsoleted:
-                repo.ui.status(_('obsoleted %i changesets\n') % len(obsoleted))
+                repo.ui.status(_(b'obsoleted %i changesets\n') % len(obsoleted))
 
     if obsolete.isenabled(
         repo, obsolete.createmarkersopt
-    ) and repo.ui.configbool('experimental', 'evolution.report-instabilities'):
+    ) and repo.ui.configbool(
+        b'experimental', b'evolution.report-instabilities'
+    ):
         instabilitytypes = [
-            ('orphan', 'orphan'),
-            ('phase-divergent', 'phasedivergent'),
-            ('content-divergent', 'contentdivergent'),
+            (b'orphan', b'orphan'),
+            (b'phase-divergent', b'phasedivergent'),
+            (b'content-divergent', b'contentdivergent'),
         ]
 
         def getinstabilitycounts(repo):
@@ -2003,7 +2012,7 @@
         @reportsummary
         def reportnewcs(repo, tr):
             """Report the range of new revisions pulled/unbundled."""
-            origrepolen = tr.changes.get('origrepolen', len(repo))
+            origrepolen = tr.changes.get(b'origrepolen', len(repo))
             unfi = repo.unfiltered()
             if origrepolen >= len(unfi):
                 return
@@ -2016,29 +2025,29 @@
                 if minrev == maxrev:
                     revrange = minrev
                 else:
-                    revrange = '%s:%s' % (minrev, maxrev)
-                draft = len(repo.revs('%ld and draft()', revs))
-                secret = len(repo.revs('%ld and secret()', revs))
+                    revrange = b'%s:%s' % (minrev, maxrev)
+                draft = len(repo.revs(b'%ld and draft()', revs))
+                secret = len(repo.revs(b'%ld and secret()', revs))
                 if not (draft or secret):
-                    msg = _('new changesets %s\n') % revrange
+                    msg = _(b'new changesets %s\n') % revrange
                 elif draft and secret:
-                    msg = _('new changesets %s (%d drafts, %d secrets)\n')
+                    msg = _(b'new changesets %s (%d drafts, %d secrets)\n')
                     msg %= (revrange, draft, secret)
                 elif draft:
-                    msg = _('new changesets %s (%d drafts)\n')
+                    msg = _(b'new changesets %s (%d drafts)\n')
                     msg %= (revrange, draft)
                 elif secret:
-                    msg = _('new changesets %s (%d secrets)\n')
+                    msg = _(b'new changesets %s (%d secrets)\n')
                     msg %= (revrange, secret)
                 else:
-                    errormsg = 'entered unreachable condition'
+                    errormsg = b'entered unreachable condition'
                     raise error.ProgrammingError(errormsg)
                 repo.ui.status(msg)
 
             # search new changesets directly pulled as obsolete
-            duplicates = tr.changes.get('revduplicates', ())
+            duplicates = tr.changes.get(b'revduplicates', ())
             obsadded = unfi.revs(
-                '(%d: + %ld) and obsolete()', origrepolen, duplicates
+                b'(%d: + %ld) and obsolete()', origrepolen, duplicates
             )
             cl = repo.changelog
             extinctadded = [r for r in obsadded if r not in cl]
@@ -2046,7 +2055,7 @@
                 # They are not just obsolete, but obsolete and invisible;
                 # we call them "extinct" internally, but the term has not been
                 # exposed to users.
-                msg = '(%d other changesets obsolete on arrival)\n'
+                msg = b'(%d other changesets obsolete on arrival)\n'
                 repo.ui.status(msg % len(extinctadded))
 
         @reportsummary
@@ -2054,8 +2063,8 @@
             """Report statistics of phase changes for changesets pre-existing
             pull/unbundle.
             """
-            origrepolen = tr.changes.get('origrepolen', len(repo))
-            phasetracking = tr.changes.get('phases', {})
+            origrepolen = tr.changes.get(b'origrepolen', len(repo))
+            phasetracking = tr.changes.get(b'phases', {})
             if not phasetracking:
                 return
             published = [
@@ -2066,7 +2075,7 @@
             if not published:
                 return
             repo.ui.status(
-                _('%d local changesets published\n') % len(published)
+                _(b'%d local changesets published\n') % len(published)
             )
 
 
@@ -2076,30 +2085,30 @@
     exists as a separate function so that extension can wrap to show more
     information like how to fix instabilities"""
     if delta > 0:
-        return _('%i new %s changesets\n') % (delta, instability)
+        return _(b'%i new %s changesets\n') % (delta, instability)
 
 
 def nodesummaries(repo, nodes, maxnumnodes=4):
     if len(nodes) <= maxnumnodes or repo.ui.verbose:
-        return ' '.join(short(h) for h in nodes)
-    first = ' '.join(short(h) for h in nodes[:maxnumnodes])
-    return _("%s and %d others") % (first, len(nodes) - maxnumnodes)
+        return b' '.join(short(h) for h in nodes)
+    first = b' '.join(short(h) for h in nodes[:maxnumnodes])
+    return _(b"%s and %d others") % (first, len(nodes) - maxnumnodes)
 
 
 def enforcesinglehead(repo, tr, desc, accountclosed=False):
     """check that no named branch has multiple heads"""
-    if desc in ('strip', 'repair'):
+    if desc in (b'strip', b'repair'):
         # skip the logic during strip
         return
-    visible = repo.filtered('visible')
+    visible = repo.filtered(b'visible')
     # possible improvement: we could restrict the check to affected branch
     bm = visible.branchmap()
     for name in bm:
         heads = bm.branchheads(name, closed=accountclosed)
         if len(heads) > 1:
-            msg = _('rejecting multiple heads on branch "%s"')
+            msg = _(b'rejecting multiple heads on branch "%s"')
             msg %= name
-            hint = _('%d heads: %s')
+            hint = _(b'%d heads: %s')
             hint %= (len(heads), nodesummaries(repo, heads))
             raise error.Abort(msg, hint=hint)
 
@@ -2121,11 +2130,11 @@
     returns a repo object with the required changesets unhidden
     """
     if not repo.filtername or not repo.ui.configbool(
-        'experimental', 'directaccess'
+        b'experimental', b'directaccess'
     ):
         return repo
 
-    if repo.filtername not in ('visible', 'visible-hidden'):
+    if repo.filtername not in (b'visible', b'visible-hidden'):
         return repo
 
     symbols = set()
@@ -2145,20 +2154,20 @@
     if not revs:
         return repo
 
-    if hiddentype == 'warn':
+    if hiddentype == b'warn':
         unfi = repo.unfiltered()
-        revstr = ", ".join([pycompat.bytestr(unfi[l]) for l in revs])
+        revstr = b", ".join([pycompat.bytestr(unfi[l]) for l in revs])
         repo.ui.warn(
             _(
-                "warning: accessing hidden changesets for write "
-                "operation: %s\n"
+                b"warning: accessing hidden changesets for write "
+                b"operation: %s\n"
             )
             % revstr
         )
 
     # we have to use a new filtername to separate the branch/tags caches until
     # we can disable these caches when revisions are dynamically pinned.
-    return repo.filtered('visible-hidden', revs)
+    return repo.filtered(b'visible-hidden', revs)
 
 
 def _getrevsfromsymbols(repo, symbols):
@@ -2169,7 +2178,7 @@
     unficl = unfi.changelog
     cl = repo.changelog
     tiprev = len(unficl)
-    allowrevnums = repo.ui.configbool('experimental', 'directaccess.revnums')
+    allowrevnums = repo.ui.configbool(b'experimental', b'directaccess.revnums')
     for s in symbols:
         try:
             n = int(s)
@@ -2201,9 +2210,9 @@
     Select revisions reachable by a given bookmark
     """
     return repo.revs(
-        "ancestors(bookmark(%s)) - "
-        "ancestors(head() and not bookmark(%s)) - "
-        "ancestors(bookmark() and not bookmark(%s))",
+        b"ancestors(bookmark(%s)) - "
+        b"ancestors(head() and not bookmark(%s)) - "
+        b"ancestors(bookmark() and not bookmark(%s))",
         mark,
         mark,
         mark,
--- a/mercurial/scmwindows.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/scmwindows.py	Sun Oct 06 09:48:39 2019 -0400
@@ -17,7 +17,7 @@
     import winreg
 
 # MS-DOS 'more' is the only pager available by default on Windows.
-fallbackpager = 'more'
+fallbackpager = b'more'
 
 
 def systemrcpath():
@@ -25,39 +25,39 @@
     rcpath = []
     filename = win32.executablepath()
     # Use mercurial.ini found in directory with hg.exe
-    progrc = os.path.join(os.path.dirname(filename), 'mercurial.ini')
+    progrc = os.path.join(os.path.dirname(filename), b'mercurial.ini')
     rcpath.append(progrc)
     # Use hgrc.d found in directory with hg.exe
-    progrcd = os.path.join(os.path.dirname(filename), 'hgrc.d')
+    progrcd = os.path.join(os.path.dirname(filename), b'hgrc.d')
     if os.path.isdir(progrcd):
         for f, kind in util.listdir(progrcd):
-            if f.endswith('.rc'):
+            if f.endswith(b'.rc'):
                 rcpath.append(os.path.join(progrcd, f))
     # else look for a system rcpath in the registry
     value = util.lookupreg(
-        'SOFTWARE\\Mercurial', None, winreg.HKEY_LOCAL_MACHINE
+        b'SOFTWARE\\Mercurial', None, winreg.HKEY_LOCAL_MACHINE
     )
     if not isinstance(value, str) or not value:
         return rcpath
     value = util.localpath(value)
     for p in value.split(pycompat.ospathsep):
-        if p.lower().endswith('mercurial.ini'):
+        if p.lower().endswith(b'mercurial.ini'):
             rcpath.append(p)
         elif os.path.isdir(p):
             for f, kind in util.listdir(p):
-                if f.endswith('.rc'):
+                if f.endswith(b'.rc'):
                     rcpath.append(os.path.join(p, f))
     return rcpath
 
 
 def userrcpath():
     '''return os-specific hgrc search path to the user dir'''
-    home = os.path.expanduser('~')
-    path = [os.path.join(home, 'mercurial.ini'), os.path.join(home, '.hgrc')]
-    userprofile = encoding.environ.get('USERPROFILE')
+    home = os.path.expanduser(b'~')
+    path = [os.path.join(home, b'mercurial.ini'), os.path.join(home, b'.hgrc')]
+    userprofile = encoding.environ.get(b'USERPROFILE')
     if userprofile and userprofile != home:
-        path.append(os.path.join(userprofile, 'mercurial.ini'))
-        path.append(os.path.join(userprofile, '.hgrc'))
+        path.append(os.path.join(userprofile, b'mercurial.ini'))
+        path.append(os.path.join(userprofile, b'.hgrc'))
     return path
 
 
--- a/mercurial/server.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/server.py	Sun Oct 06 09:48:39 2019 -0400
@@ -37,15 +37,15 @@
 
     postexecargs = {}
 
-    if opts['daemon_postexec']:
-        for inst in opts['daemon_postexec']:
-            if inst.startswith('unlink:'):
-                postexecargs['unlink'] = inst[7:]
-            elif inst.startswith('chdir:'):
-                postexecargs['chdir'] = inst[6:]
-            elif inst != 'none':
+    if opts[b'daemon_postexec']:
+        for inst in opts[b'daemon_postexec']:
+            if inst.startswith(b'unlink:'):
+                postexecargs[b'unlink'] = inst[7:]
+            elif inst.startswith(b'chdir:'):
+                postexecargs[b'chdir'] = inst[6:]
+            elif inst != b'none':
                 raise error.Abort(
-                    _('invalid value for --daemon-postexec: %s') % inst
+                    _(b'invalid value for --daemon-postexec: %s') % inst
                 )
 
     # When daemonized on Windows, redirect stdout/stderr to the lockfile (which
@@ -53,13 +53,15 @@
     # read and print the error if this child dies early.  See 594dd384803c.  On
     # other platforms, the child can write to the parent's stdio directly, until
     # it is redirected prior to runfn().
-    if pycompat.iswindows and opts['daemon_postexec']:
-        if 'unlink' in postexecargs and os.path.exists(postexecargs['unlink']):
+    if pycompat.iswindows and opts[b'daemon_postexec']:
+        if b'unlink' in postexecargs and os.path.exists(
+            postexecargs[b'unlink']
+        ):
             procutil.stdout.flush()
             procutil.stderr.flush()
 
             fd = os.open(
-                postexecargs['unlink'], os.O_WRONLY | os.O_APPEND | os.O_BINARY
+                postexecargs[b'unlink'], os.O_WRONLY | os.O_APPEND | os.O_BINARY
             )
             try:
                 os.dup2(fd, procutil.stdout.fileno())
@@ -68,30 +70,30 @@
                 os.close(fd)
 
     def writepid(pid):
-        if opts['pid_file']:
+        if opts[b'pid_file']:
             if appendpid:
-                mode = 'ab'
+                mode = b'ab'
             else:
-                mode = 'wb'
-            fp = open(opts['pid_file'], mode)
-            fp.write('%d\n' % pid)
+                mode = b'wb'
+            fp = open(opts[b'pid_file'], mode)
+            fp.write(b'%d\n' % pid)
             fp.close()
 
-    if opts['daemon'] and not opts['daemon_postexec']:
+    if opts[b'daemon'] and not opts[b'daemon_postexec']:
         # Signal child process startup with file removal
-        lockfd, lockpath = pycompat.mkstemp(prefix='hg-service-')
+        lockfd, lockpath = pycompat.mkstemp(prefix=b'hg-service-')
         os.close(lockfd)
         try:
             if not runargs:
                 runargs = procutil.hgcmd() + pycompat.sysargv[1:]
-            runargs.append('--daemon-postexec=unlink:%s' % lockpath)
+            runargs.append(b'--daemon-postexec=unlink:%s' % lockpath)
             # Don't pass --cwd to the child process, because we've already
             # changed directory.
             for i in pycompat.xrange(1, len(runargs)):
-                if runargs[i].startswith('--cwd='):
+                if runargs[i].startswith(b'--cwd='):
                     del runargs[i]
                     break
-                elif runargs[i].startswith('--cwd'):
+                elif runargs[i].startswith(b'--cwd'):
                     del runargs[i : i + 2]
                     break
 
@@ -103,10 +105,10 @@
                 # If the daemonized process managed to write out an error msg,
                 # report it.
                 if pycompat.iswindows and os.path.exists(lockpath):
-                    with open(lockpath, 'rb') as log:
+                    with open(lockpath, b'rb') as log:
                         for line in log:
                             procutil.stderr.write(line)
-                raise error.Abort(_('child process failed to start'))
+                raise error.Abort(_(b'child process failed to start'))
             writepid(pid)
         finally:
             util.tryunlink(lockpath)
@@ -118,17 +120,17 @@
     if initfn:
         initfn()
 
-    if not opts['daemon']:
+    if not opts[b'daemon']:
         writepid(procutil.getpid())
 
-    if opts['daemon_postexec']:
+    if opts[b'daemon_postexec']:
         try:
             os.setsid()
         except AttributeError:
             pass
 
-        if 'chdir' in postexecargs:
-            os.chdir(postexecargs['chdir'])
+        if b'chdir' in postexecargs:
+            os.chdir(postexecargs[b'chdir'])
         procutil.hidewindow()
         procutil.stdout.flush()
         procutil.stderr.flush()
@@ -154,34 +156,34 @@
 
         # Only unlink after redirecting stdout/stderr, so Windows doesn't
         # complain about a sharing violation.
-        if 'unlink' in postexecargs:
-            os.unlink(postexecargs['unlink'])
+        if b'unlink' in postexecargs:
+            os.unlink(postexecargs[b'unlink'])
 
     if runfn:
         return runfn()
 
 
 _cmdservicemap = {
-    'chgunix': chgserver.chgunixservice,
-    'pipe': commandserver.pipeservice,
-    'unix': commandserver.unixforkingservice,
+    b'chgunix': chgserver.chgunixservice,
+    b'pipe': commandserver.pipeservice,
+    b'unix': commandserver.unixforkingservice,
 }
 
 
 def _createcmdservice(ui, repo, opts):
-    mode = opts['cmdserver']
+    mode = opts[b'cmdserver']
     try:
         servicefn = _cmdservicemap[mode]
     except KeyError:
-        raise error.Abort(_('unknown mode %s') % mode)
+        raise error.Abort(_(b'unknown mode %s') % mode)
     commandserver.setuplogging(ui, repo)
     return servicefn(ui, repo, opts)
 
 
 def _createhgwebservice(ui, repo, opts):
     # this way we can check if something was given in the command-line
-    if opts.get('port'):
-        opts['port'] = util.getport(opts.get('port'))
+    if opts.get(b'port'):
+        opts[b'port'] = util.getport(opts.get(b'port'))
 
     alluis = {ui}
     if repo:
@@ -189,44 +191,44 @@
         alluis.update([repo.baseui, repo.ui])
     else:
         baseui = ui
-    webconf = opts.get('web_conf') or opts.get('webdir_conf')
+    webconf = opts.get(b'web_conf') or opts.get(b'webdir_conf')
     if webconf:
-        if opts.get('subrepos'):
-            raise error.Abort(_('--web-conf cannot be used with --subrepos'))
+        if opts.get(b'subrepos'):
+            raise error.Abort(_(b'--web-conf cannot be used with --subrepos'))
 
         # load server settings (e.g. web.port) to "copied" ui, which allows
         # hgwebdir to reload webconf cleanly
         servui = ui.copy()
-        servui.readconfig(webconf, sections=['web'])
+        servui.readconfig(webconf, sections=[b'web'])
         alluis.add(servui)
-    elif opts.get('subrepos'):
+    elif opts.get(b'subrepos'):
         servui = ui
 
         # If repo is None, hgweb.createapp() already raises a proper abort
         # message as long as webconf is None.
         if repo:
             webconf = dict()
-            cmdutil.addwebdirpath(repo, "", webconf)
+            cmdutil.addwebdirpath(repo, b"", webconf)
     else:
         servui = ui
 
     optlist = (
-        "name templates style address port prefix ipv6"
-        " accesslog errorlog certificate encoding"
+        b"name templates style address port prefix ipv6"
+        b" accesslog errorlog certificate encoding"
     )
     for o in optlist.split():
-        val = opts.get(o, '')
-        if val in (None, ''):  # should check against default options instead
+        val = opts.get(o, b'')
+        if val in (None, b''):  # should check against default options instead
             continue
         for u in alluis:
-            u.setconfig("web", o, val, 'serve')
+            u.setconfig(b"web", o, val, b'serve')
 
     app = hgweb.createapp(baseui, repo, webconf)
     return hgweb.httpservice(servui, app, opts)
 
 
 def createservice(ui, repo, opts):
-    if opts["cmdserver"]:
+    if opts[b"cmdserver"]:
         return _createcmdservice(ui, repo, opts)
     else:
         return _createhgwebservice(ui, repo, opts)
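# Illustrative sketch, not part of the patch: the lookups above move to bytes
# keys because the opts mapping these helpers receive is bytes-keyed on
# Python 3. The dict literal and helper name below are invented for this
# note; only the b'' keying mirrors the hunk above:
def _createservice_demo(opts):
    # a str key like opts['cmdserver'] would raise KeyError on this dict
    if opts[b'cmdserver']:
        return b'command server'
    return b'hgweb'

assert _createservice_demo({b'cmdserver': b'', b'port': b'8000'}) == b'hgweb'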
--- a/mercurial/setdiscovery.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/setdiscovery.py	Sun Oct 06 09:48:39 2019 -0400
@@ -140,7 +140,7 @@
 
     def addmissings(self, missings):
         """register some nodes as missing"""
-        newmissing = self._repo.revs('%ld::%ld', missings, self.undecided)
+        newmissing = self._repo.revs(b'%ld::%ld', missings, self.undecided)
         if newmissing:
             self.missing.update(newmissing)
             self.undecided.difference_update(newmissing)
@@ -176,7 +176,7 @@
 
     def stats(self):
         return {
-            'undecided': len(self.undecided),
+            b'undecided': len(self.undecided),
         }
 
     def commonheads(self):
@@ -234,7 +234,7 @@
         revs = self.undecided
         if len(revs) <= size:
             return list(revs)
-        sample = set(self._repo.revs('heads(%ld)', revs))
+        sample = set(self._repo.revs(b'heads(%ld)', revs))
 
         if len(sample) >= size:
             return _limitsample(sample, size, randomize=self.randomize)
@@ -249,7 +249,7 @@
         if len(revs) <= size:
             return list(revs)
         repo = self._repo
-        sample = set(repo.revs('heads(%ld)', revs))
+        sample = set(repo.revs(b'heads(%ld)', revs))
         parentrevs = self._parentsgetter()
 
         # update from heads
@@ -257,7 +257,7 @@
         _updatesample(revs, revsheads, sample, parentrevs)
 
         # update from roots
-        revsroots = set(repo.revs('roots(%ld)', revs))
+        revsroots = set(repo.revs(b'roots(%ld)', revs))
         childrenrevs = self._childrengetter()
         _updatesample(revs, revsroots, sample, childrenrevs)
         assert sample
@@ -308,7 +308,7 @@
         ownheads = [rev for rev in cl.headrevs() if rev != nullrev]
 
     # early exit if we know all the specified remote heads already
-    ui.debug("query 1; heads\n")
+    ui.debug(b"query 1; heads\n")
     roundtrips += 1
     # We also ask remote about all the local heads. That set can be arbitrarily
     # large, so we used to limit its size to `initialsamplesize`. We no longer
@@ -369,8 +369,10 @@
         sample = ownheads
 
     with remote.commandexecutor() as e:
-        fheads = e.callcommand('heads', {})
-        fknown = e.callcommand('known', {'nodes': [clnode(r) for r in sample],})
+        fheads = e.callcommand(b'heads', {})
+        fknown = e.callcommand(
+            b'known', {b'nodes': [clnode(r) for r in sample],}
+        )
 
     srvheadhashes, yesno = fheads.result(), fknown.result()
 
@@ -381,7 +383,7 @@
 
     # start actual discovery (we note this before the next "if" for
     # compatibility reasons)
-    ui.status(_("searching for changes\n"))
+    ui.status(_(b"searching for changes\n"))
 
     knownsrvheads = []  # revnos of remote heads that are known locally
     for node in srvheadhashes:
@@ -395,17 +397,17 @@
             continue
 
     if len(knownsrvheads) == len(srvheadhashes):
-        ui.debug("all remote heads known locally\n")
+        ui.debug(b"all remote heads known locally\n")
         return srvheadhashes, False, srvheadhashes
 
     if len(sample) == len(ownheads) and all(yesno):
-        ui.note(_("all local changesets known remotely\n"))
+        ui.note(_(b"all local changesets known remotely\n"))
         ownheadhashes = [clnode(r) for r in ownheads]
         return ownheadhashes, True, srvheadhashes
 
     # full blown discovery
 
-    randomize = ui.configbool('devel', 'discovery.randomize')
+    randomize = ui.configbool(b'devel', b'discovery.randomize')
     disco = partialdiscovery(
         local, ownheads, remote.limitedarguments, randomize=randomize
     )
@@ -415,21 +417,21 @@
     disco.addinfo(zip(sample, yesno))
 
     full = False
-    progress = ui.makeprogress(_('searching'), unit=_('queries'))
+    progress = ui.makeprogress(_(b'searching'), unit=_(b'queries'))
     while not disco.iscomplete():
 
         if full or disco.hasinfo():
             if full:
-                ui.note(_("sampling from both directions\n"))
+                ui.note(_(b"sampling from both directions\n"))
             else:
-                ui.debug("taking initial sample\n")
+                ui.debug(b"taking initial sample\n")
             samplefunc = disco.takefullsample
             targetsize = fullsamplesize
             if not remote.limitedarguments:
                 fullsamplesize = int(fullsamplesize * samplegrowth)
         else:
             # use even cheaper initial sample
-            ui.debug("taking quick initial sample\n")
+            ui.debug(b"taking quick initial sample\n")
             samplefunc = disco.takequicksample
             targetsize = initialsamplesize
         sample = samplefunc(ownheads, targetsize)
@@ -438,8 +440,8 @@
         progress.update(roundtrips)
         stats = disco.stats()
         ui.debug(
-            "query %i; still undecided: %i, sample size is: %i\n"
-            % (roundtrips, stats['undecided'], len(sample))
+            b"query %i; still undecided: %i, sample size is: %i\n"
+            % (roundtrips, stats[b'undecided'], len(sample))
         )
 
         # indices between sample and externalized version must match
@@ -447,7 +449,7 @@
 
         with remote.commandexecutor() as e:
             yesno = e.callcommand(
-                'known', {'nodes': [clnode(r) for r in sample],}
+                b'known', {b'nodes': [clnode(r) for r in sample],}
             ).result()
 
         full = True
@@ -457,19 +459,19 @@
     result = disco.commonheads()
     elapsed = util.timer() - start
     progress.complete()
-    ui.debug("%d total queries in %.4fs\n" % (roundtrips, elapsed))
+    ui.debug(b"%d total queries in %.4fs\n" % (roundtrips, elapsed))
     msg = (
-        'found %d common and %d unknown server heads,'
-        ' %d roundtrips in %.4fs\n'
+        b'found %d common and %d unknown server heads,'
+        b' %d roundtrips in %.4fs\n'
     )
     missing = set(result) - set(knownsrvheads)
-    ui.log('discovery', msg, len(result), len(missing), roundtrips, elapsed)
+    ui.log(b'discovery', msg, len(result), len(missing), roundtrips, elapsed)
 
     if not result and srvheadhashes != [nullid]:
         if abortwhenunrelated:
-            raise error.Abort(_("repository is unrelated"))
+            raise error.Abort(_(b"repository is unrelated"))
         else:
-            ui.warn(_("warning: repository is unrelated\n"))
+            ui.warn(_(b"warning: repository is unrelated\n"))
         return (
             {nullid},
             True,
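# Illustrative sketch, not part of the patch: the b"query %i; ..." debug
# strings above rely on bytes %-formatting, available since Python 3.5
# (PEP 461). The values here are made up; the format string is the one from
# the hunk above:
roundtrips, undecided, samplesize = 2, 40, 200
msg = b"query %i; still undecided: %i, sample size is: %i\n" % (
    roundtrips,
    undecided,
    samplesize,
)
assert msg == b"query 2; still undecided: 40, sample size is: 200\n"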
--- a/mercurial/shelve.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/shelve.py	Sun Oct 06 09:48:39 2019 -0400
@@ -56,15 +56,15 @@
     stringutil,
 )
 
-backupdir = 'shelve-backup'
-shelvedir = 'shelved'
-shelvefileextensions = ['hg', 'patch', 'shelve']
+backupdir = b'shelve-backup'
+shelvedir = b'shelved'
+shelvefileextensions = [b'hg', b'patch', b'shelve']
 # universal extension is present in all types of shelves
-patchextension = 'patch'
+patchextension = b'patch'
 
 # we never need the user, so we use a
 # generic user for all shelve operations
-shelveuser = 'shelve@localhost'
+shelveuser = b'shelve@localhost'
 
 
 class shelvedfile(object):
@@ -80,7 +80,7 @@
         self.backupvfs = vfsmod.vfs(repo.vfs.join(backupdir))
         self.ui = self.repo.ui
         if filetype:
-            self.fname = name + '.' + filetype
+            self.fname = name + b'.' + filetype
         else:
             self.fname = name
 
@@ -93,9 +93,9 @@
     def backupfilename(self):
         def gennames(base):
             yield base
-            base, ext = base.rsplit('.', 1)
+            base, ext = base.rsplit(b'.', 1)
             for i in itertools.count(1):
-                yield '%s-%d.%s' % (base, i, ext)
+                yield b'%s-%d.%s' % (base, i, ext)
 
         name = self.backupvfs.join(self.fname)
         for n in gennames(name):
@@ -110,13 +110,13 @@
     def stat(self):
         return self.vfs.stat(self.fname)
 
-    def opener(self, mode='rb'):
+    def opener(self, mode=b'rb'):
         try:
             return self.vfs(self.fname, mode)
         except IOError as err:
             if err.errno != errno.ENOENT:
                 raise
-            raise error.Abort(_("shelved change '%s' not found") % self.name)
+            raise error.Abort(_(b"shelved change '%s' not found") % self.name)
 
     def applybundle(self, tr):
         fp = self.opener()
@@ -125,18 +125,18 @@
             if not phases.supportinternal(self.repo):
                 targetphase = phases.secret
             gen = exchange.readbundle(self.repo.ui, fp, self.fname, self.vfs)
-            pretip = self.repo['tip']
+            pretip = self.repo[b'tip']
             bundle2.applybundle(
                 self.repo,
                 gen,
                 tr,
-                source='unshelve',
-                url='bundle:' + self.vfs.join(self.fname),
+                source=b'unshelve',
+                url=b'bundle:' + self.vfs.join(self.fname),
                 targetphase=targetphase,
             )
-            shelvectx = self.repo['tip']
+            shelvectx = self.repo[b'tip']
             if pretip == shelvectx:
-                shelverev = tr.changes['revduplicates'][-1]
+                shelverev = tr.changes[b'revduplicates'][-1]
                 shelvectx = self.repo[shelverev]
             return shelvectx
         finally:
@@ -145,24 +145,24 @@
     def bundlerepo(self):
         path = self.vfs.join(self.fname)
         return bundlerepo.instance(
-            self.repo.baseui, 'bundle://%s+%s' % (self.repo.root, path)
+            self.repo.baseui, b'bundle://%s+%s' % (self.repo.root, path)
         )
 
     def writebundle(self, bases, node):
         cgversion = changegroup.safeversion(self.repo)
-        if cgversion == '01':
-            btype = 'HG10BZ'
+        if cgversion == b'01':
+            btype = b'HG10BZ'
             compression = None
         else:
-            btype = 'HG20'
-            compression = 'BZ'
+            btype = b'HG20'
+            compression = b'BZ'
 
         repo = self.repo.unfiltered()
 
         outgoing = discovery.outgoing(
             repo, missingroots=bases, missingheads=[node]
         )
-        cg = changegroup.makechangegroup(repo, outgoing, cgversion, 'shelve')
+        cg = changegroup.makechangegroup(repo, outgoing, cgversion, b'shelve')
 
         bundle2.writebundle(
             self.ui, cg, self.fname, btype, self.vfs, compression=compression
@@ -183,22 +183,22 @@
     """
 
     _version = 2
-    _filename = 'shelvedstate'
-    _keep = 'keep'
-    _nokeep = 'nokeep'
+    _filename = b'shelvedstate'
+    _keep = b'keep'
+    _nokeep = b'nokeep'
     # colon is essential to differentiate from a real bookmark name
-    _noactivebook = ':no-active-bookmark'
-    _interactive = 'interactive'
+    _noactivebook = b':no-active-bookmark'
+    _interactive = b'interactive'
 
     @classmethod
     def _verifyandtransform(cls, d):
         """Some basic shelvestate syntactic verification and transformation"""
         try:
-            d['originalwctx'] = nodemod.bin(d['originalwctx'])
-            d['pendingctx'] = nodemod.bin(d['pendingctx'])
-            d['parents'] = [nodemod.bin(h) for h in d['parents'].split(' ')]
-            d['nodestoremove'] = [
-                nodemod.bin(h) for h in d['nodestoremove'].split(' ')
+            d[b'originalwctx'] = nodemod.bin(d[b'originalwctx'])
+            d[b'pendingctx'] = nodemod.bin(d[b'pendingctx'])
+            d[b'parents'] = [nodemod.bin(h) for h in d[b'parents'].split(b' ')]
+            d[b'nodestoremove'] = [
+                nodemod.bin(h) for h in d[b'nodestoremove'].split(b' ')
             ]
         except (ValueError, TypeError, KeyError) as err:
             raise error.CorruptedState(pycompat.bytestr(err))
@@ -222,15 +222,15 @@
         # to determine values of fields (e.g. name is on the second line,
         # originalwctx is on the third and so forth). Please do not change.
         keys = [
-            'version',
-            'name',
-            'originalwctx',
-            'pendingctx',
-            'parents',
-            'nodestoremove',
-            'branchtorestore',
-            'keep',
-            'activebook',
+            b'version',
+            b'name',
+            b'originalwctx',
+            b'pendingctx',
+            b'parents',
+            b'nodestoremove',
+            b'branchtorestore',
+            b'keep',
+            b'activebook',
         ]
         # this is executed only rarely, so it is not a big deal
         # that we open this file twice
@@ -255,25 +255,25 @@
         else:
             raise error.Abort(
                 _(
-                    'this version of shelve is incompatible '
-                    'with the version used in this repo'
+                    b'this version of shelve is incompatible '
+                    b'with the version used in this repo'
                 )
             )
 
         cls._verifyandtransform(d)
         try:
             obj = cls()
-            obj.name = d['name']
-            obj.wctx = repo[d['originalwctx']]
-            obj.pendingctx = repo[d['pendingctx']]
-            obj.parents = d['parents']
-            obj.nodestoremove = d['nodestoremove']
-            obj.branchtorestore = d.get('branchtorestore', '')
-            obj.keep = d.get('keep') == cls._keep
-            obj.activebookmark = ''
-            if d.get('activebook', '') != cls._noactivebook:
-                obj.activebookmark = d.get('activebook', '')
-            obj.interactive = d.get('interactive') == cls._interactive
+            obj.name = d[b'name']
+            obj.wctx = repo[d[b'originalwctx']]
+            obj.pendingctx = repo[d[b'pendingctx']]
+            obj.parents = d[b'parents']
+            obj.nodestoremove = d[b'nodestoremove']
+            obj.branchtorestore = d.get(b'branchtorestore', b'')
+            obj.keep = d.get(b'keep') == cls._keep
+            obj.activebookmark = b''
+            if d.get(b'activebook', b'') != cls._noactivebook:
+                obj.activebookmark = d.get(b'activebook', b'')
+            obj.interactive = d.get(b'interactive') == cls._interactive
         except (error.RepoLookupError, KeyError) as err:
             raise error.CorruptedState(pycompat.bytestr(err))
 
@@ -289,25 +289,27 @@
         nodestoremove,
         branchtorestore,
         keep=False,
-        activebook='',
+        activebook=b'',
         interactive=False,
     ):
         info = {
-            "name": name,
-            "originalwctx": nodemod.hex(originalwctx.node()),
-            "pendingctx": nodemod.hex(pendingctx.node()),
-            "parents": ' '.join(
+            b"name": name,
+            b"originalwctx": nodemod.hex(originalwctx.node()),
+            b"pendingctx": nodemod.hex(pendingctx.node()),
+            b"parents": b' '.join(
                 [nodemod.hex(p) for p in repo.dirstate.parents()]
             ),
-            "nodestoremove": ' '.join([nodemod.hex(n) for n in nodestoremove]),
-            "branchtorestore": branchtorestore,
-            "keep": cls._keep if keep else cls._nokeep,
-            "activebook": activebook or cls._noactivebook,
+            b"nodestoremove": b' '.join(
+                [nodemod.hex(n) for n in nodestoremove]
+            ),
+            b"branchtorestore": branchtorestore,
+            b"keep": cls._keep if keep else cls._nokeep,
+            b"activebook": activebook or cls._noactivebook,
         }
         if interactive:
-            info['interactive'] = cls._interactive
+            info[b'interactive'] = cls._interactive
         scmutil.simplekeyvaluefile(repo.vfs, cls._filename).write(
-            info, firstline=("%d" % cls._version)
+            info, firstline=(b"%d" % cls._version)
         )
 
     @classmethod
@@ -317,8 +319,8 @@
 
 def cleanupoldbackups(repo):
     vfs = vfsmod.vfs(repo.vfs.join(backupdir))
-    maxbackups = repo.ui.configint('shelve', 'maxbackups')
-    hgfiles = [f for f in vfs.listdir() if f.endswith('.' + patchextension)]
+    maxbackups = repo.ui.configint(b'shelve', b'maxbackups')
+    hgfiles = [f for f in vfs.listdir() if f.endswith(b'.' + patchextension)]
     hgfiles = sorted([(vfs.stat(f)[stat.ST_MTIME], f) for f in hgfiles])
     if maxbackups > 0 and maxbackups < len(hgfiles):
         bordermtime = hgfiles[-maxbackups][0]
@@ -330,7 +332,7 @@
             continue
         base = f[: -(1 + len(patchextension))]
         for ext in shelvefileextensions:
-            vfs.tryunlink(base + '.' + ext)
+            vfs.tryunlink(base + b'.' + ext)
 
 
 def _backupactivebookmark(repo):
@@ -348,7 +350,7 @@
 def _aborttransaction(repo, tr):
     '''Abort current transaction for shelve/unshelve, but keep dirstate
     '''
-    dirstatebackupname = 'dirstate.shelve'
+    dirstatebackupname = b'dirstate.shelve'
     repo.dirstate.savebackup(tr, dirstatebackupname)
     tr.abort()
     repo.dirstate.restorebackup(None, dirstatebackupname)
@@ -360,27 +362,29 @@
     def gennames():
         yield label
         for i in itertools.count(1):
-            yield '%s-%02d' % (label, i)
+            yield b'%s-%02d' % (label, i)
 
-    name = opts.get('name')
-    label = repo._activebookmark or parent.branch() or 'default'
+    name = opts.get(b'name')
+    label = repo._activebookmark or parent.branch() or b'default'
     # slashes aren't allowed in filenames, therefore we replace them
-    label = label.replace('/', '_')
-    label = label.replace('\\', '_')
+    label = label.replace(b'/', b'_')
+    label = label.replace(b'\\', b'_')
     # filenames must not start with '.', as the file should not be hidden
-    if label.startswith('.'):
-        label = label.replace('.', '_', 1)
+    if label.startswith(b'.'):
+        label = label.replace(b'.', b'_', 1)
 
     if name:
         if shelvedfile(repo, name, patchextension).exists():
-            e = _("a shelved change named '%s' already exists") % name
+            e = _(b"a shelved change named '%s' already exists") % name
             raise error.Abort(e)
 
         # ensure we are not creating a subdirectory or a hidden file
-        if '/' in name or '\\' in name:
-            raise error.Abort(_('shelved change names can not contain slashes'))
-        if name.startswith('.'):
-            raise error.Abort(_("shelved change names can not start with '.'"))
+        if b'/' in name or b'\\' in name:
+            raise error.Abort(
+                _(b'shelved change names can not contain slashes')
+            )
+        if name.startswith(b'.'):
+            raise error.Abort(_(b"shelved change names can not start with '.'"))
 
     else:
         for n in gennames():
@@ -411,25 +415,25 @@
 
 def getcommitfunc(extra, interactive, editor=False):
     def commitfunc(ui, repo, message, match, opts):
-        hasmq = util.safehasattr(repo, 'mq')
+        hasmq = util.safehasattr(repo, b'mq')
         if hasmq:
             saved, repo.mq.checkapplied = repo.mq.checkapplied, False
 
         targetphase = phases.internal
         if not phases.supportinternal(repo):
             targetphase = phases.secret
-        overrides = {('phases', 'new-commit'): targetphase}
+        overrides = {(b'phases', b'new-commit'): targetphase}
         try:
             editor_ = False
             if editor:
                 editor_ = cmdutil.getcommiteditor(
-                    editform='shelve.shelve', **pycompat.strkwargs(opts)
+                    editform=b'shelve.shelve', **pycompat.strkwargs(opts)
                 )
             with repo.ui.configoverride(overrides):
                 return repo.commit(
                     message,
                     shelveuser,
-                    opts.get('date'),
+                    opts.get(b'date'),
                     match,
                     editor=editor_,
                     extra=extra,
@@ -440,8 +444,8 @@
 
     def interactivecommitfunc(ui, repo, *pats, **opts):
         opts = pycompat.byteskwargs(opts)
-        match = scmutil.match(repo['.'], pats, {})
-        message = opts['message']
+        match = scmutil.match(repo[b'.'], pats, {})
+        message = opts[b'message']
         return commitfunc(ui, repo, message, match, opts)
 
     return interactivecommitfunc if interactive else commitfunc
@@ -451,19 +455,19 @@
     stat = repo.status(match=scmutil.match(repo[None], pats, opts))
     if stat.deleted:
         ui.status(
-            _("nothing changed (%d missing files, see " "'hg status')\n")
+            _(b"nothing changed (%d missing files, see " b"'hg status')\n")
             % len(stat.deleted)
         )
     else:
-        ui.status(_("nothing changed\n"))
+        ui.status(_(b"nothing changed\n"))
 
 
 def _shelvecreatedcommit(repo, node, name, match):
-    info = {'node': nodemod.hex(node)}
-    shelvedfile(repo, name, 'shelve').writeinfo(info)
+    info = {b'node': nodemod.hex(node)}
+    shelvedfile(repo, name, b'shelve').writeinfo(info)
     bases = list(mutableancestors(repo[node]))
-    shelvedfile(repo, name, 'hg').writebundle(bases, node)
-    with shelvedfile(repo, name, patchextension).opener('wb') as fp:
+    shelvedfile(repo, name, b'hg').writebundle(bases, node)
+    with shelvedfile(repo, name, patchextension).opener(b'wb') as fp:
         cmdutil.exportfile(
             repo, [node], fp, opts=mdiff.diffopts(git=True), match=match
         )
@@ -472,7 +476,7 @@
 def _includeunknownfiles(repo, pats, opts, extra):
     s = repo.status(match=scmutil.match(repo[None], pats, opts), unknown=True)
     if s.unknown:
-        extra['shelve_unknown'] = '\0'.join(s.unknown)
+        extra[b'shelve_unknown'] = b'\0'.join(s.unknown)
         repo[None].add(s.unknown)
 
 
@@ -497,12 +501,12 @@
     origbranch = wctx.branch()
 
     if parent.node() != nodemod.nullid:
-        desc = "changes to: %s" % parent.description().split('\n', 1)[0]
+        desc = b"changes to: %s" % parent.description().split(b'\n', 1)[0]
     else:
-        desc = '(changes in empty repository)'
+        desc = b'(changes in empty repository)'
 
-    if not opts.get('message'):
-        opts['message'] = desc
+    if not opts.get(b'message'):
+        opts[b'message'] = desc
 
     lock = tr = activebookmark = None
     try:
@@ -510,23 +514,23 @@
 
         # use an uncommitted transaction to generate the bundle to avoid
         # pull races. ensure we don't print the abort message to stderr.
-        tr = repo.transaction('shelve', report=lambda x: None)
+        tr = repo.transaction(b'shelve', report=lambda x: None)
 
-        interactive = opts.get('interactive', False)
-        includeunknown = opts.get('unknown', False) and not opts.get(
-            'addremove', False
+        interactive = opts.get(b'interactive', False)
+        includeunknown = opts.get(b'unknown', False) and not opts.get(
+            b'addremove', False
         )
 
         name = getshelvename(repo, parent, opts)
         activebookmark = _backupactivebookmark(repo)
-        extra = {'internal': 'shelve'}
+        extra = {b'internal': b'shelve'}
         if includeunknown:
             _includeunknownfiles(repo, pats, opts, extra)
 
         if _iswctxonnewbranch(repo) and not _isbareshelve(pats, opts):
             # In a non-bare shelve we don't store the newly created branch
             # at the bundled commit
-            repo.dirstate.setbranch(repo['.'].branch())
+            repo.dirstate.setbranch(repo[b'.'].branch())
 
         commitfunc = getcommitfunc(extra, interactive, editor=True)
         if not interactive:
@@ -554,13 +558,13 @@
 
         if ui.formatted():
             desc = stringutil.ellipsis(desc, ui.termwidth())
-        ui.status(_('shelved as %s\n') % name)
-        if opts['keep']:
+        ui.status(_(b'shelved as %s\n') % name)
+        if opts[b'keep']:
             with repo.dirstate.parentchange():
                 scmutil.movedirstate(repo, parent, match)
         else:
             hg.update(repo, parent.node())
-        if origbranch != repo['.'].branch() and not _isbareshelve(pats, opts):
+        if origbranch != repo[b'.'].branch() and not _isbareshelve(pats, opts):
             repo.dirstate.setbranch(origbranch)
 
         _finishshelve(repo, tr)
@@ -572,14 +576,14 @@
 def _isbareshelve(pats, opts):
     return (
         not pats
-        and not opts.get('interactive', False)
-        and not opts.get('include', False)
-        and not opts.get('exclude', False)
+        and not opts.get(b'interactive', False)
+        and not opts.get(b'include', False)
+        and not opts.get(b'exclude', False)
     )
 
 
 def _iswctxonnewbranch(repo):
-    return repo[None].branch() != repo['.'].branch()
+    return repo[None].branch() != repo[b'.'].branch()
 
 
 def cleanupcmd(ui, repo):
@@ -587,7 +591,7 @@
 
     with repo.wlock():
         for (name, _type) in repo.vfs.readdir(shelvedir):
-            suffix = name.rsplit('.', 1)[-1]
+            suffix = name.rsplit(b'.', 1)[-1]
             if suffix in shelvefileextensions:
                 shelvedfile(repo, name).movetobackup()
             cleanupoldbackups(repo)
@@ -596,7 +600,7 @@
 def deletecmd(ui, repo, pats):
     """subcommand that deletes a specific shelve"""
     if not pats:
-        raise error.Abort(_('no shelved changes specified!'))
+        raise error.Abort(_(b'no shelved changes specified!'))
     with repo.wlock():
         try:
             for name in pats:
@@ -613,7 +617,7 @@
         except OSError as err:
             if err.errno != errno.ENOENT:
                 raise
-            raise error.Abort(_("shelved change '%s' not found") % name)
+            raise error.Abort(_(b"shelved change '%s' not found") % name)
 
 
 def listshelves(repo):
@@ -626,7 +630,7 @@
         return []
     info = []
     for (name, _type) in names:
-        pfx, sfx = name.rsplit('.', 1)
+        pfx, sfx = name.rsplit(b'.', 1)
         if not pfx or sfx != patchextension:
             continue
         st = shelvedfile(repo, name).stat()
@@ -640,43 +644,43 @@
     width = 80
     if not ui.plain():
         width = ui.termwidth()
-    namelabel = 'shelve.newest'
-    ui.pager('shelve')
+    namelabel = b'shelve.newest'
+    ui.pager(b'shelve')
     for mtime, name in listshelves(repo):
         sname = util.split(name)[1]
         if pats and sname not in pats:
             continue
         ui.write(sname, label=namelabel)
-        namelabel = 'shelve.name'
+        namelabel = b'shelve.name'
         if ui.quiet:
-            ui.write('\n')
+            ui.write(b'\n')
             continue
-        ui.write(' ' * (16 - len(sname)))
+        ui.write(b' ' * (16 - len(sname)))
         used = 16
         date = dateutil.makedate(mtime)
-        age = '(%s)' % templatefilters.age(date, abbrev=True)
-        ui.write(age, label='shelve.age')
-        ui.write(' ' * (12 - len(age)))
+        age = b'(%s)' % templatefilters.age(date, abbrev=True)
+        ui.write(age, label=b'shelve.age')
+        ui.write(b' ' * (12 - len(age)))
         used += 12
-        with open(name + '.' + patchextension, 'rb') as fp:
+        with open(name + b'.' + patchextension, b'rb') as fp:
             while True:
                 line = fp.readline()
                 if not line:
                     break
-                if not line.startswith('#'):
+                if not line.startswith(b'#'):
                     desc = line.rstrip()
                     if ui.formatted():
                         desc = stringutil.ellipsis(desc, width - used)
                     ui.write(desc)
                     break
-            ui.write('\n')
-            if not (opts['patch'] or opts['stat']):
+            ui.write(b'\n')
+            if not (opts[b'patch'] or opts[b'stat']):
                 continue
             difflines = fp.readlines()
-            if opts['patch']:
+            if opts[b'patch']:
                 for chunk, label in patch.difflabel(iter, difflines):
                     ui.write(chunk, label=label)
-            if opts['stat']:
+            if opts[b'stat']:
                 for chunk, label in patch.diffstatui(difflines, width=width):
                     ui.write(chunk, label=label)
 
@@ -686,14 +690,14 @@
     if len(pats) == 0:
         shelves = listshelves(repo)
         if not shelves:
-            raise error.Abort(_("there are no shelves to show"))
+            raise error.Abort(_(b"there are no shelves to show"))
         mtime, name = shelves[0]
         sname = util.split(name)[1]
         pats = [sname]
 
     for shelfname in pats:
         if not shelvedfile(repo, shelfname, patchextension).exists():
-            raise error.Abort(_("cannot find shelf %s") % shelfname)
+            raise error.Abort(_(b"cannot find shelf %s") % shelfname)
 
     listcmd(ui, repo, pats, opts)
 
@@ -702,34 +706,35 @@
     """check parent while resuming an unshelve"""
     if state.parents != repo.dirstate.parents():
         raise error.Abort(
-            _('working directory parents do not match unshelve ' 'state')
+            _(b'working directory parents do not match unshelve ' b'state')
         )
 
 
 def _loadshelvedstate(ui, repo, opts):
     try:
         state = shelvedstate.load(repo)
-        if opts.get('keep') is None:
-            opts['keep'] = state.keep
+        if opts.get(b'keep') is None:
+            opts[b'keep'] = state.keep
     except IOError as err:
         if err.errno != errno.ENOENT:
             raise
-        cmdutil.wrongtooltocontinue(repo, _('unshelve'))
+        cmdutil.wrongtooltocontinue(repo, _(b'unshelve'))
     except error.CorruptedState as err:
-        ui.debug(pycompat.bytestr(err) + '\n')
-        if opts.get('continue'):
-            msg = _('corrupted shelved state file')
+        ui.debug(pycompat.bytestr(err) + b'\n')
+        if opts.get(b'continue'):
+            msg = _(b'corrupted shelved state file')
             hint = _(
-                'please run hg unshelve --abort to abort unshelve ' 'operation'
+                b'please run hg unshelve --abort to abort unshelve '
+                b'operation'
             )
             raise error.Abort(msg, hint=hint)
-        elif opts.get('abort'):
+        elif opts.get(b'abort'):
             shelvedstate.clear(repo)
             raise error.Abort(
                 _(
-                    'could not read shelved state file, your '
-                    'working copy may be in an unexpected state\n'
-                    'please update to some commit\n'
+                    b'could not read shelved state file, your '
+                    b'working copy may be in an unexpected state\n'
+                    b'please update to some commit\n'
                 )
             )
     return state
@@ -747,24 +752,24 @@
             mergefiles(ui, repo, state.wctx, state.pendingctx)
             if not phases.supportinternal(repo):
                 repair.strip(
-                    ui, repo, state.nodestoremove, backup=False, topic='shelve'
+                    ui, repo, state.nodestoremove, backup=False, topic=b'shelve'
                 )
         finally:
             shelvedstate.clear(repo)
-            ui.warn(_("unshelve of '%s' aborted\n") % state.name)
+            ui.warn(_(b"unshelve of '%s' aborted\n") % state.name)
 
 
 def hgabortunshelve(ui, repo):
     """logic to  abort unshelve using 'hg abort"""
     with repo.wlock():
-        state = _loadshelvedstate(ui, repo, {'abort': True})
+        state = _loadshelvedstate(ui, repo, {b'abort': True})
         return unshelveabort(ui, repo, state)
 
 
 def mergefiles(ui, repo, wctx, shelvectx):
     """updates to wctx and merges the changes from shelvectx into the
     dirstate."""
-    with ui.configoverride({('ui', 'quiet'): True}):
+    with ui.configoverride({(b'ui', b'quiet'): True}):
         hg.update(repo, wctx.node())
         ui.pushbuffer(True)
         cmdutil.revert(ui, repo, shelvectx, repo.dirstate.parents())
@@ -775,13 +780,13 @@
     if branchtorestore and branchtorestore != repo.dirstate.branch():
         repo.dirstate.setbranch(branchtorestore)
         ui.status(
-            _('marked working directory as branch %s\n') % branchtorestore
+            _(b'marked working directory as branch %s\n') % branchtorestore
         )
 
 
 def unshelvecleanup(ui, repo, name, opts):
     """remove related files after an unshelve"""
-    if not opts.get('keep'):
+    if not opts.get(b'keep'):
         for filetype in shelvefileextensions:
             shfile = shelvedfile(repo, name, filetype)
             if shfile.exists():
@@ -800,8 +805,8 @@
         ms = merge.mergestate.read(repo)
         if list(ms.unresolved()):
             raise error.Abort(
-                _("unresolved conflicts, can't continue"),
-                hint=_("see 'hg resolve', then 'hg unshelve --continue'"),
+                _(b"unresolved conflicts, can't continue"),
+                hint=_(b"see 'hg resolve', then 'hg unshelve --continue'"),
             )
 
         shelvectx = repo[state.parents[1]]
@@ -814,8 +819,8 @@
         targetphase = phases.internal
         if not phases.supportinternal(repo):
             targetphase = phases.secret
-        overrides = {('phases', 'new-commit'): targetphase}
-        with repo.ui.configoverride(overrides, 'unshelve'):
+        overrides = {(b'phases', b'new-commit'): targetphase}
+        with repo.ui.configoverride(overrides, b'unshelve'):
             with repo.dirstate.parentchange():
                 repo.setparents(state.parents[0], nodemod.nullid)
                 newnode, ispartialunshelve = _createunshelvectx(
@@ -829,8 +834,8 @@
             merge.mergestate.clean(repo)
             shelvectx = state.pendingctx
             msg = _(
-                'note: unshelved changes already existed '
-                'in the working copy\n'
+                b'note: unshelved changes already existed '
+                b'in the working copy\n'
             )
             ui.status(msg)
         else:
@@ -844,20 +849,20 @@
 
         if not phases.supportinternal(repo):
             repair.strip(
-                ui, repo, state.nodestoremove, backup=False, topic='shelve'
+                ui, repo, state.nodestoremove, backup=False, topic=b'shelve'
             )
         shelvedstate.clear(repo)
         if not ispartialunshelve:
             unshelvecleanup(ui, repo, state.name, opts)
         _restoreactivebookmark(repo, state.activebookmark)
-        ui.status(_("unshelve of '%s' complete\n") % state.name)
+        ui.status(_(b"unshelve of '%s' complete\n") % state.name)
 
 
 def hgcontinueunshelve(ui, repo):
     """logic to resume unshelve using 'hg continue'"""
     with repo.wlock():
-        state = _loadshelvedstate(ui, repo, {'continue': True})
-        return unshelvecontinue(ui, repo, state, {'keep': state.keep})
+        state = _loadshelvedstate(ui, repo, {b'continue': True})
+        return unshelvecontinue(ui, repo, state, {b'keep': state.keep})
 
 
 def _commitworkingcopychanges(ui, repo, opts, tmpwctx):
@@ -870,16 +875,16 @@
         return tmpwctx, addedbefore
     ui.status(
         _(
-            "temporarily committing pending changes "
-            "(restore with 'hg unshelve --abort')\n"
+            b"temporarily committing pending changes "
+            b"(restore with 'hg unshelve --abort')\n"
         )
     )
-    extra = {'internal': 'shelve'}
+    extra = {b'internal': b'shelve'}
     commitfunc = getcommitfunc(extra=extra, interactive=False, editor=False)
     tempopts = {}
-    tempopts['message'] = "pending changes temporary commit"
-    tempopts['date'] = opts.get('date')
-    with ui.configoverride({('ui', 'quiet'): True}):
+    tempopts[b'message'] = b"pending changes temporary commit"
+    tempopts[b'date'] = opts.get(b'date')
+    with ui.configoverride({(b'ui', b'quiet'): True}):
         node = cmdutil.commit(ui, repo, commitfunc, [], tempopts)
     tmpwctx = repo[node]
     return tmpwctx, addedbefore
@@ -889,16 +894,16 @@
     """Recreate commit in the repository during the unshelve"""
     repo = repo.unfiltered()
     node = None
-    if shelvedfile(repo, basename, 'shelve').exists():
-        node = shelvedfile(repo, basename, 'shelve').readinfo()['node']
+    if shelvedfile(repo, basename, b'shelve').exists():
+        node = shelvedfile(repo, basename, b'shelve').readinfo()[b'node']
     if node is None or node not in repo:
-        with ui.configoverride({('ui', 'quiet'): True}):
-            shelvectx = shelvedfile(repo, basename, 'hg').applybundle(tr)
+        with ui.configoverride({(b'ui', b'quiet'): True}):
+            shelvectx = shelvedfile(repo, basename, b'hg').applybundle(tr)
         # We might not strip the unbundled changeset, so we should keep track of
         # the unshelve node in case we need to reuse it (e.g. unshelve --keep)
         if node is None:
-            info = {'node': nodemod.hex(shelvectx.node())}
-            shelvedfile(repo, basename, 'shelve').writeinfo(info)
+            info = {b'node': nodemod.hex(shelvectx.node())}
+            shelvedfile(repo, basename, b'shelve').writeinfo(info)
     else:
         shelvectx = repo[node]
 
@@ -926,8 +931,8 @@
     Here, we return both the newnode which is created interactively and a
     bool indicating whether the unshelve is partly or completely done.
     """
-    opts['message'] = shelvectx.description()
-    opts['interactive-unshelve'] = True
+    opts[b'message'] = shelvectx.description()
+    opts[b'interactive-unshelve'] = True
     pats = []
     if not interactive:
         newnode = repo.commit(
@@ -977,23 +982,23 @@
     """Rebase restored commit from its original location to a destination"""
     # If the shelve is not immediately on top of the commit
     # we'll be merging with, rebase it to be on top.
-    interactive = opts.get('interactive')
+    interactive = opts.get(b'interactive')
     if tmpwctx.node() == shelvectx.p1().node() and not interactive:
         # We won't skip in interactive mode because the user might want to
         # unshelve certain changes only.
         return shelvectx, False
 
     overrides = {
-        ('ui', 'forcemerge'): opts.get('tool', ''),
-        ('phases', 'new-commit'): phases.secret,
+        (b'ui', b'forcemerge'): opts.get(b'tool', b''),
+        (b'phases', b'new-commit'): phases.secret,
     }
-    with repo.ui.configoverride(overrides, 'unshelve'):
-        ui.status(_('rebasing shelved changes\n'))
+    with repo.ui.configoverride(overrides, b'unshelve'):
+        ui.status(_(b'rebasing shelved changes\n'))
         stats = merge.graft(
             repo,
             shelvectx,
             shelvectx.p1(),
-            labels=['shelve', 'working-copy'],
+            labels=[b'shelve', b'working-copy'],
             keepconflictparent=True,
         )
         if stats.unresolvedcount:
@@ -1010,14 +1015,14 @@
                 tmpwctx,
                 nodestoremove,
                 branchtorestore,
-                opts.get('keep'),
+                opts.get(b'keep'),
                 activebookmark,
                 interactive,
             )
             raise error.InterventionRequired(
                 _(
-                    "unresolved conflicts (see 'hg resolve', then "
-                    "'hg unshelve --continue')"
+                    b"unresolved conflicts (see 'hg resolve', then "
+                    b"'hg unshelve --continue')"
                 )
             )
 
@@ -1034,8 +1039,8 @@
             merge.mergestate.clean(repo)
             shelvectx = tmpwctx
             msg = _(
-                'note: unshelved changes already existed '
-                'in the working copy\n'
+                b'note: unshelved changes already existed '
+                b'in the working copy\n'
             )
             ui.status(msg)
         else:
@@ -1048,10 +1053,10 @@
 def _forgetunknownfiles(repo, shelvectx, addedbefore):
     # Forget any files that were unknown before the shelve, unknown before
     # unshelve started, but are now added.
-    shelveunknown = shelvectx.extra().get('shelve_unknown')
+    shelveunknown = shelvectx.extra().get(b'shelve_unknown')
     if not shelveunknown:
         return
-    shelveunknown = frozenset(shelveunknown.split('\0'))
+    shelveunknown = frozenset(shelveunknown.split(b'\0'))
     addedafter = frozenset(repo.status().added)
     toforget = (addedafter & shelveunknown) - addedbefore
     repo[None].forget(toforget)
@@ -1074,66 +1079,66 @@
     shelvetouched = set(shelvectx.files())
     intersection = wcdeleted.intersection(shelvetouched)
     if intersection:
-        m = _("shelved change touches missing files")
-        hint = _("run hg status to see which files are missing")
+        m = _(b"shelved change touches missing files")
+        hint = _(b"run hg status to see which files are missing")
         raise error.Abort(m, hint=hint)
 
 
 def dounshelve(ui, repo, *shelved, **opts):
     opts = pycompat.byteskwargs(opts)
-    abortf = opts.get('abort')
-    continuef = opts.get('continue')
-    interactive = opts.get('interactive')
+    abortf = opts.get(b'abort')
+    continuef = opts.get(b'continue')
+    interactive = opts.get(b'interactive')
     if not abortf and not continuef:
         cmdutil.checkunfinished(repo)
     shelved = list(shelved)
-    if opts.get("name"):
-        shelved.append(opts["name"])
+    if opts.get(b"name"):
+        shelved.append(opts[b"name"])
 
-    if interactive and opts.get('keep'):
-        raise error.Abort(_('--keep on --interactive is not yet supported'))
+    if interactive and opts.get(b'keep'):
+        raise error.Abort(_(b'--keep on --interactive is not yet supported'))
     if abortf or continuef:
         if abortf and continuef:
-            raise error.Abort(_('cannot use both abort and continue'))
+            raise error.Abort(_(b'cannot use both abort and continue'))
         if shelved:
             raise error.Abort(
                 _(
-                    'cannot combine abort/continue with '
-                    'naming a shelved change'
+                    b'cannot combine abort/continue with '
+                    b'naming a shelved change'
                 )
             )
-        if abortf and opts.get('tool', False):
-            ui.warn(_('tool option will be ignored\n'))
+        if abortf and opts.get(b'tool', False):
+            ui.warn(_(b'tool option will be ignored\n'))
 
         state = _loadshelvedstate(ui, repo, opts)
         if abortf:
             return unshelveabort(ui, repo, state)
         elif continuef and interactive:
-            raise error.Abort(_('cannot use both continue and interactive'))
+            raise error.Abort(_(b'cannot use both continue and interactive'))
         elif continuef:
             return unshelvecontinue(ui, repo, state, opts)
     elif len(shelved) > 1:
-        raise error.Abort(_('can only unshelve one change at a time'))
+        raise error.Abort(_(b'can only unshelve one change at a time'))
     elif not shelved:
         shelved = listshelves(repo)
         if not shelved:
-            raise error.Abort(_('no shelved changes to apply!'))
+            raise error.Abort(_(b'no shelved changes to apply!'))
         basename = util.split(shelved[0][1])[1]
-        ui.status(_("unshelving change '%s'\n") % basename)
+        ui.status(_(b"unshelving change '%s'\n") % basename)
     else:
         basename = shelved[0]
 
     if not shelvedfile(repo, basename, patchextension).exists():
-        raise error.Abort(_("shelved change '%s' not found") % basename)
+        raise error.Abort(_(b"shelved change '%s' not found") % basename)
 
     repo = repo.unfiltered()
     lock = tr = None
     try:
         lock = repo.lock()
-        tr = repo.transaction('unshelve', report=lambda x: None)
+        tr = repo.transaction(b'unshelve', report=lambda x: None)
         oldtiprev = len(repo)
 
-        pctx = repo['.']
+        pctx = repo[b'.']
         tmpwctx = pctx
         # The goal is to have a commit structure like so:
         # ...-> pctx -> tmpwctx -> shelvectx
@@ -1147,7 +1152,7 @@
         )
         repo, shelvectx = _unshelverestorecommit(ui, repo, tr, basename)
         _checkunshelveuntrackedproblems(ui, repo, shelvectx)
-        branchtorestore = ''
+        branchtorestore = b''
         if shelvectx.branch() != shelvectx.p1().branch():
             branchtorestore = shelvectx.branch()
 
@@ -1164,8 +1169,8 @@
             branchtorestore,
             activebookmark,
         )
-        overrides = {('ui', 'forcemerge'): opts.get('tool', '')}
-        with ui.configoverride(overrides, 'unshelve'):
+        overrides = {(b'ui', b'forcemerge'): opts.get(b'tool', b'')}
+        with ui.configoverride(overrides, b'unshelve'):
             mergefiles(ui, repo, pctx, shelvectx)
         restorebranch(ui, repo, branchtorestore)
         shelvedstate.clear(repo)
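# Illustrative sketch, not part of the patch: dounshelve() above normalizes
# **opts with pycompat.byteskwargs so later lookups can use bytes keys, and
# getcommitfunc() converts back with pycompat.strkwargs before splatting,
# since Python 3 requires str keys in **kwargs. Simplified stand-ins for the
# real helpers in mercurial/pycompat.py (which handle more cases):
def byteskwargs(dic):
    # str keys -> bytes keys, so opts.get(b'keep') works afterwards
    return {k.encode('latin-1'): v for k, v in dic.items()}

def strkwargs(dic):
    # bytes keys -> str keys, so the dict can be re-expanded with **
    return {k.decode('latin-1'): v for k, v in dic.items()}

def dounshelve_demo(**opts):
    opts = byteskwargs(opts)
    return opts.get(b'keep'), strkwargs(opts)

assert dounshelve_demo(keep=True) == (True, {'keep': True})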
--- a/mercurial/similar.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/similar.py	Sun Oct 06 09:48:39 2019 -0400
@@ -21,9 +21,9 @@
     # We use hash() to discard fctx.data() from memory.
     hashes = {}
     progress = repo.ui.makeprogress(
-        _('searching for exact renames'),
+        _(b'searching for exact renames'),
         total=(len(added) + len(removed)),
-        unit=_('files'),
+        unit=_(b'files'),
     )
     for fctx in removed:
         progress.increment()
@@ -81,7 +81,7 @@
     '''
     copies = {}
     progress = repo.ui.makeprogress(
-        _('searching for similar files'), unit=_('files'), total=len(removed)
+        _(b'searching for similar files'), unit=_(b'files'), total=len(removed)
     )
     for r in removed:
         progress.increment()
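# Illustrative sketch, not part of the patch: _() is mercurial.i18n's gettext
# wrapper; after this patch it is handed bytes and yields bytes, so progress
# topics and units stay byte strings end to end. With no translation catalog
# installed it behaves like the identity, which this hypothetical stub mimics:
def _(message):
    # real lookups go through gettext; untranslated input passes through
    return message

assert _(b'searching for similar files') == b'searching for similar files'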
--- a/mercurial/simplemerge.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/simplemerge.py	Sun Oct 06 09:48:39 2019 -0400
@@ -93,9 +93,9 @@
         name_a=None,
         name_b=None,
         name_base=None,
-        start_marker='<<<<<<<',
-        mid_marker='=======',
-        end_marker='>>>>>>>',
+        start_marker=b'<<<<<<<',
+        mid_marker=b'=======',
+        end_marker=b'>>>>>>>',
         base_marker=None,
         localorother=None,
         minimize=False,
@@ -103,37 +103,37 @@
         """Return merge in cvs-like form.
         """
         self.conflicts = False
-        newline = '\n'
+        newline = b'\n'
         if len(self.a) > 0:
-            if self.a[0].endswith('\r\n'):
-                newline = '\r\n'
-            elif self.a[0].endswith('\r'):
-                newline = '\r'
+            if self.a[0].endswith(b'\r\n'):
+                newline = b'\r\n'
+            elif self.a[0].endswith(b'\r'):
+                newline = b'\r'
         if name_a and start_marker:
-            start_marker = start_marker + ' ' + name_a
+            start_marker = start_marker + b' ' + name_a
         if name_b and end_marker:
-            end_marker = end_marker + ' ' + name_b
+            end_marker = end_marker + b' ' + name_b
         if name_base and base_marker:
-            base_marker = base_marker + ' ' + name_base
+            base_marker = base_marker + b' ' + name_base
         merge_regions = self.merge_regions()
         if minimize:
             merge_regions = self.minimize(merge_regions)
         for t in merge_regions:
             what = t[0]
-            if what == 'unchanged':
+            if what == b'unchanged':
                 for i in range(t[1], t[2]):
                     yield self.base[i]
-            elif what == 'a' or what == 'same':
+            elif what == b'a' or what == b'same':
                 for i in range(t[1], t[2]):
                     yield self.a[i]
-            elif what == 'b':
+            elif what == b'b':
                 for i in range(t[1], t[2]):
                     yield self.b[i]
-            elif what == 'conflict':
-                if localorother == 'local':
+            elif what == b'conflict':
+                if localorother == b'local':
                     for i in range(t[3], t[4]):
                         yield self.a[i]
-                elif localorother == 'other':
+                elif localorother == b'other':
                     for i in range(t[5], t[6]):
                         yield self.b[i]
                 else:
@@ -175,13 +175,13 @@
         """
         for t in self.merge_regions():
             what = t[0]
-            if what == 'unchanged':
+            if what == b'unchanged':
                 yield what, self.base[t[1] : t[2]]
-            elif what == 'a' or what == 'same':
+            elif what == b'a' or what == b'same':
                 yield what, self.a[t[1] : t[2]]
-            elif what == 'b':
+            elif what == b'b':
                 yield what, self.b[t[1] : t[2]]
-            elif what == 'conflict':
+            elif what == b'conflict':
                 yield (
                     what,
                     self.base[t[1] : t[2]],
@@ -253,15 +253,15 @@
                 same = compare_range(self.a, ia, amatch, self.b, ib, bmatch)
 
                 if same:
-                    yield 'same', ia, amatch
+                    yield b'same', ia, amatch
                 elif equal_a and not equal_b:
-                    yield 'b', ib, bmatch
+                    yield b'b', ib, bmatch
                 elif equal_b and not equal_a:
-                    yield 'a', ia, amatch
+                    yield b'a', ia, amatch
                 elif not equal_a and not equal_b:
-                    yield 'conflict', iz, zmatch, ia, amatch, ib, bmatch
+                    yield b'conflict', iz, zmatch, ia, amatch, ib, bmatch
                 else:
-                    raise AssertionError("can't handle a=b=base but unmatched")
+                    raise AssertionError(b"can't handle a=b=base but unmatched")
 
                 ia = amatch
                 ib = bmatch
@@ -275,7 +275,7 @@
                 assert ib == bmatch
                 assert iz == zmatch
 
-                yield 'unchanged', zmatch, zend
+                yield b'unchanged', zmatch, zend
                 iz = zend
                 ia = aend
                 ib = bend
@@ -288,7 +288,7 @@
         region and are instead considered the same.
         """
         for region in merge_regions:
-            if region[0] != "conflict":
+            if region[0] != b"conflict":
                 yield region
                 continue
             issue, z1, z2, a1, a2, b1, b2 = region
@@ -314,10 +314,10 @@
             endmatches = ii
 
             if startmatches > 0:
-                yield 'same', a1, a1 + startmatches
+                yield b'same', a1, a1 + startmatches
 
             yield (
-                'conflict',
+                b'conflict',
                 z1,
                 z2,
                 a1 + startmatches,
@@ -327,7 +327,7 @@
             )
 
             if endmatches > 0:
-                yield 'same', a2 - endmatches, a2
+                yield b'same', a2 - endmatches, a2
 
     def find_sync_regions(self):
         """Return a list of sync regions, where both descendants match the base.
@@ -420,17 +420,17 @@
     """verifies that text is non-binary (unless opts[text] is passed,
     then we just warn)"""
     if stringutil.binary(text):
-        msg = _("%s looks like a binary file.") % path
-        if not opts.get('quiet'):
-            ui.warn(_('warning: %s\n') % msg)
-        if not opts.get('text'):
+        msg = _(b"%s looks like a binary file.") % path
+        if not opts.get(b'quiet'):
+            ui.warn(_(b'warning: %s\n') % msg)
+        if not opts.get(b'text'):
             raise error.Abort(msg)
     return text
 
 
 def _picklabels(defaults, overrides):
     if len(overrides) > 3:
-        raise error.Abort(_("can only specify three labels."))
+        raise error.Abort(_(b"can only specify three labels."))
     result = defaults[:]
     for i, override in enumerate(overrides):
         result[i] = override
@@ -454,11 +454,11 @@
         # repository usually sees) might be more useful.
         return _verifytext(ctx.decodeddata(), ctx.path(), ui, opts)
 
-    mode = opts.get('mode', 'merge')
+    mode = opts.get(b'mode', b'merge')
     name_a, name_b, name_base = None, None, None
-    if mode != 'union':
+    if mode != b'union':
         name_a, name_b, name_base = _picklabels(
-            [localctx.path(), otherctx.path(), None], opts.get('label', [])
+            [localctx.path(), otherctx.path(), None], opts.get(b'label', [])
         )
 
     try:
@@ -470,29 +470,29 @@
 
     m3 = Merge3Text(basetext, localtext, othertext)
     extrakwargs = {
-        "localorother": opts.get("localorother", None),
-        'minimize': True,
+        b"localorother": opts.get(b"localorother", None),
+        b'minimize': True,
     }
-    if mode == 'union':
-        extrakwargs['start_marker'] = None
-        extrakwargs['mid_marker'] = None
-        extrakwargs['end_marker'] = None
+    if mode == b'union':
+        extrakwargs[b'start_marker'] = None
+        extrakwargs[b'mid_marker'] = None
+        extrakwargs[b'end_marker'] = None
     elif name_base is not None:
-        extrakwargs['base_marker'] = '|||||||'
-        extrakwargs['name_base'] = name_base
-        extrakwargs['minimize'] = False
+        extrakwargs[b'base_marker'] = b'|||||||'
+        extrakwargs[b'name_base'] = name_base
+        extrakwargs[b'minimize'] = False
 
-    mergedtext = ""
+    mergedtext = b""
     for line in m3.merge_lines(
         name_a=name_a, name_b=name_b, **pycompat.strkwargs(extrakwargs)
     ):
-        if opts.get('print'):
+        if opts.get(b'print'):
             ui.fout.write(line)
         else:
             mergedtext += line
 
-    if not opts.get('print'):
+    if not opts.get(b'print'):
         localctx.write(mergedtext, localctx.flags())
 
-    if m3.conflicts and not mode == 'union':
+    if m3.conflicts and not mode == b'union':
         return 1
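# Illustrative sketch, not part of the patch: merge_lines() above decorates
# the byte-string conflict markers with the file labels by plain bytes
# concatenation. The labels here are invented; the marker handling mirrors
# the hunk above:
start_marker, end_marker = b'<<<<<<<', b'>>>>>>>'
name_a, name_b = b'working copy', b'merge rev'
start_marker = start_marker + b' ' + name_a
end_marker = end_marker + b' ' + name_b
assert start_marker == b'<<<<<<< working copy'
assert end_marker == b'>>>>>>> merge rev'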
--- a/mercurial/smartset.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/smartset.py	Sun Oct 06 09:48:39 2019 -0400
@@ -17,7 +17,7 @@
 
 
 def _typename(o):
-    return pycompat.sysbytes(type(o).__name__).lstrip('_')
+    return pycompat.sysbytes(type(o).__name__).lstrip(b'_')
 
 
 class abstractsmartset(object):
@@ -63,7 +63,7 @@
             for v in self.fastasc():
                 break
             else:
-                raise ValueError('arg is an empty sequence')
+                raise ValueError(b'arg is an empty sequence')
         self.min = lambda: v
         return v
 
@@ -75,7 +75,7 @@
             for v in self.fastdesc():
                 break
             else:
-                raise ValueError('arg is an empty sequence')
+                raise ValueError(b'arg is an empty sequence')
         self.max = lambda: v
         return v
 
@@ -125,7 +125,7 @@
         This is part of the mandatory API for smartset."""
         c = other.__contains__
         return self.filter(
-            lambda r: not c(r), condrepr=('<not %r>', other), cache=False
+            lambda r: not c(r), condrepr=(b'<not %r>', other), cache=False
         )
 
     def filter(self, condition, condrepr=None, cache=True):
@@ -137,14 +137,14 @@
 
         This is part of the mandatory API for smartset."""
         # a builtin cannot be cached, but does not need to be
-        if cache and util.safehasattr(condition, '__code__'):
+        if cache and util.safehasattr(condition, b'__code__'):
             condition = util.cachefunc(condition)
         return filteredset(self, condition, condrepr)
 
     def slice(self, start, stop):
         """Return new smartset that contains selected elements from this set"""
         if start < 0 or stop < 0:
-            raise error.ProgrammingError('negative index not allowed')
+            raise error.ProgrammingError(b'negative index not allowed')
         return self._slice(start, stop)
 
     def _slice(self, start, stop):
@@ -161,7 +161,7 @@
             if y is None:
                 break
             ys.append(y)
-        return baseset(ys, datarepr=('slice=%d:%d %r', start, stop, self))
+        return baseset(ys, datarepr=(b'slice=%d:%d %r', start, stop, self))
 
 
 class baseset(abstractsmartset):
@@ -359,10 +359,10 @@
         return s
 
     def __and__(self, other):
-        return self._fastsetop(other, '__and__')
+        return self._fastsetop(other, b'__and__')
 
     def __sub__(self, other):
-        return self._fastsetop(other, '__sub__')
+        return self._fastsetop(other, b'__sub__')
 
     def _slice(self, start, stop):
         # creating new list should be generally cheaper than iterating items
@@ -378,7 +378,7 @@
 
     @encoding.strmethod
     def __repr__(self):
-        d = {None: '', False: '-', True: '+'}[self._ascending]
+        d = {None: b'', False: b'-', True: b'+'}[self._ascending]
         s = stringutil.buildrepr(self._datarepr)
         if not s:
             l = self._list
@@ -388,7 +388,7 @@
             if self._ascending is not None:
                 l = self._asclist
             s = pycompat.byterepr(l)
-        return '<%s%s %s>' % (_typename(self), d, s)
+        return b'<%s%s %s>' % (_typename(self), d, s)
 
 
 class filteredset(abstractsmartset):
@@ -508,7 +508,7 @@
         s = stringutil.buildrepr(self._condrepr)
         if s:
             xs.append(s)
-        return '<%s %s>' % (_typename(self), ', '.join(xs))
+        return b'<%s %s>' % (_typename(self), b', '.join(xs))
 
 
 def _iterordered(ascending, iter1, iter2):
@@ -668,9 +668,9 @@
         # try to use our own fast iterator if it exists
         self._trysetasclist()
         if self._ascending:
-            attr = 'fastasc'
+            attr = b'fastasc'
         else:
-            attr = 'fastdesc'
+            attr = b'fastdesc'
         it = getattr(self, attr)
         if it is not None:
             return it()
@@ -760,8 +760,8 @@
 
     @encoding.strmethod
     def __repr__(self):
-        d = {None: '', False: '-', True: '+'}[self._ascending]
-        return '<%s%s %r, %r>' % (_typename(self), d, self._r1, self._r2)
+        d = {None: b'', False: b'-', True: b'+'}[self._ascending]
+        return b'<%s%s %r, %r>' % (_typename(self), d, self._r1, self._r2)
 
 
 class generatorset(abstractsmartset):
@@ -928,8 +928,8 @@
 
     @encoding.strmethod
     def __repr__(self):
-        d = {False: '-', True: '+'}[self._ascending]
-        return '<%s%s>' % (_typename(self), d)
+        d = {False: b'-', True: b'+'}[self._ascending]
+        return b'<%s%s>' % (_typename(self), d)
 
 
 class _generatorsetasc(generatorset):
@@ -1104,8 +1104,8 @@
 
     @encoding.strmethod
     def __repr__(self):
-        d = {False: '-', True: '+'}[self._ascending]
-        return '<%s%s %d:%d>' % (_typename(self), d, self._start, self._end)
+        d = {False: b'-', True: b'+'}[self._ascending]
+        return b'<%s%s %d:%d>' % (_typename(self), d, self._start, self._end)
 
 
 class fullreposet(_spanset):
@@ -1127,7 +1127,7 @@
         This boldly assumes the other contains valid revs only.
         """
         # other is not a smartset, make it so
-        if not util.safehasattr(other, 'isascending'):
+        if not util.safehasattr(other, b'isascending'):
             # filter out hidden revisions
             # (this boldly assumes all smartsets are pure)
             #
--- a/mercurial/sparse.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/sparse.py	Sun Oct 06 09:48:39 2019 -0400
@@ -44,49 +44,52 @@
     current = None
     havesection = False
 
-    for line in raw.split('\n'):
+    for line in raw.split(b'\n'):
         line = line.strip()
-        if not line or line.startswith('#'):
+        if not line or line.startswith(b'#'):
             # empty or comment line, skip
             continue
-        elif line.startswith('%include '):
+        elif line.startswith(b'%include '):
             line = line[9:].strip()
             if line:
                 profiles.add(line)
-        elif line == '[include]':
+        elif line == b'[include]':
             if havesection and current != includes:
                 # TODO pass filename into this API so we can report it.
                 raise error.Abort(
                     _(
-                        '%(action)s config cannot have includes '
-                        'after excludes'
+                        b'%(action)s config cannot have includes '
+                        b'after excludes'
                     )
-                    % {'action': action}
+                    % {b'action': action}
                 )
             havesection = True
             current = includes
             continue
-        elif line == '[exclude]':
+        elif line == b'[exclude]':
             havesection = True
             current = excludes
         elif line:
             if current is None:
                 raise error.Abort(
-                    _('%(action)s config entry outside of ' 'section: %(line)s')
-                    % {'action': action, 'line': line},
+                    _(
+                        b'%(action)s config entry outside of '
+                        b'section: %(line)s'
+                    )
+                    % {b'action': action, b'line': line},
                     hint=_(
-                        'add an [include] or [exclude] line '
-                        'to declare the entry type'
+                        b'add an [include] or [exclude] line '
+                        b'to declare the entry type'
                     ),
                 )
 
-            if line.strip().startswith('/'):
+            if line.strip().startswith(b'/'):
                 ui.warn(
                     _(
-                        'warning: %(action)s profile cannot use'
-                        ' paths starting with /, ignoring %(line)s\n'
+                        b'warning: %(action)s profile cannot use'
+                        b' paths starting with /, ignoring %(line)s\n'
                     )
-                    % {'action': action, 'line': line}
+                    % {b'action': action, b'line': line}
                 )
                 continue
             current.add(line)
@@ -112,16 +115,16 @@
     if not enabled:
         return set(), set(), set()
 
-    raw = repo.vfs.tryread('sparse')
+    raw = repo.vfs.tryread(b'sparse')
     if not raw:
         return set(), set(), set()
 
     if rev is None:
         raise error.Abort(
-            _('cannot parse sparse patterns from working ' 'directory')
+            _(b'cannot parse sparse patterns from working ' b'directory')
         )
 
-    includes, excludes, profiles = parseconfig(repo.ui, raw, 'sparse')
+    includes, excludes, profiles = parseconfig(repo.ui, raw, b'sparse')
     ctx = repo[rev]
 
     if profiles:
@@ -137,17 +140,19 @@
                 raw = readprofile(repo, profile, rev)
             except error.ManifestLookupError:
                 msg = (
-                    "warning: sparse profile '%s' not found "
-                    "in rev %s - ignoring it\n" % (profile, ctx)
+                    b"warning: sparse profile '%s' not found "
+                    b"in rev %s - ignoring it\n" % (profile, ctx)
                 )
                 # experimental config: sparse.missingwarning
-                if repo.ui.configbool('sparse', 'missingwarning'):
+                if repo.ui.configbool(b'sparse', b'missingwarning'):
                     repo.ui.warn(msg)
                 else:
                     repo.ui.debug(msg)
                 continue
 
-            pincludes, pexcludes, subprofs = parseconfig(repo.ui, raw, 'sparse')
+            pincludes, pexcludes, subprofs = parseconfig(
+                repo.ui, raw, b'sparse'
+            )
             includes.update(pincludes)
             excludes.update(pexcludes)
             profiles.update(subprofs)
@@ -155,7 +160,7 @@
         profiles = visited
 
     if includes:
-        includes.add('.hg*')
+        includes.add(b'.hg*')
 
     return includes, excludes, profiles
 
@@ -192,56 +197,56 @@
     """
     cache = repo._sparsesignaturecache
 
-    signature = cache.get('signature')
+    signature = cache.get(b'signature')
 
     if includetemp:
-        tempsignature = cache.get('tempsignature')
+        tempsignature = cache.get(b'tempsignature')
     else:
-        tempsignature = '0'
+        tempsignature = b'0'
 
     if signature is None or (includetemp and tempsignature is None):
-        signature = hex(hashlib.sha1(repo.vfs.tryread('sparse')).digest())
-        cache['signature'] = signature
+        signature = hex(hashlib.sha1(repo.vfs.tryread(b'sparse')).digest())
+        cache[b'signature'] = signature
 
         if includetemp:
-            raw = repo.vfs.tryread('tempsparse')
+            raw = repo.vfs.tryread(b'tempsparse')
             tempsignature = hex(hashlib.sha1(raw).digest())
-            cache['tempsignature'] = tempsignature
+            cache[b'tempsignature'] = tempsignature
 
-    return '%s %s' % (signature, tempsignature)
+    return b'%s %s' % (signature, tempsignature)
 
 
 def writeconfig(repo, includes, excludes, profiles):
     """Write the sparse config file given a sparse configuration."""
-    with repo.vfs('sparse', 'wb') as fh:
+    with repo.vfs(b'sparse', b'wb') as fh:
         for p in sorted(profiles):
-            fh.write('%%include %s\n' % p)
+            fh.write(b'%%include %s\n' % p)
 
         if includes:
-            fh.write('[include]\n')
+            fh.write(b'[include]\n')
             for i in sorted(includes):
                 fh.write(i)
-                fh.write('\n')
+                fh.write(b'\n')
 
         if excludes:
-            fh.write('[exclude]\n')
+            fh.write(b'[exclude]\n')
             for e in sorted(excludes):
                 fh.write(e)
-                fh.write('\n')
+                fh.write(b'\n')
 
     repo._sparsesignaturecache.clear()
 
 
 def readtemporaryincludes(repo):
-    raw = repo.vfs.tryread('tempsparse')
+    raw = repo.vfs.tryread(b'tempsparse')
     if not raw:
         return set()
 
-    return set(raw.split('\n'))
+    return set(raw.split(b'\n'))
 
 
 def writetemporaryincludes(repo, includes):
-    repo.vfs.write('tempsparse', '\n'.join(sorted(includes)))
+    repo.vfs.write(b'tempsparse', b'\n'.join(sorted(includes)))
     repo._sparsesignaturecache.clear()
 
 
@@ -253,7 +258,7 @@
 
 
 def prunetemporaryincludes(repo):
-    if not enabled or not repo.vfs.exists('tempsparse'):
+    if not enabled or not repo.vfs.exists(b'tempsparse'):
         return
 
     s = repo.status()
@@ -268,24 +273,25 @@
     tempincludes = readtemporaryincludes(repo)
     for file in tempincludes:
         if file in dirstate and not sparsematch(file):
-            message = _('dropping temporarily included sparse files')
+            message = _(b'dropping temporarily included sparse files')
             actions.append((file, None, message))
             dropped.append(file)
 
     typeactions = mergemod.emptyactions()
-    typeactions['r'] = actions
+    typeactions[b'r'] = actions
     mergemod.applyupdates(
-        repo, typeactions, repo[None], repo['.'], False, wantfiledata=False
+        repo, typeactions, repo[None], repo[b'.'], False, wantfiledata=False
     )
 
     # Fix dirstate
     for file in dropped:
         dirstate.drop(file)
 
-    repo.vfs.unlink('tempsparse')
+    repo.vfs.unlink(b'tempsparse')
     repo._sparsesignaturecache.clear()
     msg = _(
-        'cleaned up %d temporarily added file(s) from the ' 'sparse checkout\n'
+        b'cleaned up %d temporarily added file(s) from the '
+        b'sparse checkout\n'
     )
     repo.ui.status(msg % len(tempincludes))
 
@@ -293,8 +299,8 @@
 def forceincludematcher(matcher, includes):
     """Returns a matcher that returns true for any of the forced includes
     before testing against the actual matcher."""
-    kindpats = [('path', include, '') for include in includes]
-    includematcher = matchmod.includematcher('', kindpats)
+    kindpats = [(b'path', include, b'') for include in includes]
+    includematcher = matchmod.includematcher(b'', kindpats)
     return matchmod.unionmatcher([includematcher, matcher])
 
 
@@ -319,7 +325,7 @@
 
     signature = configsignature(repo, includetemp=includetemp)
 
-    key = '%s %s' % (signature, ' '.join(map(pycompat.bytestr, revs)))
+    key = b'%s %s' % (signature, b' '.join(map(pycompat.bytestr, revs)))
 
     result = repo._sparsematchercache.get(key)
     if result:
@@ -333,11 +339,11 @@
             if includes or excludes:
                 matcher = matchmod.match(
                     repo.root,
-                    '',
+                    b'',
                     [],
                     include=includes,
                     exclude=excludes,
-                    default='relpath',
+                    default=b'relpath',
                 )
                 matchers.append(matcher)
         except IOError:
@@ -388,17 +394,17 @@
         files.add(file)
         if sparsematch(file):
             prunedactions[file] = action
-        elif type == 'm':
+        elif type == b'm':
             temporaryfiles.append(file)
             prunedactions[file] = action
         elif branchmerge:
-            if type != 'k':
+            if type != b'k':
                 temporaryfiles.append(file)
                 prunedactions[file] = action
-        elif type == 'f':
+        elif type == b'f':
             prunedactions[file] = action
         elif file in wctx:
-            prunedactions[file] = ('r', args, msg)
+            prunedactions[file] = (b'r', args, msg)
 
         if branchmerge and type == mergemod.ACTION_MERGE:
             f1, f2, fa, move, anc = args
@@ -408,8 +414,8 @@
     if len(temporaryfiles) > 0:
         repo.ui.status(
             _(
-                'temporarily included %d file(s) in the sparse '
-                'checkout for merging\n'
+                b'temporarily included %d file(s) in the sparse '
+                b'checkout for merging\n'
             )
             % len(temporaryfiles)
         )
@@ -417,7 +423,7 @@
 
         # Add the new files to the working copy so they can be merged, etc
         actions = []
-        message = 'temporarily adding to sparse checkout'
+        message = b'temporarily adding to sparse checkout'
         wctxmanifest = repo[None].manifest()
         for file in temporaryfiles:
             if file in wctxmanifest:
@@ -425,9 +431,9 @@
                 actions.append((file, (fctx.flags(), False), message))
 
         typeactions = mergemod.emptyactions()
-        typeactions['g'] = actions
+        typeactions[b'g'] = actions
         mergemod.applyupdates(
-            repo, typeactions, repo[None], repo['.'], False, wantfiledata=False
+            repo, typeactions, repo[None], repo[b'.'], False, wantfiledata=False
         )
 
         dirstate = repo.dirstate
@@ -446,9 +452,9 @@
             new = sparsematch(file)
             if not old and new:
                 flags = mf.flags(file)
-                prunedactions[file] = ('g', (flags, False), '')
+                prunedactions[file] = (b'g', (flags, False), b'')
             elif old and not new:
-                prunedactions[file] = ('r', [], '')
+                prunedactions[file] = (b'r', [], b'')
 
     return prunedactions
 
@@ -472,17 +478,17 @@
 
     for f in pending:
         if not sparsematch(f):
-            repo.ui.warn(_("pending changes to '%s'\n") % f)
+            repo.ui.warn(_(b"pending changes to '%s'\n") % f)
             abort = not force
 
     if abort:
         raise error.Abort(
-            _('could not update sparseness due to pending ' 'changes')
+            _(b'could not update sparseness due to pending ' b'changes')
         )
 
     # Calculate actions
     dirstate = repo.dirstate
-    ctx = repo['.']
+    ctx = repo[b'.']
     added = []
     lookup = []
     dropped = []
@@ -499,29 +505,29 @@
         if (new and not old) or (old and new and not file in dirstate):
             fl = mf.flags(file)
             if repo.wvfs.exists(file):
-                actions[file] = ('e', (fl,), '')
+                actions[file] = (b'e', (fl,), b'')
                 lookup.append(file)
             else:
-                actions[file] = ('g', (fl, False), '')
+                actions[file] = (b'g', (fl, False), b'')
                 added.append(file)
         # Drop files that are newly excluded, or that still exist in
         # the dirstate.
         elif (old and not new) or (not old and not new and file in dirstate):
             dropped.append(file)
             if file not in pending:
-                actions[file] = ('r', [], '')
+                actions[file] = (b'r', [], b'')
 
     # Verify there are no pending changes in newly included files
     abort = False
     for file in lookup:
-        repo.ui.warn(_("pending changes to '%s'\n") % file)
+        repo.ui.warn(_(b"pending changes to '%s'\n") % file)
         abort = not force
     if abort:
         raise error.Abort(
             _(
-                'cannot change sparseness due to pending '
-                'changes (delete the files or use '
-                '--force to bring them back dirty)'
+                b'cannot change sparseness due to pending '
+                b'changes (delete the files or use '
+                b'--force to bring them back dirty)'
             )
         )
 
@@ -539,7 +545,7 @@
         typeactions[m].append((f, args, msg))
 
     mergemod.applyupdates(
-        repo, typeactions, repo[None], repo['.'], False, wantfiledata=False
+        repo, typeactions, repo[None], repo[b'.'], False, wantfiledata=False
     )
 
     # Fix dirstate
@@ -577,8 +583,8 @@
     repo, includes, excludes, profiles, force=False, removing=False
 ):
     """Update the sparse config and working directory state."""
-    raw = repo.vfs.tryread('sparse')
-    oldincludes, oldexcludes, oldprofiles = parseconfig(repo.ui, raw, 'sparse')
+    raw = repo.vfs.tryread(b'sparse')
+    oldincludes, oldexcludes, oldprofiles = parseconfig(repo.ui, raw, b'sparse')
 
     oldstatus = repo.status()
     oldmatch = matcher(repo)
@@ -592,11 +598,11 @@
     # updated. But this requires massive rework to matcher() and its
     # consumers.
 
-    if 'exp-sparse' in oldrequires and removing:
-        repo.requirements.discard('exp-sparse')
+    if b'exp-sparse' in oldrequires and removing:
+        repo.requirements.discard(b'exp-sparse')
         scmutil.writerequires(repo.vfs, repo.requirements)
-    elif 'exp-sparse' not in oldrequires:
-        repo.requirements.add('exp-sparse')
+    elif b'exp-sparse' not in oldrequires:
+        repo.requirements.add(b'exp-sparse')
         scmutil.writerequires(repo.vfs, repo.requirements)
 
     try:
@@ -618,8 +624,8 @@
     directory is refreshed, as needed.
     """
     with repo.wlock():
-        raw = repo.vfs.tryread('sparse')
-        includes, excludes, profiles = parseconfig(repo.ui, raw, 'sparse')
+        raw = repo.vfs.tryread(b'sparse')
+        includes, excludes, profiles = parseconfig(repo.ui, raw, b'sparse')
 
         if not includes and not excludes:
             return
@@ -635,19 +641,19 @@
     """
     with repo.wlock():
         # read current configuration
-        raw = repo.vfs.tryread('sparse')
-        includes, excludes, profiles = parseconfig(repo.ui, raw, 'sparse')
+        raw = repo.vfs.tryread(b'sparse')
+        includes, excludes, profiles = parseconfig(repo.ui, raw, b'sparse')
         aincludes, aexcludes, aprofiles = activeconfig(repo)
 
         # Import rules on top; only take in rules that are not yet
         # part of the active rules.
         changed = False
         for p in paths:
-            with util.posixfile(util.expandpath(p), mode='rb') as fh:
+            with util.posixfile(util.expandpath(p), mode=b'rb') as fh:
                 raw = fh.read()
 
             iincludes, iexcludes, iprofiles = parseconfig(
-                repo.ui, raw, 'sparse'
+                repo.ui, raw, b'sparse'
             )
             oldsize = len(includes) + len(excludes) + len(profiles)
             includes.update(iincludes - aincludes)
@@ -696,9 +702,9 @@
     The new config is written out and a working directory refresh is performed.
     """
     with repo.wlock():
-        raw = repo.vfs.tryread('sparse')
+        raw = repo.vfs.tryread(b'sparse')
         oldinclude, oldexclude, oldprofiles = parseconfig(
-            repo.ui, raw, 'sparse'
+            repo.ui, raw, b'sparse'
         )
 
         if reset:
@@ -711,7 +717,7 @@
             newprofiles = set(oldprofiles)
 
         if any(os.path.isabs(pat) for pat in pats):
-            raise error.Abort(_('paths cannot be absolute'))
+            raise error.Abort(_(b'paths cannot be absolute'))
 
         if not usereporootpaths:
             # let's treat paths as relative to cwd
@@ -720,7 +726,7 @@
             for kindpat in pats:
                 kind, pat = matchmod._patsplit(kindpat, None)
                 if kind in matchmod.cwdrelativepatternkinds or kind is None:
-                    ap = (kind + ':' if kind else '') + pathutil.canonpath(
+                    ap = (kind + b':' if kind else b'') + pathutil.canonpath(
                         root, cwd, pat
                     )
                     abspats.append(ap)
@@ -778,24 +784,24 @@
     conflicting=0,
 ):
     """Print output summarizing sparse config changes."""
-    with ui.formatter('sparse', opts) as fm:
+    with ui.formatter(b'sparse', opts) as fm:
         fm.startitem()
         fm.condwrite(
             ui.verbose,
-            'profiles_added',
-            _('Profiles changed: %d\n'),
+            b'profiles_added',
+            _(b'Profiles changed: %d\n'),
             profilecount,
         )
         fm.condwrite(
             ui.verbose,
-            'include_rules_added',
-            _('Include rules changed: %d\n'),
+            b'include_rules_added',
+            _(b'Include rules changed: %d\n'),
             includecount,
         )
         fm.condwrite(
             ui.verbose,
-            'exclude_rules_added',
-            _('Exclude rules changed: %d\n'),
+            b'exclude_rules_added',
+            _(b'Exclude rules changed: %d\n'),
             excludecount,
         )
 
@@ -804,14 +810,14 @@
         # framework. No point in repeating ourselves in that case.
         if not fm.isplain():
             fm.condwrite(
-                ui.verbose, 'files_added', _('Files added: %d\n'), added
+                ui.verbose, b'files_added', _(b'Files added: %d\n'), added
             )
             fm.condwrite(
-                ui.verbose, 'files_dropped', _('Files dropped: %d\n'), dropped
+                ui.verbose, b'files_dropped', _(b'Files dropped: %d\n'), dropped
             )
             fm.condwrite(
                 ui.verbose,
-                'files_conflicting',
-                _('Files conflicting: %d\n'),
+                b'files_conflicting',
+                _(b'Files conflicting: %d\n'),
                 conflicting,
             )
--- a/mercurial/sshpeer.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/sshpeer.py	Sun Oct 06 09:48:39 2019 -0400
@@ -30,9 +30,9 @@
     """quote a string for the remote shell ... which we assume is sh"""
     if not s:
         return s
-    if re.match('[a-zA-Z0-9@%_+=:,./-]*$', s):
+    if re.match(b'[a-zA-Z0-9@%_+=:,./-]*$', s):
         return s
-    return "'%s'" % s.replace("'", "'\\''")
+    return b"'%s'" % s.replace(b"'", b"'\\''")
 
 
 def _forwardoutput(ui, pipe):
@@ -43,7 +43,7 @@
         s = procutil.readpipe(pipe)
         if s:
             for l in s.splitlines():
-                ui.status(_("remote: "), l, '\n')
+                ui.status(_(b"remote: "), l, b'\n')
 
 
 class doublepipe(object):
@@ -91,10 +91,10 @@
         return (self._main.fileno() in act, self._side.fileno() in act)
 
     def write(self, data):
-        return self._call('write', data)
+        return self._call(b'write', data)
 
     def read(self, size):
-        r = self._call('read', size)
+        r = self._call(b'read', size)
         if size != 0 and not r:
             # We've observed a condition that indicates the
             # stdout closed unexpectedly. Check stderr one
@@ -105,7 +105,7 @@
         return r
 
     def unbufferedread(self, size):
-        r = self._call('unbufferedread', size)
+        r = self._call(b'unbufferedread', size)
         if size != 0 and not r:
             # We've observed a condition that indicates the
             # stdout closed unexpectedly. Check stderr one
@@ -116,7 +116,7 @@
         return r
 
     def readline(self):
-        return self._call('readline')
+        return self._call(b'readline')
 
     def _call(self, methname, data=None):
         """call <methname> on "main", forward output of "side" while blocking
@@ -124,7 +124,7 @@
         # data can be '' or 0
         if (data is not None and not data) or self._main.closed:
             _forwardoutput(self._ui, self._side)
-            return ''
+            return b''
         while True:
             mainready, sideready = self._wait()
             if sideready:
@@ -154,7 +154,7 @@
         # Try to read from the err descriptor until EOF.
         try:
             for l in pipee:
-                ui.status(_('remote: '), l)
+                ui.status(_(b'remote: '), l)
         except (IOError, ValueError):
             pass
 
@@ -167,16 +167,16 @@
     Returns a tuple of (process, stdin, stdout, stderr) for the
     spawned process.
     """
-    cmd = '%s %s %s' % (
+    cmd = b'%s %s %s' % (
         sshcmd,
         args,
         procutil.shellquote(
-            '%s -R %s serve --stdio'
+            b'%s -R %s serve --stdio'
             % (_serverquote(remotecmd), _serverquote(path))
         ),
     )
 
-    ui.debug('running %s\n' % cmd)
+    ui.debug(b'running %s\n' % cmd)
     cmd = procutil.quotecommand(cmd)
 
     # no buffering, to allow the use of 'select'
@@ -192,12 +192,12 @@
 
     Returns a list of capabilities that are supported by this client.
     """
-    protoparams = {'partial-pull'}
+    protoparams = {b'partial-pull'}
     comps = [
         e.wireprotosupport().name
         for e in util.compengines.supportedwireengines(util.CLIENTROLE)
     ]
-    protoparams.add('comp=%s' % ','.join(comps))
+    protoparams.add(b'comp=%s' % b','.join(comps))
     return protoparams
 
 
@@ -206,8 +206,8 @@
         # Flush any output on stderr.
         _forwardoutput(ui, stderr)
 
-        msg = _('no suitable response from remote hg')
-        hint = ui.config('ui', 'ssherrorhint')
+        msg = _(b'no suitable response from remote hg')
+        hint = ui.config(b'ui', b'ssherrorhint')
         raise error.RepoError(msg, hint=hint)
 
     # The handshake consists of sending wire protocol commands in reverse
@@ -262,37 +262,37 @@
     # for output to our well-known ``between`` command. Of course, if
     # the banner contains ``1\n\n``, this will throw off our detection.
 
-    requestlog = ui.configbool('devel', 'debug.peer-request')
+    requestlog = ui.configbool(b'devel', b'debug.peer-request')
 
     # Generate a random token to help identify responses to version 2
     # upgrade request.
     token = pycompat.sysbytes(str(uuid.uuid4()))
     upgradecaps = [
-        ('proto', wireprotoserver.SSHV2),
+        (b'proto', wireprotoserver.SSHV2),
     ]
     upgradecaps = util.urlreq.urlencode(upgradecaps)
 
     try:
-        pairsarg = '%s-%s' % ('0' * 40, '0' * 40)
+        pairsarg = b'%s-%s' % (b'0' * 40, b'0' * 40)
         handshake = [
-            'hello\n',
-            'between\n',
-            'pairs %d\n' % len(pairsarg),
+            b'hello\n',
+            b'between\n',
+            b'pairs %d\n' % len(pairsarg),
             pairsarg,
         ]
 
         # Request upgrade to version 2 if configured.
-        if ui.configbool('experimental', 'sshpeer.advertise-v2'):
-            ui.debug('sending upgrade request: %s %s\n' % (token, upgradecaps))
-            handshake.insert(0, 'upgrade %s %s\n' % (token, upgradecaps))
+        if ui.configbool(b'experimental', b'sshpeer.advertise-v2'):
+            ui.debug(b'sending upgrade request: %s %s\n' % (token, upgradecaps))
+            handshake.insert(0, b'upgrade %s %s\n' % (token, upgradecaps))
 
         if requestlog:
-            ui.debug('devel-peer-request: hello+between\n')
-            ui.debug('devel-peer-request:   pairs: %d bytes\n' % len(pairsarg))
-        ui.debug('sending hello command\n')
-        ui.debug('sending between command\n')
+            ui.debug(b'devel-peer-request: hello+between\n')
+            ui.debug(b'devel-peer-request:   pairs: %d bytes\n' % len(pairsarg))
+        ui.debug(b'sending hello command\n')
+        ui.debug(b'sending between command\n')
 
-        stdin.write(''.join(handshake))
+        stdin.write(b''.join(handshake))
         stdin.flush()
     except IOError:
         badresponse()
@@ -301,7 +301,7 @@
     protoname = wireprototypes.SSHV1
     reupgraded = re.compile(b'^upgraded %s (.*)$' % stringutil.reescape(token))
 
-    lines = ['', 'dummy']
+    lines = [b'', b'dummy']
     max_noise = 500
     while lines[-1] and max_noise:
         try:
@@ -313,7 +313,7 @@
             m = reupgraded.match(l)
             if m:
                 protoname = m.group(1)
-                ui.debug('protocol upgraded to %s\n' % protoname)
+                ui.debug(b'protocol upgraded to %s\n' % protoname)
                 # If an upgrade was handled, the ``hello`` and ``between``
                 # requests are ignored. The next output belongs to the
                 # protocol, so stop scanning lines.
@@ -322,10 +322,10 @@
             # Otherwise it could be a banner, or a ``0\n`` response if the
             # server doesn't support the upgrade.
 
-            if lines[-1] == '1\n' and l == '\n':
+            if lines[-1] == b'1\n' and l == b'\n':
                 break
             if l:
-                ui.debug('remote: ', l)
+                ui.debug(b'remote: ', l)
             lines.append(l)
             max_noise -= 1
         except IOError:
@@ -341,8 +341,8 @@
         for l in reversed(lines):
             # Look for response to ``hello`` command. Scan from the back so
             # we don't misinterpret banner output as the command reply.
-            if l.startswith('capabilities:'):
-                caps.update(l[:-1].split(':')[1].split())
+            if l.startswith(b'capabilities:'):
+                caps.update(l[:-1].split(b':')[1].split())
                 break
     elif protoname == wireprotoserver.SSHV2:
         # We see a line with number of bytes to follow and then a value
@@ -354,12 +354,12 @@
             badresponse()
 
         capsline = stdout.read(valuelen)
-        if not capsline.startswith('capabilities: '):
+        if not capsline.startswith(b'capabilities: '):
             badresponse()
 
-        ui.debug('remote: %s\n' % capsline)
+        ui.debug(b'remote: %s\n' % capsline)
 
-        caps.update(capsline.split(':')[1].split())
+        caps.update(capsline.split(b':')[1].split())
         # Trailing newline.
         stdout.read(1)
 
@@ -412,7 +412,7 @@
     # Commands that have a "framed" response where the first line of the
     # response contains the length of that response.
     _FRAMED_COMMANDS = {
-        'batch',
+        b'batch',
     }
 
     # Begin of ipeerconnection interface.
@@ -455,34 +455,34 @@
 
     def _sendrequest(self, cmd, args, framed=False):
         if self.ui.debugflag and self.ui.configbool(
-            'devel', 'debug.peer-request'
+            b'devel', b'debug.peer-request'
         ):
             dbg = self.ui.debug
-            line = 'devel-peer-request: %s\n'
+            line = b'devel-peer-request: %s\n'
             dbg(line % cmd)
             for key, value in sorted(args.items()):
                 if not isinstance(value, dict):
-                    dbg(line % '  %s: %d bytes' % (key, len(value)))
+                    dbg(line % b'  %s: %d bytes' % (key, len(value)))
                 else:
                     for dk, dv in sorted(value.items()):
-                        dbg(line % '  %s-%s: %d' % (key, dk, len(dv)))
-        self.ui.debug("sending %s command\n" % cmd)
-        self._pipeo.write("%s\n" % cmd)
+                        dbg(line % b'  %s-%s: %d' % (key, dk, len(dv)))
+        self.ui.debug(b"sending %s command\n" % cmd)
+        self._pipeo.write(b"%s\n" % cmd)
         _func, names = wireprotov1server.commands[cmd]
         keys = names.split()
         wireargs = {}
         for k in keys:
-            if k == '*':
-                wireargs['*'] = args
+            if k == b'*':
+                wireargs[b'*'] = args
                 break
             else:
                 wireargs[k] = args[k]
                 del args[k]
         for k, v in sorted(wireargs.iteritems()):
-            self._pipeo.write("%s %d\n" % (k, len(v)))
+            self._pipeo.write(b"%s %d\n" % (k, len(v)))
             if isinstance(v, dict):
                 for dk, dv in v.iteritems():
-                    self._pipeo.write("%s %d\n" % (dk, len(dv)))
+                    self._pipeo.write(b"%s %d\n" % (dk, len(dv)))
                     self._pipeo.write(dv)
             else:
                 self._pipeo.write(v)
@@ -515,21 +515,21 @@
         # continue submitting the payload.
         r = self._call(cmd, **args)
         if r:
-            return '', r
+            return b'', r
 
         # The payload consists of frames with content followed by an empty
         # frame.
-        for d in iter(lambda: fp.read(4096), ''):
+        for d in iter(lambda: fp.read(4096), b''):
             self._writeframed(d)
-        self._writeframed("", flush=True)
+        self._writeframed(b"", flush=True)
 
         # In case of success, there is an empty frame and a frame containing
         # the integer result (as a string).
         # In case of error, there is a non-empty frame containing the error.
         r = self._readframed()
         if r:
-            return '', r
-        return self._readframed(), ''
+            return b'', r
+        return self._readframed(), b''
 
     def _calltwowaystream(self, cmd, fp, **args):
         # The server responds with an empty frame if the client should
@@ -537,29 +537,29 @@
         r = self._call(cmd, **args)
         if r:
             # XXX needs to be made better
-            raise error.Abort(_('unexpected remote reply: %s') % r)
+            raise error.Abort(_(b'unexpected remote reply: %s') % r)
 
         # The payload consists of frames with content followed by an empty
         # frame.
-        for d in iter(lambda: fp.read(4096), ''):
+        for d in iter(lambda: fp.read(4096), b''):
             self._writeframed(d)
-        self._writeframed("", flush=True)
+        self._writeframed(b"", flush=True)
 
         return self._pipei
 
     def _getamount(self):
         l = self._pipei.readline()
-        if l == '\n':
+        if l == b'\n':
             if self._autoreadstderr:
                 self._readerr()
-            msg = _('check previous remote output')
+            msg = _(b'check previous remote output')
             self._abort(error.OutOfBandError(hint=msg))
         if self._autoreadstderr:
             self._readerr()
         try:
             return int(l)
         except ValueError:
-            self._abort(error.ResponseError(_("unexpected response:"), l))
+            self._abort(error.ResponseError(_(b"unexpected response:"), l))
 
     def _readframed(self):
         size = self._getamount()
@@ -569,7 +569,7 @@
         return self._pipei.read(size)
 
     def _writeframed(self, data, flush=False):
-        self._pipeo.write("%d\n" % len(data))
+        self._pipeo.write(b"%d\n" % len(data))
         if data:
             self._pipeo.write(data)
         if flush:
@@ -631,7 +631,7 @@
     else:
         _cleanuppipes(ui, stdout, stdin, stderr)
         raise error.RepoError(
-            _('unknown version of SSH protocol: %s') % protoname
+            _(b'unknown version of SSH protocol: %s') % protoname
         )
 
 
@@ -641,19 +641,19 @@
     The returned object conforms to the ``wireprotov1peer.wirepeer`` interface.
     """
     u = util.url(path, parsequery=False, parsefragment=False)
-    if u.scheme != 'ssh' or not u.host or u.path is None:
-        raise error.RepoError(_("couldn't parse location %s") % path)
+    if u.scheme != b'ssh' or not u.host or u.path is None:
+        raise error.RepoError(_(b"couldn't parse location %s") % path)
 
     util.checksafessh(path)
 
     if u.passwd is not None:
-        raise error.RepoError(_('password in URL not supported'))
+        raise error.RepoError(_(b'password in URL not supported'))
 
-    sshcmd = ui.config('ui', 'ssh')
-    remotecmd = ui.config('ui', 'remotecmd')
-    sshaddenv = dict(ui.configitems('sshenv'))
+    sshcmd = ui.config(b'ui', b'ssh')
+    remotecmd = ui.config(b'ui', b'remotecmd')
+    sshaddenv = dict(ui.configitems(b'sshenv'))
     sshenv = procutil.shellenviron(sshaddenv)
-    remotepath = u.path or '.'
+    remotepath = u.path or b'.'
 
     args = procutil.sshargs(sshcmd, u.host, u.user, u.port)
 
@@ -664,21 +664,24 @@
         # supports said requested feature.
         if createopts:
             raise error.RepoError(
-                _('cannot create remote SSH repositories ' 'with extra options')
+                _(
+                    b'cannot create remote SSH repositories '
+                    b'with extra options'
+                )
             )
 
-        cmd = '%s %s %s' % (
+        cmd = b'%s %s %s' % (
             sshcmd,
             args,
             procutil.shellquote(
-                '%s init %s'
+                b'%s init %s'
                 % (_serverquote(remotecmd), _serverquote(remotepath))
             ),
         )
-        ui.debug('running %s\n' % cmd)
-        res = ui.system(cmd, blockedtag='sshpeer', environ=sshenv)
+        ui.debug(b'running %s\n' % cmd)
+        res = ui.system(cmd, blockedtag=b'sshpeer', environ=sshenv)
         if res != 0:
-            raise error.RepoError(_('could not create remote repo'))
+            raise error.RepoError(_(b'could not create remote repo'))
 
     proc, stdin, stdout, stderr = _makeconnection(
         ui, sshcmd, args, remotecmd, remotepath, sshenv
@@ -688,13 +691,13 @@
 
     # Finally, if supported by the server, notify it about our own
     # capabilities.
-    if 'protocaps' in peer.capabilities():
+    if b'protocaps' in peer.capabilities():
         try:
             peer._call(
-                "protocaps", caps=' '.join(sorted(_clientcapabilities()))
+                b"protocaps", caps=b' '.join(sorted(_clientcapabilities()))
             )
         except IOError:
             peer._cleanup()
-            raise error.RepoError(_('capability exchange failed'))
+            raise error.RepoError(_(b'capability exchange failed'))
 
     return peer
--- a/mercurial/sslutil.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/sslutil.py	Sun Oct 06 09:48:39 2019 -0400
@@ -36,27 +36,27 @@
 # separate code paths depending on support in Python.
 
 configprotocols = {
-    'tls1.0',
-    'tls1.1',
-    'tls1.2',
+    b'tls1.0',
+    b'tls1.1',
+    b'tls1.2',
 }
 
 hassni = getattr(ssl, 'HAS_SNI', False)
 
 # TLS 1.1 and 1.2 may not be supported if the OpenSSL Python is compiled
 # against doesn't support them.
-supportedprotocols = {'tls1.0'}
-if util.safehasattr(ssl, 'PROTOCOL_TLSv1_1'):
-    supportedprotocols.add('tls1.1')
-if util.safehasattr(ssl, 'PROTOCOL_TLSv1_2'):
-    supportedprotocols.add('tls1.2')
+supportedprotocols = {b'tls1.0'}
+if util.safehasattr(ssl, b'PROTOCOL_TLSv1_1'):
+    supportedprotocols.add(b'tls1.1')
+if util.safehasattr(ssl, b'PROTOCOL_TLSv1_2'):
+    supportedprotocols.add(b'tls1.2')
 
 try:
     # ssl.SSLContext was added in 2.7.9 and presence indicates modern
     # SSL/TLS features are available.
     SSLContext = ssl.SSLContext
     modernssl = True
-    _canloaddefaultcerts = util.safehasattr(SSLContext, 'load_default_certs')
+    _canloaddefaultcerts = util.safehasattr(SSLContext, b'load_default_certs')
 except AttributeError:
     modernssl = False
     _canloaddefaultcerts = False
@@ -87,9 +87,9 @@
 
         def load_verify_locations(self, cafile=None, capath=None, cadata=None):
             if capath:
-                raise error.Abort(_('capath not supported'))
+                raise error.Abort(_(b'capath not supported'))
             if cadata:
-                raise error.Abort(_('cadata not supported'))
+                raise error.Abort(_(b'cadata not supported'))
 
             self._cacerts = cafile
 
@@ -123,175 +123,182 @@
     s = {
         # Whether we should attempt to load default/available CA certs
         # if an explicit ``cafile`` is not defined.
-        'allowloaddefaultcerts': True,
+        b'allowloaddefaultcerts': True,
         # List of 2-tuple of (hash algorithm, hash).
-        'certfingerprints': [],
+        b'certfingerprints': [],
         # Path to file containing concatenated CA certs. Used by
         # SSLContext.load_verify_locations().
-        'cafile': None,
+        b'cafile': None,
         # Whether certificate verification should be disabled.
-        'disablecertverification': False,
+        b'disablecertverification': False,
         # Whether the legacy [hostfingerprints] section has data for this host.
-        'legacyfingerprint': False,
+        b'legacyfingerprint': False,
         # PROTOCOL_* constant to use for SSLContext.__init__.
-        'protocol': None,
+        b'protocol': None,
         # String representation of minimum protocol to be used for UI
         # presentation.
-        'protocolui': None,
+        b'protocolui': None,
         # ssl.CERT_* constant used by SSLContext.verify_mode.
-        'verifymode': None,
+        b'verifymode': None,
         # Defines extra ssl.OP* bitwise options to set.
-        'ctxoptions': None,
+        b'ctxoptions': None,
         # OpenSSL Cipher List to use (instead of default).
-        'ciphers': None,
+        b'ciphers': None,
     }
 
     # Allow minimum TLS protocol to be specified in the config.
     def validateprotocol(protocol, key):
         if protocol not in configprotocols:
             raise error.Abort(
-                _('unsupported protocol from hostsecurity.%s: %s')
+                _(b'unsupported protocol from hostsecurity.%s: %s')
                 % (key, protocol),
-                hint=_('valid protocols: %s')
-                % ' '.join(sorted(configprotocols)),
+                hint=_(b'valid protocols: %s')
+                % b' '.join(sorted(configprotocols)),
             )
 
     # We default to TLS 1.1+ where we can because TLS 1.0 has known
     # vulnerabilities (like BEAST and POODLE). We allow users to downgrade to
     # TLS 1.0+ via config options in case a legacy server is encountered.
-    if 'tls1.1' in supportedprotocols:
-        defaultprotocol = 'tls1.1'
+    if b'tls1.1' in supportedprotocols:
+        defaultprotocol = b'tls1.1'
     else:
         # Let people know they are borderline secure.
         # We don't document this config option because we want people to see
         # the bold warnings on the web site.
         # internal config: hostsecurity.disabletls10warning
-        if not ui.configbool('hostsecurity', 'disabletls10warning'):
+        if not ui.configbool(b'hostsecurity', b'disabletls10warning'):
             ui.warn(
                 _(
-                    'warning: connecting to %s using legacy security '
-                    'technology (TLS 1.0); see '
-                    'https://mercurial-scm.org/wiki/SecureConnections for '
-                    'more info\n'
+                    b'warning: connecting to %s using legacy security '
+                    b'technology (TLS 1.0); see '
+                    b'https://mercurial-scm.org/wiki/SecureConnections for '
+                    b'more info\n'
                 )
                 % bhostname
             )
-        defaultprotocol = 'tls1.0'
+        defaultprotocol = b'tls1.0'
 
-    key = 'minimumprotocol'
-    protocol = ui.config('hostsecurity', key, defaultprotocol)
+    key = b'minimumprotocol'
+    protocol = ui.config(b'hostsecurity', key, defaultprotocol)
     validateprotocol(protocol, key)
 
-    key = '%s:minimumprotocol' % bhostname
-    protocol = ui.config('hostsecurity', key, protocol)
+    key = b'%s:minimumprotocol' % bhostname
+    protocol = ui.config(b'hostsecurity', key, protocol)
     validateprotocol(protocol, key)
 
     # If --insecure is used, we allow the use of TLS 1.0 despite config options.
     # We always print a "connection security to %s is disabled..." message when
     # --insecure is used. So no need to print anything more here.
     if ui.insecureconnections:
-        protocol = 'tls1.0'
-
-    s['protocol'], s['ctxoptions'], s['protocolui'] = protocolsettings(protocol)
+        protocol = b'tls1.0'
 
-    ciphers = ui.config('hostsecurity', 'ciphers')
-    ciphers = ui.config('hostsecurity', '%s:ciphers' % bhostname, ciphers)
-    s['ciphers'] = ciphers
+    s[b'protocol'], s[b'ctxoptions'], s[b'protocolui'] = protocolsettings(
+        protocol
+    )
+
+    ciphers = ui.config(b'hostsecurity', b'ciphers')
+    ciphers = ui.config(b'hostsecurity', b'%s:ciphers' % bhostname, ciphers)
+    s[b'ciphers'] = ciphers
 
     # Look for fingerprints in [hostsecurity] section. Value is a list
     # of <alg>:<fingerprint> strings.
-    fingerprints = ui.configlist('hostsecurity', '%s:fingerprints' % bhostname)
+    fingerprints = ui.configlist(
+        b'hostsecurity', b'%s:fingerprints' % bhostname
+    )
     for fingerprint in fingerprints:
-        if not (fingerprint.startswith(('sha1:', 'sha256:', 'sha512:'))):
+        if not (fingerprint.startswith((b'sha1:', b'sha256:', b'sha512:'))):
             raise error.Abort(
-                _('invalid fingerprint for %s: %s') % (bhostname, fingerprint),
-                hint=_('must begin with "sha1:", "sha256:", ' 'or "sha512:"'),
+                _(b'invalid fingerprint for %s: %s') % (bhostname, fingerprint),
+                hint=_(b'must begin with "sha1:", "sha256:", ' b'or "sha512:"'),
             )
 
-        alg, fingerprint = fingerprint.split(':', 1)
-        fingerprint = fingerprint.replace(':', '').lower()
-        s['certfingerprints'].append((alg, fingerprint))
+        alg, fingerprint = fingerprint.split(b':', 1)
+        fingerprint = fingerprint.replace(b':', b'').lower()
+        s[b'certfingerprints'].append((alg, fingerprint))
 
     # Fingerprints from [hostfingerprints] are always SHA-1.
-    for fingerprint in ui.configlist('hostfingerprints', bhostname):
-        fingerprint = fingerprint.replace(':', '').lower()
-        s['certfingerprints'].append(('sha1', fingerprint))
-        s['legacyfingerprint'] = True
+    for fingerprint in ui.configlist(b'hostfingerprints', bhostname):
+        fingerprint = fingerprint.replace(b':', b'').lower()
+        s[b'certfingerprints'].append((b'sha1', fingerprint))
+        s[b'legacyfingerprint'] = True
 
     # If a host cert fingerprint is defined, it is the only thing that
     # matters. No need to validate CA certs.
-    if s['certfingerprints']:
-        s['verifymode'] = ssl.CERT_NONE
-        s['allowloaddefaultcerts'] = False
+    if s[b'certfingerprints']:
+        s[b'verifymode'] = ssl.CERT_NONE
+        s[b'allowloaddefaultcerts'] = False
 
     # If --insecure is used, don't take CAs into consideration.
     elif ui.insecureconnections:
-        s['disablecertverification'] = True
-        s['verifymode'] = ssl.CERT_NONE
-        s['allowloaddefaultcerts'] = False
+        s[b'disablecertverification'] = True
+        s[b'verifymode'] = ssl.CERT_NONE
+        s[b'allowloaddefaultcerts'] = False
 
-    if ui.configbool('devel', 'disableloaddefaultcerts'):
-        s['allowloaddefaultcerts'] = False
+    if ui.configbool(b'devel', b'disableloaddefaultcerts'):
+        s[b'allowloaddefaultcerts'] = False
 
     # If both fingerprints and a per-host ca file are specified, issue a warning
     # because users should not be surprised about what security is or isn't
     # being performed.
-    cafile = ui.config('hostsecurity', '%s:verifycertsfile' % bhostname)
-    if s['certfingerprints'] and cafile:
+    cafile = ui.config(b'hostsecurity', b'%s:verifycertsfile' % bhostname)
+    if s[b'certfingerprints'] and cafile:
         ui.warn(
             _(
-                '(hostsecurity.%s:verifycertsfile ignored when host '
-                'fingerprints defined; using host fingerprints for '
-                'verification)\n'
+                b'(hostsecurity.%s:verifycertsfile ignored when host '
+                b'fingerprints defined; using host fingerprints for '
+                b'verification)\n'
             )
             % bhostname
         )
 
     # Try to hook up CA certificate validation unless something above
     # makes it not necessary.
-    if s['verifymode'] is None:
+    if s[b'verifymode'] is None:
         # Look at per-host ca file first.
         if cafile:
             cafile = util.expandpath(cafile)
             if not os.path.exists(cafile):
                 raise error.Abort(
-                    _('path specified by %s does not exist: %s')
-                    % ('hostsecurity.%s:verifycertsfile' % (bhostname,), cafile)
+                    _(b'path specified by %s does not exist: %s')
+                    % (
+                        b'hostsecurity.%s:verifycertsfile' % (bhostname,),
+                        cafile,
+                    )
                 )
-            s['cafile'] = cafile
+            s[b'cafile'] = cafile
         else:
             # Find global certificates file in config.
-            cafile = ui.config('web', 'cacerts')
+            cafile = ui.config(b'web', b'cacerts')
 
             if cafile:
                 cafile = util.expandpath(cafile)
                 if not os.path.exists(cafile):
                     raise error.Abort(
-                        _('could not find web.cacerts: %s') % cafile
+                        _(b'could not find web.cacerts: %s') % cafile
                     )
-            elif s['allowloaddefaultcerts']:
+            elif s[b'allowloaddefaultcerts']:
                 # CAs not defined in config. Try to find system bundles.
                 cafile = _defaultcacerts(ui)
                 if cafile:
-                    ui.debug('using %s for CA file\n' % cafile)
+                    ui.debug(b'using %s for CA file\n' % cafile)
 
-            s['cafile'] = cafile
+            s[b'cafile'] = cafile
 
         # Require certificate validation if CA certs are being loaded and
         # verification hasn't been disabled above.
-        if cafile or (_canloaddefaultcerts and s['allowloaddefaultcerts']):
-            s['verifymode'] = ssl.CERT_REQUIRED
+        if cafile or (_canloaddefaultcerts and s[b'allowloaddefaultcerts']):
+            s[b'verifymode'] = ssl.CERT_REQUIRED
         else:
             # At this point we don't have a fingerprint, aren't being
             # explicitly insecure, and can't load CA certs. Connecting
             # is insecure. We allow the connection and abort during
             # validation (once we have the fingerprint to print to the
             # user).
-            s['verifymode'] = ssl.CERT_NONE
+            s[b'verifymode'] = ssl.CERT_NONE
 
-    assert s['protocol'] is not None
-    assert s['ctxoptions'] is not None
-    assert s['verifymode'] is not None
+    assert s[b'protocol'] is not None
+    assert s[b'ctxoptions'] is not None
+    assert s[b'verifymode'] is not None
 
     return s
 
@@ -304,7 +311,7 @@
     of the ``minimumprotocol`` config option equivalent.
     """
     if protocol not in configprotocols:
-        raise ValueError('protocol value not supported: %s' % protocol)
+        raise ValueError(b'protocol value not supported: %s' % protocol)
 
     # Despite its name, PROTOCOL_SSLv23 selects the highest protocol
     # that both ends support, including TLS protocols. On legacy stacks,
@@ -317,18 +324,18 @@
     # disable protocols via SSLContext.options and OP_NO_* constants.
     # However, SSLContext.options doesn't work unless we have the
     # full/real SSLContext available to us.
-    if supportedprotocols == {'tls1.0'}:
-        if protocol != 'tls1.0':
+    if supportedprotocols == {b'tls1.0'}:
+        if protocol != b'tls1.0':
             raise error.Abort(
-                _('current Python does not support protocol ' 'setting %s')
+                _(b'current Python does not support protocol ' b'setting %s')
                 % protocol,
                 hint=_(
-                    'upgrade Python or disable setting since '
-                    'only TLS 1.0 is supported'
+                    b'upgrade Python or disable setting since '
+                    b'only TLS 1.0 is supported'
                 ),
             )
 
-        return ssl.PROTOCOL_TLSv1, 0, 'tls1.0'
+        return ssl.PROTOCOL_TLSv1, 0, b'tls1.0'
 
     # WARNING: returned options don't work unless the modern ssl module
     # is available. Be careful when adding options here.
@@ -336,15 +343,15 @@
     # SSLv2 and SSLv3 are broken. We ban them outright.
     options = ssl.OP_NO_SSLv2 | ssl.OP_NO_SSLv3
 
-    if protocol == 'tls1.0':
+    if protocol == b'tls1.0':
         # Defaults above are to use TLS 1.0+
         pass
-    elif protocol == 'tls1.1':
+    elif protocol == b'tls1.1':
         options |= ssl.OP_NO_TLSv1
-    elif protocol == 'tls1.2':
+    elif protocol == b'tls1.2':
         options |= ssl.OP_NO_TLSv1 | ssl.OP_NO_TLSv1_1
     else:
-        raise error.Abort(_('this should not happen'))
+        raise error.Abort(_(b'this should not happen'))
 
     # Prevent CRIME.
     # There is no guarantee this attribute is defined on the module.
@@ -367,7 +374,7 @@
       to use.
     """
     if not serverhostname:
-        raise error.Abort(_('serverhostname argument is required'))
+        raise error.Abort(_(b'serverhostname argument is required'))
 
     if b'SSLKEYLOGFILE' in encoding.environ:
         try:
@@ -388,11 +395,11 @@
     for f in (keyfile, certfile):
         if f and not os.path.exists(f):
             raise error.Abort(
-                _('certificate file (%s) does not exist; cannot connect to %s')
+                _(b'certificate file (%s) does not exist; cannot connect to %s')
                 % (f, pycompat.bytesurl(serverhostname)),
                 hint=_(
-                    'restore missing file or fix references '
-                    'in Mercurial config'
+                    b'restore missing file or fix references '
+                    b'in Mercurial config'
                 ),
             )
 
@@ -405,48 +412,48 @@
     # bundle with a specific CA cert removed. If the system/default CA bundle
     # is loaded and contains that removed CA, you've just undone the user's
     # choice.
-    sslcontext = SSLContext(settings['protocol'])
+    sslcontext = SSLContext(settings[b'protocol'])
 
     # This is a no-op unless using modern ssl.
-    sslcontext.options |= settings['ctxoptions']
+    sslcontext.options |= settings[b'ctxoptions']
 
     # This still works on our fake SSLContext.
-    sslcontext.verify_mode = settings['verifymode']
+    sslcontext.verify_mode = settings[b'verifymode']
 
-    if settings['ciphers']:
+    if settings[b'ciphers']:
         try:
-            sslcontext.set_ciphers(pycompat.sysstr(settings['ciphers']))
+            sslcontext.set_ciphers(pycompat.sysstr(settings[b'ciphers']))
         except ssl.SSLError as e:
             raise error.Abort(
-                _('could not set ciphers: %s')
+                _(b'could not set ciphers: %s')
                 % stringutil.forcebytestr(e.args[0]),
-                hint=_('change cipher string (%s) in config')
-                % settings['ciphers'],
+                hint=_(b'change cipher string (%s) in config')
+                % settings[b'ciphers'],
             )
 
     if certfile is not None:
 
         def password():
             f = keyfile or certfile
-            return ui.getpass(_('passphrase for %s: ') % f, '')
+            return ui.getpass(_(b'passphrase for %s: ') % f, b'')
 
         sslcontext.load_cert_chain(certfile, keyfile, password)
 
-    if settings['cafile'] is not None:
+    if settings[b'cafile'] is not None:
         try:
-            sslcontext.load_verify_locations(cafile=settings['cafile'])
+            sslcontext.load_verify_locations(cafile=settings[b'cafile'])
         except ssl.SSLError as e:
             if len(e.args) == 1:  # pypy has different SSLError args
                 msg = e.args[0]
             else:
                 msg = e.args[1]
             raise error.Abort(
-                _('error loading CA file %s: %s')
-                % (settings['cafile'], stringutil.forcebytestr(msg)),
-                hint=_('file is empty or malformed?'),
+                _(b'error loading CA file %s: %s')
+                % (settings[b'cafile'], stringutil.forcebytestr(msg)),
+                hint=_(b'file is empty or malformed?'),
             )
         caloaded = True
-    elif settings['allowloaddefaultcerts']:
+    elif settings[b'allowloaddefaultcerts']:
         # This is a no-op on old Python.
         sslcontext.load_default_certs()
         caloaded = True
@@ -468,24 +475,24 @@
         try:
             if (
                 caloaded
-                and settings['verifymode'] == ssl.CERT_REQUIRED
+                and settings[b'verifymode'] == ssl.CERT_REQUIRED
                 and modernssl
                 and not sslcontext.get_ca_certs()
             ):
                 ui.warn(
                     _(
-                        '(an attempt was made to load CA certificates but '
-                        'none were loaded; see '
-                        'https://mercurial-scm.org/wiki/SecureConnections '
-                        'for how to configure Mercurial to avoid this '
-                        'error)\n'
+                        b'(an attempt was made to load CA certificates but '
+                        b'none were loaded; see '
+                        b'https://mercurial-scm.org/wiki/SecureConnections '
+                        b'for how to configure Mercurial to avoid this '
+                        b'error)\n'
                     )
                 )
         except ssl.SSLError:
             pass
 
         # Try to print more helpful error messages for known failures.
-        if util.safehasattr(e, 'reason'):
+        if util.safehasattr(e, b'reason'):
             # This error occurs when the client and server don't share a
             # common/supported SSL/TLS protocol. We've disabled SSLv2 and SSLv3
             # outright. Hopefully the reason for this error is that we require
@@ -493,36 +500,36 @@
             # reason, try to emit an actionable warning.
             if e.reason == r'UNSUPPORTED_PROTOCOL':
                 # We attempted TLS 1.0+.
-                if settings['protocolui'] == 'tls1.0':
+                if settings[b'protocolui'] == b'tls1.0':
                     # We support more than just TLS 1.0+. If this happens,
                     # the likely scenario is either the client or the server
                     # is really old. (e.g. server doesn't support TLS 1.0+ or
                     # client doesn't support modern TLS versions introduced
                     # several years before this comment was written).
-                    if supportedprotocols != {'tls1.0'}:
+                    if supportedprotocols != {b'tls1.0'}:
                         ui.warn(
                             _(
-                                '(could not communicate with %s using security '
-                                'protocols %s; if you are using a modern Mercurial '
-                                'version, consider contacting the operator of this '
-                                'server; see '
-                                'https://mercurial-scm.org/wiki/SecureConnections '
-                                'for more info)\n'
+                                b'(could not communicate with %s using security '
+                                b'protocols %s; if you are using a modern Mercurial '
+                                b'version, consider contacting the operator of this '
+                                b'server; see '
+                                b'https://mercurial-scm.org/wiki/SecureConnections '
+                                b'for more info)\n'
                             )
                             % (
                                 pycompat.bytesurl(serverhostname),
-                                ', '.join(sorted(supportedprotocols)),
+                                b', '.join(sorted(supportedprotocols)),
                             )
                         )
                     else:
                         ui.warn(
                             _(
-                                '(could not communicate with %s using TLS 1.0; the '
-                                'likely cause of this is the server no longer '
-                                'supports TLS 1.0 because it has known security '
-                                'vulnerabilities; see '
-                                'https://mercurial-scm.org/wiki/SecureConnections '
-                                'for more info)\n'
+                                b'(could not communicate with %s using TLS 1.0; the '
+                                b'likely cause of this is the server no longer '
+                                b'supports TLS 1.0 because it has known security '
+                                b'vulnerabilities; see '
+                                b'https://mercurial-scm.org/wiki/SecureConnections '
+                                b'for more info)\n'
                             )
                             % pycompat.bytesurl(serverhostname)
                         )
@@ -533,30 +540,30 @@
                     # offer.
                     ui.warn(
                         _(
-                            '(could not negotiate a common security protocol (%s+) '
-                            'with %s; the likely cause is Mercurial is configured '
-                            'to be more secure than the server can support)\n'
+                            b'(could not negotiate a common security protocol (%s+) '
+                            b'with %s; the likely cause is Mercurial is configured '
+                            b'to be more secure than the server can support)\n'
                         )
                         % (
-                            settings['protocolui'],
+                            settings[b'protocolui'],
                             pycompat.bytesurl(serverhostname),
                         )
                     )
                     ui.warn(
                         _(
-                            '(consider contacting the operator of this '
-                            'server and ask them to support modern TLS '
-                            'protocol versions; or, set '
-                            'hostsecurity.%s:minimumprotocol=tls1.0 to allow '
-                            'use of legacy, less secure protocols when '
-                            'communicating with this server)\n'
+                            b'(consider contacting the operator of this '
+                            b'server and ask them to support modern TLS '
+                            b'protocol versions; or, set '
+                            b'hostsecurity.%s:minimumprotocol=tls1.0 to allow '
+                            b'use of legacy, less secure protocols when '
+                            b'communicating with this server)\n'
                         )
                         % pycompat.bytesurl(serverhostname)
                     )
                     ui.warn(
                         _(
-                            '(see https://mercurial-scm.org/wiki/SecureConnections '
-                            'for more info)\n'
+                            b'(see https://mercurial-scm.org/wiki/SecureConnections '
+                            b'for more info)\n'
                         )
                     )
 
@@ -566,8 +573,8 @@
 
                 ui.warn(
                     _(
-                        '(the full certificate chain may not be available '
-                        'locally; see "hg help debugssl")\n'
+                        b'(the full certificate chain may not be available '
+                        b'locally; see "hg help debugssl")\n'
                     )
                 )
         raise
@@ -576,13 +583,13 @@
     # closed
     # - see http://bugs.python.org/issue13721
     if not sslsocket.cipher():
-        raise error.Abort(_('ssl connection failed'))
+        raise error.Abort(_(b'ssl connection failed'))
 
     sslsocket._hgstate = {
-        'caloaded': caloaded,
-        'hostname': serverhostname,
-        'settings': settings,
-        'ui': ui,
+        b'caloaded': caloaded,
+        b'hostname': serverhostname,
+        b'settings': settings,
+        b'ui': ui,
     }
 
     return sslsocket
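
wrapsocket() hands back the socket with _hgstate attached so that
validatesocket() (further below) can pick the settings back up. A sketch of
the expected pairing -- sock, ui, and the hostname are illustrative, and the
keyword form follows callers such as mercurial/url.py:

    from mercurial import sslutil

    # sock: an already-connected TCP socket; ui: a mercurial ui object (assumed)
    sock = sslutil.wrapsocket(
        sock, None, None, ui=ui, serverhostname=b'example.com'
    )
    sslutil.validatesocket(sock)  # raises error.Abort if verification fails
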
@@ -608,27 +615,27 @@
     for f in (certfile, keyfile, cafile):
         if f and not os.path.exists(f):
             raise error.Abort(
-                _('referenced certificate file (%s) does not ' 'exist') % f
+                _(b'referenced certificate file (%s) does not exist') % f
             )
 
-    protocol, options, _protocolui = protocolsettings('tls1.0')
+    protocol, options, _protocolui = protocolsettings(b'tls1.0')
 
     # This config option is intended for use in tests only. It is a giant
     # footgun to kill security. Don't define it.
-    exactprotocol = ui.config('devel', 'serverexactprotocol')
-    if exactprotocol == 'tls1.0':
+    exactprotocol = ui.config(b'devel', b'serverexactprotocol')
+    if exactprotocol == b'tls1.0':
         protocol = ssl.PROTOCOL_TLSv1
-    elif exactprotocol == 'tls1.1':
-        if 'tls1.1' not in supportedprotocols:
-            raise error.Abort(_('TLS 1.1 not supported by this Python'))
+    elif exactprotocol == b'tls1.1':
+        if b'tls1.1' not in supportedprotocols:
+            raise error.Abort(_(b'TLS 1.1 not supported by this Python'))
         protocol = ssl.PROTOCOL_TLSv1_1
-    elif exactprotocol == 'tls1.2':
-        if 'tls1.2' not in supportedprotocols:
-            raise error.Abort(_('TLS 1.2 not supported by this Python'))
+    elif exactprotocol == b'tls1.2':
+        if b'tls1.2' not in supportedprotocols:
+            raise error.Abort(_(b'TLS 1.2 not supported by this Python'))
         protocol = ssl.PROTOCOL_TLSv1_2
     elif exactprotocol:
         raise error.Abort(
-            _('invalid value for serverexactprotocol: %s') % exactprotocol
+            _(b'invalid value for serverexactprotocol: %s') % exactprotocol
         )
 
     if modernssl:
@@ -643,7 +650,7 @@
         sslcontext.options |= getattr(ssl, 'OP_SINGLE_ECDH_USE', 0)
 
         # Use the list of more secure ciphers if found in the ssl module.
-        if util.safehasattr(ssl, '_RESTRICTED_SERVER_CIPHERS'):
+        if util.safehasattr(ssl, b'_RESTRICTED_SERVER_CIPHERS'):
             sslcontext.options |= getattr(ssl, 'OP_CIPHER_SERVER_PREFERENCE', 0)
             sslcontext.set_ciphers(ssl._RESTRICTED_SERVER_CIPHERS)
     else:
@@ -681,13 +688,13 @@
     dn = pycompat.bytesurl(dn)
     hostname = pycompat.bytesurl(hostname)
 
-    pieces = dn.split('.')
+    pieces = dn.split(b'.')
     leftmost = pieces[0]
     remainder = pieces[1:]
-    wildcards = leftmost.count('*')
+    wildcards = leftmost.count(b'*')
     if wildcards > maxwildcards:
         raise wildcarderror(
-            _('too many wildcards in certificate DNS name: %s') % dn
+            _(b'too many wildcards in certificate DNS name: %s') % dn
         )
 
     # speed up common case w/o wildcards
@@ -697,11 +704,11 @@
     # RFC 6125, section 6.4.3, subitem 1.
     # The client SHOULD NOT attempt to match a presented identifier in which
     # the wildcard character comprises a label other than the left-most label.
-    if leftmost == '*':
+    if leftmost == b'*':
         # When '*' is a fragment by itself, it matches a non-empty dotless
         # fragment.
-        pats.append('[^.]+')
-    elif leftmost.startswith('xn--') or hostname.startswith('xn--'):
+        pats.append(b'[^.]+')
+    elif leftmost.startswith(b'xn--') or hostname.startswith(b'xn--'):
         # RFC 6125, section 6.4.3, subitem 3.
         # The client SHOULD NOT attempt to match a presented identifier
         # where the wildcard character is embedded within an A-label or
@@ -709,7 +716,7 @@
         pats.append(stringutil.reescape(leftmost))
     else:
         # Otherwise, '*' matches any dotless string, e.g. www*
-        pats.append(stringutil.reescape(leftmost).replace(br'\*', '[^.]*'))
+        pats.append(stringutil.reescape(leftmost).replace(br'\*', b'[^.]*'))
 
     # add the remaining fragments, ignore any wildcards
     for frag in remainder:
@@ -726,7 +733,7 @@
     Returns error message if any problems are found and None on success.
     '''
     if not cert:
-        return _('no certificate received')
+        return _(b'no certificate received')
 
     dnsnames = []
     san = cert.get(r'subjectAltName', [])
@@ -751,7 +758,7 @@
                     try:
                         value = value.encode('ascii')
                     except UnicodeEncodeError:
-                        return _('IDN in certificate not supported')
+                        return _(b'IDN in certificate not supported')
 
                     try:
                         if _dnsnamematch(value, hostname):
@@ -763,11 +770,11 @@
 
     dnsnames = [pycompat.bytesurl(d) for d in dnsnames]
     if len(dnsnames) > 1:
-        return _('certificate is for %s') % ', '.join(dnsnames)
+        return _(b'certificate is for %s') % b', '.join(dnsnames)
     elif len(dnsnames) == 1:
-        return _('certificate is for %s') % dnsnames[0]
+        return _(b'certificate is for %s') % dnsnames[0]
     else:
-        return _('no commonName or subjectAltName found in certificate')
+        return _(b'no commonName or subjectAltName found in certificate')
 
 
 def _plainapplepython():
@@ -785,16 +792,16 @@
     ):
         return False
     exe = os.path.realpath(pycompat.sysexecutable).lower()
-    return exe.startswith('/usr/bin/python') or exe.startswith(
-        '/system/library/frameworks/python.framework/'
+    return exe.startswith(b'/usr/bin/python') or exe.startswith(
+        b'/system/library/frameworks/python.framework/'
     )
 
 
 _systemcacertpaths = [
     # RHEL, CentOS, and Fedora
-    '/etc/pki/tls/certs/ca-bundle.trust.crt',
+    b'/etc/pki/tls/certs/ca-bundle.trust.crt',
     # Debian, Ubuntu, Gentoo
-    '/etc/ssl/certs/ca-certificates.crt',
+    b'/etc/ssl/certs/ca-certificates.crt',
 ]
 
 
@@ -815,7 +822,7 @@
 
         certs = certifi.where()
         if os.path.exists(certs):
-            ui.debug('using ca certificates from certifi\n')
+            ui.debug(b'using ca certificates from certifi\n')
             return pycompat.fsencode(certs)
     except (ImportError, AttributeError):
         pass
@@ -829,9 +836,9 @@
         if not _canloaddefaultcerts:
             ui.warn(
                 _(
-                    '(unable to load Windows CA certificates; see '
-                    'https://mercurial-scm.org/wiki/SecureConnections for '
-                    'how to configure Mercurial to avoid this message)\n'
+                    b'(unable to load Windows CA certificates; see '
+                    b'https://mercurial-scm.org/wiki/SecureConnections for '
+                    b'how to configure Mercurial to avoid this message)\n'
                 )
             )
 
@@ -842,7 +849,7 @@
     # trick.
     if _plainapplepython():
         dummycert = os.path.join(
-            os.path.dirname(pycompat.fsencode(__file__)), 'dummycert.pem'
+            os.path.dirname(pycompat.fsencode(__file__)), b'dummycert.pem'
         )
         if os.path.exists(dummycert):
             return dummycert
@@ -856,9 +863,9 @@
         if not _canloaddefaultcerts:
             ui.warn(
                 _(
-                    '(unable to load CA certificates; see '
-                    'https://mercurial-scm.org/wiki/SecureConnections for '
-                    'how to configure Mercurial to avoid this message)\n'
+                    b'(unable to load CA certificates; see '
+                    b'https://mercurial-scm.org/wiki/SecureConnections for '
+                    b'how to configure Mercurial to avoid this message)\n'
                 )
             )
         return None
@@ -880,12 +887,12 @@
             if os.path.isfile(path):
                 ui.warn(
                     _(
-                        '(using CA certificates from %s; if you see this '
-                        'message, your Mercurial install is not properly '
-                        'configured; see '
-                        'https://mercurial-scm.org/wiki/SecureConnections '
-                        'for how to configure Mercurial to avoid this '
-                        'message)\n'
+                        b'(using CA certificates from %s; if you see this '
+                        b'message, your Mercurial install is not properly '
+                        b'configured; see '
+                        b'https://mercurial-scm.org/wiki/SecureConnections '
+                        b'for how to configure Mercurial to avoid this '
+                        b'message)\n'
                     )
                     % path
                 )
@@ -893,9 +900,9 @@
 
         ui.warn(
             _(
-                '(unable to load CA certificates; see '
-                'https://mercurial-scm.org/wiki/SecureConnections for '
-                'how to configure Mercurial to avoid this message)\n'
+                b'(unable to load CA certificates; see '
+                b'https://mercurial-scm.org/wiki/SecureConnections for '
+                b'how to configure Mercurial to avoid this message)\n'
             )
         )
 
@@ -907,23 +914,23 @@
 
     The passed socket must have been created with ``wrapsocket()``.
     """
-    shost = sock._hgstate['hostname']
+    shost = sock._hgstate[b'hostname']
     host = pycompat.bytesurl(shost)
-    ui = sock._hgstate['ui']
-    settings = sock._hgstate['settings']
+    ui = sock._hgstate[b'ui']
+    settings = sock._hgstate[b'settings']
 
     try:
         peercert = sock.getpeercert(True)
         peercert2 = sock.getpeercert()
     except AttributeError:
-        raise error.Abort(_('%s ssl connection error') % host)
+        raise error.Abort(_(b'%s ssl connection error') % host)
 
     if not peercert:
         raise error.Abort(
-            _('%s certificate error: ' 'no certificate received') % host
+            _(b'%s certificate error: no certificate received') % host
         )
 
-    if settings['disablecertverification']:
+    if settings[b'disablecertverification']:
         # We don't print the certificate fingerprint because it shouldn't
         # be necessary: if the user requested certificate verification be
         # disabled, they presumably already saw a message about the inability
@@ -932,9 +939,9 @@
         # value.
         ui.warn(
             _(
-                'warning: connection security to %s is disabled per current '
-                'settings; communication is susceptible to eavesdropping '
-                'and tampering\n'
+                b'warning: connection security to %s is disabled per current '
+                b'settings; communication is susceptible to eavesdropping '
+                b'and tampering\n'
             )
             % host
         )
@@ -943,63 +950,63 @@
     # If a certificate fingerprint is pinned, use it and only it to
     # validate the remote cert.
     peerfingerprints = {
-        'sha1': node.hex(hashlib.sha1(peercert).digest()),
-        'sha256': node.hex(hashlib.sha256(peercert).digest()),
-        'sha512': node.hex(hashlib.sha512(peercert).digest()),
+        b'sha1': node.hex(hashlib.sha1(peercert).digest()),
+        b'sha256': node.hex(hashlib.sha256(peercert).digest()),
+        b'sha512': node.hex(hashlib.sha512(peercert).digest()),
     }
 
     def fmtfingerprint(s):
-        return ':'.join([s[x : x + 2] for x in range(0, len(s), 2)])
+        return b':'.join([s[x : x + 2] for x in range(0, len(s), 2)])
 
-    nicefingerprint = 'sha256:%s' % fmtfingerprint(peerfingerprints['sha256'])
+    nicefingerprint = b'sha256:%s' % fmtfingerprint(peerfingerprints[b'sha256'])
 
-    if settings['certfingerprints']:
-        for hash, fingerprint in settings['certfingerprints']:
+    if settings[b'certfingerprints']:
+        for hash, fingerprint in settings[b'certfingerprints']:
             if peerfingerprints[hash].lower() == fingerprint:
                 ui.debug(
-                    '%s certificate matched fingerprint %s:%s\n'
+                    b'%s certificate matched fingerprint %s:%s\n'
                     % (host, hash, fmtfingerprint(fingerprint))
                 )
-                if settings['legacyfingerprint']:
+                if settings[b'legacyfingerprint']:
                     ui.warn(
                         _(
-                            '(SHA-1 fingerprint for %s found in legacy '
-                            '[hostfingerprints] section; '
-                            'if you trust this fingerprint, remove the old '
-                            'SHA-1 fingerprint from [hostfingerprints] and '
-                            'add the following entry to the new '
-                            '[hostsecurity] section: %s:fingerprints=%s)\n'
+                            b'(SHA-1 fingerprint for %s found in legacy '
+                            b'[hostfingerprints] section; '
+                            b'if you trust this fingerprint, remove the old '
+                            b'SHA-1 fingerprint from [hostfingerprints] and '
+                            b'add the following entry to the new '
+                            b'[hostsecurity] section: %s:fingerprints=%s)\n'
                         )
                         % (host, host, nicefingerprint)
                     )
                 return
 
         # Pinned fingerprint didn't match. This is a fatal error.
-        if settings['legacyfingerprint']:
-            section = 'hostfingerprint'
-            nice = fmtfingerprint(peerfingerprints['sha1'])
+        if settings[b'legacyfingerprint']:
+            section = b'hostfingerprint'
+            nice = fmtfingerprint(peerfingerprints[b'sha1'])
         else:
-            section = 'hostsecurity'
-            nice = '%s:%s' % (hash, fmtfingerprint(peerfingerprints[hash]))
+            section = b'hostsecurity'
+            nice = b'%s:%s' % (hash, fmtfingerprint(peerfingerprints[hash]))
         raise error.Abort(
-            _('certificate for %s has unexpected ' 'fingerprint %s')
+            _(b'certificate for %s has unexpected fingerprint %s')
             % (host, nice),
-            hint=_('check %s configuration') % section,
+            hint=_(b'check %s configuration') % section,
         )
 
     # Security is enabled but no CAs are loaded. We can't establish trust
     # for the cert so abort.
-    if not sock._hgstate['caloaded']:
+    if not sock._hgstate[b'caloaded']:
         raise error.Abort(
             _(
-                'unable to verify security of %s (no loaded CA certificates); '
-                'refusing to connect'
+                b'unable to verify security of %s (no loaded CA certificates); '
+                b'refusing to connect'
             )
             % host,
             hint=_(
-                'see https://mercurial-scm.org/wiki/SecureConnections for '
-                'how to configure Mercurial to avoid this error or set '
-                'hostsecurity.%s:fingerprints=%s to trust this server'
+                b'see https://mercurial-scm.org/wiki/SecureConnections for '
+                b'how to configure Mercurial to avoid this error or set '
+                b'hostsecurity.%s:fingerprints=%s to trust this server'
             )
             % (host, nicefingerprint),
         )
@@ -1007,11 +1014,11 @@
     msg = _verifycert(peercert2, shost)
     if msg:
         raise error.Abort(
-            _('%s certificate error: %s') % (host, msg),
+            _(b'%s certificate error: %s') % (host, msg),
             hint=_(
-                'set hostsecurity.%s:certfingerprints=%s '
-                'config setting or use --insecure to connect '
-                'insecurely'
+                b'set hostsecurity.%s:certfingerprints=%s '
+                b'config setting or use --insecure to connect '
+                b'insecurely'
             )
             % (host, nicefingerprint),
         )
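
The pinning code in validatesocket() above compares colon-separated hex
digests of the raw DER certificate. A self-contained sketch of computing the
value to pin in hostsecurity -- peercert here is a placeholder blob:

    import binascii
    import hashlib

    def fmtfingerprint(s):
        # b'deadbeef' -> b'de:ad:be:ef', as in validatesocket() above
        return b':'.join([s[x : x + 2] for x in range(0, len(s), 2)])

    peercert = b'<DER-encoded certificate bytes>'  # placeholder
    sha256 = binascii.hexlify(hashlib.sha256(peercert).digest())
    print(b'sha256:%s' % fmtfingerprint(sha256))
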
--- a/mercurial/stack.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/stack.py	Sun Oct 06 09:48:39 2019 -0400
@@ -16,9 +16,9 @@
     the revision and are not merges.
     """
     if rev is None:
-        rev = '.'
+        rev = b'.'
 
-    revspec = 'only(%s) and not public() and not ::merge()'
+    revspec = b'only(%s) and not public() and not ::merge()'
     revisions = repo.revs(revspec, rev)
     revisions.sort()
     return revisions
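
The revset above selects the draft, non-merge ancestors reachable only from
the given revision; the equivalent command line is
hg log -r 'only(.) and not public() and not ::merge()'. A sketch of calling it
through the API, assuming an existing repository (the path is illustrative):

    from mercurial import hg, ui as uimod
    from mercurial.stack import getstack

    repo = hg.repository(uimod.ui.load(), b'/path/to/repo')
    for rev in getstack(repo):  # rev=None defaults to b'.', the working parent
        print(rev)
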
--- a/mercurial/state.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/state.py	Sun Oct 06 09:48:39 2019 -0400
@@ -60,23 +60,23 @@
         """
         if not isinstance(version, int):
             raise error.ProgrammingError(
-                "version of state file should be" " an integer"
+                b"version of state file should be" b" an integer"
             )
 
-        with self._repo.vfs(self.fname, 'wb', atomictemp=True) as fp:
-            fp.write('%d\n' % version)
+        with self._repo.vfs(self.fname, b'wb', atomictemp=True) as fp:
+            fp.write(b'%d\n' % version)
             for chunk in cborutil.streamencode(data):
                 fp.write(chunk)
 
     def _read(self):
         """reads the state file and returns a dictionary which contain
         data in the same format as it was before storing"""
-        with self._repo.vfs(self.fname, 'rb') as fp:
+        with self._repo.vfs(self.fname, b'rb') as fp:
             try:
                 int(fp.readline())
             except ValueError:
                 raise error.CorruptedState(
-                    "unknown version of state file" " found"
+                    b"unknown version of state file" b" found"
                 )
 
             return cborutil.decodeall(fp.read())[0]
@@ -133,12 +133,12 @@
         """
         if not self._statushint:
             hint = _(
-                'To continue:    hg %s --continue\n'
-                'To abort:       hg %s --abort'
+                b'To continue:    hg %s --continue\n'
+                b'To abort:       hg %s --abort'
             ) % (self._opname, self._opname)
             if self._stopflag:
                 hint = hint + (
-                    _('\nTo stop:        hg %s --stop') % (self._opname)
+                    _(b'\nTo stop:        hg %s --stop') % (self._opname)
                 )
             return hint
         return self._statushint
@@ -148,7 +148,7 @@
         operation
         """
         if not self._cmdhint:
-            return _("use 'hg %s --continue' or 'hg %s --abort'") % (
+            return _(b"use 'hg %s --continue' or 'hg %s --abort'") % (
                 self._opname,
                 self._opname,
             )
@@ -157,18 +157,18 @@
     def msg(self):
         """returns the status message corresponding to the command"""
         if not self._cmdmsg:
-            return _('%s in progress') % (self._opname)
+            return _(b'%s in progress') % (self._opname)
         return self._cmdmsg
 
     def continuemsg(self):
         """ returns appropriate continue message corresponding to command"""
-        return _('hg %s --continue') % (self._opname)
+        return _(b'hg %s --continue') % (self._opname)
 
     def isunfinished(self, repo):
         """determines whether a multi-step operation is in progress
         or not
         """
-        if self._opname == 'merge':
+        if self._opname == b'merge':
             return len(repo[None].parents()) > 1
         else:
             return repo.vfs.exists(self._fname)
@@ -186,9 +186,9 @@
     reportonly=False,
     continueflag=False,
     stopflag=False,
-    cmdmsg="",
-    cmdhint="",
-    statushint="",
+    cmdmsg=b"",
+    cmdhint=b"",
+    statushint=b"",
     abortfunc=None,
     continuefunc=None,
 ):
@@ -233,36 +233,36 @@
         abortfunc,
         continuefunc,
     )
-    if opname == 'merge':
+    if opname == b'merge':
         _unfinishedstates.append(statecheckobj)
     else:
         _unfinishedstates.insert(0, statecheckobj)
 
 
 addunfinished(
-    'update',
-    fname='updatestate',
+    b'update',
+    fname=b'updatestate',
     clearable=True,
-    cmdmsg=_('last update was interrupted'),
-    cmdhint=_("use 'hg update' to get a consistent checkout"),
-    statushint=_("To continue:    hg update ."),
+    cmdmsg=_(b'last update was interrupted'),
+    cmdhint=_(b"use 'hg update' to get a consistent checkout"),
+    statushint=_(b"To continue:    hg update ."),
 )
 addunfinished(
-    'bisect',
-    fname='bisect.state',
+    b'bisect',
+    fname=b'bisect.state',
     allowcommit=True,
     reportonly=True,
     statushint=_(
-        'To mark the changeset good:    hg bisect --good\n'
-        'To mark the changeset bad:     hg bisect --bad\n'
-        'To abort:                      hg bisect --reset\n'
+        b'To mark the changeset good:    hg bisect --good\n'
+        b'To mark the changeset bad:     hg bisect --bad\n'
+        b'To abort:                      hg bisect --reset\n'
     ),
 )
 
 
 def getrepostate(repo):
     # experimental config: commands.status.skipstates
-    skip = set(repo.ui.configlist('commands', 'status.skipstates'))
+    skip = set(repo.ui.configlist(b'commands', b'status.skipstates'))
     for state in _unfinishedstates:
         if state._opname in skip:
             continue
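
For reference, _write()/_read() above agree on a simple container: a decimal
version line followed by one CBOR-encoded value. A round-trip sketch using the
same cborutil helpers -- the data mapping is illustrative:

    from mercurial.utils import cborutil

    data = {b'opname': b'rebase', b'inprogress': True}  # illustrative payload
    blob = b'%d\n' % 1 + b''.join(cborutil.streamencode(data))

    version, _sep, payload = blob.partition(b'\n')
    assert int(version) == 1
    assert cborutil.decodeall(payload)[0] == data
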
--- a/mercurial/statichttprepo.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/statichttprepo.py	Sun Oct 06 09:48:39 2019 -0400
@@ -49,7 +49,7 @@
 
     def read(self, bytes=None):
         req = urlreq.request(pycompat.strurl(self.url))
-        end = ''
+        end = b''
         if bytes:
             end = self.pos + bytes - 1
         if self.pos or end:
@@ -112,7 +112,7 @@
 
     def http_error_416(self, req, fp, code, msg, hdrs):
         # HTTP's Range Not Satisfiable error
-        raise _RangeError('Requested Range Not Satisfiable')
+        raise _RangeError(b'Requested Range Not Satisfiable')
 
 
 def build_opener(ui, authinfo):
@@ -125,10 +125,10 @@
             self.base = base
             self.options = {}
 
-        def __call__(self, path, mode='r', *args, **kw):
-            if mode not in ('r', 'rb'):
-                raise IOError('Permission denied')
-            f = "/".join((self.base, urlreq.quote(path)))
+        def __call__(self, path, mode=b'r', *args, **kw):
+            if mode not in (b'r', b'rb'):
+                raise IOError(b'Permission denied')
+            f = b"/".join((self.base, urlreq.quote(path)))
             return httprangereader(f, urlopener)
 
         def join(self, path):
@@ -158,12 +158,12 @@
         self.ui = ui
 
         self.root = path
-        u = util.url(path.rstrip('/') + "/.hg")
+        u = util.url(path.rstrip(b'/') + b"/.hg")
         self.path, authinfo = u.authinfo()
 
         vfsclass = build_opener(ui, authinfo)
         self.vfs = vfsclass(self.path)
-        self.cachevfs = vfsclass(self.vfs.join('cache'))
+        self.cachevfs = vfsclass(self.vfs.join(b'cache'))
         self._phasedefaults = []
 
         self.names = namespaces.namespaces()
@@ -179,14 +179,14 @@
 
             # check if it is a non-empty old-style repository
             try:
-                fp = self.vfs("00changelog.i")
+                fp = self.vfs(b"00changelog.i")
                 fp.read(1)
                 fp.close()
             except IOError as inst:
                 if inst.errno != errno.ENOENT:
                     raise
                 # we do not care about empty old-style repositories here
-                msg = _("'%s' does not appear to be an hg repository") % path
+                msg = _(b"'%s' does not appear to be an hg repository") % path
                 raise error.RepoError(msg)
 
         supportedrequirements = localrepo.gathersupportedrequirements(ui)
@@ -218,7 +218,7 @@
 
     def _restrictcapabilities(self, caps):
         caps = super(statichttprepository, self)._restrictcapabilities(caps)
-        return caps.difference(["pushkey"])
+        return caps.difference([b"pushkey"])
 
     def url(self):
         return self._url
@@ -232,13 +232,13 @@
     def wlock(self, wait=True):
         raise error.LockUnavailable(
             0,
-            _('lock not available'),
-            'lock',
-            _('cannot lock static-http repository'),
+            _(b'lock not available'),
+            b'lock',
+            _(b'cannot lock static-http repository'),
         )
 
     def lock(self, wait=True):
-        raise error.Abort(_('cannot lock static-http repository'))
+        raise error.Abort(_(b'cannot lock static-http repository'))
 
     def _writecaches(self):
         pass  # statichttprepository are read only
@@ -246,5 +246,5 @@
 
 def instance(ui, path, create, intents=None, createopts=None):
     if create:
-        raise error.Abort(_('cannot create new static-http repository'))
+        raise error.Abort(_(b'cannot create new static-http repository'))
     return statichttprepository(ui, path[7:])
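
httprangereader.read() above fetches file slices with HTTP Range requests. A
tiny pure-Python sketch of the header value it computes from the position and
requested byte count (names illustrative):

    def rangevalue(pos, nbytes=None):
        # open-ended range without a count, otherwise an inclusive end offset,
        # mirroring the pos/bytes arithmetic in read() above
        end = b'%d' % (pos + nbytes - 1) if nbytes else b''
        return b'bytes=%d-%s' % (pos, end)

    assert rangevalue(0, 10) == b'bytes=0-9'
    assert rangevalue(100) == b'bytes=100-'
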
--- a/mercurial/statprof.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/statprof.py	Sun Oct 06 09:48:39 2019 -0400
@@ -122,7 +122,7 @@
 defaultdict = collections.defaultdict
 contextmanager = contextlib.contextmanager
 
-__all__ = ['start', 'stop', 'reset', 'display', 'profile']
+__all__ = [r'start', r'stop', r'reset', r'display', r'profile']
 
 skips = {
     r"util.py:check",
@@ -157,7 +157,7 @@
 class ProfileState(object):
     def __init__(self, frequency=None):
         self.reset(frequency)
-        self.track = 'cpu'
+        self.track = b'cpu'
 
     def reset(self, frequency=None):
         # total so far
@@ -194,7 +194,7 @@
 
     @property
     def timeidx(self):
-        if self.track == 'real':
+        if self.track == b'real':
             return 1
         return 0
 
@@ -238,7 +238,7 @@
         if self.source is None:
             lineno = self.lineno - 1
             try:
-                with open(self.path, 'rb') as fp:
+                with open(self.path, b'rb') as fp:
                     for i, line in enumerate(fp):
                         if i == lineno:
                             self.source = line.strip()
@@ -246,11 +246,11 @@
             except:
                 pass
             if self.source is None:
-                self.source = ''
+                self.source = b''
 
         source = self.source
         if len(source) > length:
-            source = source[: (length - 3)] + "..."
+            source = source[: (length - 3)] + b"..."
         return source
 
     def filename(self):
@@ -330,7 +330,7 @@
 lastmechanism = None
 
 
-def start(mechanism='thread', track='cpu'):
+def start(mechanism=b'thread', track=b'cpu'):
     '''Install the profiling signal handler, and start profiling.'''
     state.track = track  # note: nesting different mode won't work
     state.profile_level += 1
@@ -342,16 +342,16 @@
         global lastmechanism
         lastmechanism = mechanism
 
-        if mechanism == 'signal':
+        if mechanism == b'signal':
             signal.signal(signal.SIGPROF, profile_signal_handler)
             signal.setitimer(
                 signal.ITIMER_PROF, rpt or state.sample_interval, 0.0
             )
-        elif mechanism == 'thread':
+        elif mechanism == b'thread':
             frame = inspect.currentframe()
             tid = [k for k, f in sys._current_frames().items() if f == frame][0]
             state.thread = threading.Thread(
-                target=samplerthread, args=(tid,), name="samplerthread"
+                target=samplerthread, args=(tid,), name=r"samplerthread"
             )
             state.thread.start()
 
@@ -360,17 +360,17 @@
     '''Stop profiling, and uninstall the profiling signal handler.'''
     state.profile_level -= 1
     if state.profile_level == 0:
-        if lastmechanism == 'signal':
+        if lastmechanism == b'signal':
             rpt = signal.setitimer(signal.ITIMER_PROF, 0.0, 0.0)
             signal.signal(signal.SIGPROF, signal.SIG_IGN)
             state.remaining_prof_time = rpt[0]
-        elif lastmechanism == 'thread':
+        elif lastmechanism == b'thread':
             stopthread.set()
             state.thread.join()
 
         state.accumulate_time(clock())
         state.last_start_time = None
-        statprofpath = encoding.environ.get('STATPROF_DEST')
+        statprofpath = encoding.environ.get(b'STATPROF_DEST')
         if statprofpath:
             save_data(statprofpath)
 
@@ -378,29 +378,30 @@
 
 
 def save_data(path):
-    with open(path, 'w+') as file:
-        file.write("%f %f\n" % state.accumulated_time)
+    with open(path, b'w+') as file:
+        file.write(b"%f %f\n" % state.accumulated_time)
         for sample in state.samples:
             time = sample.time
             stack = sample.stack
             sites = [
-                '\1'.join([s.path, b'%d' % s.lineno, s.function]) for s in stack
+                b'\1'.join([s.path, b'%d' % s.lineno, s.function])
+                for s in stack
             ]
-            file.write("%d\0%s\n" % (time, '\0'.join(sites)))
+            file.write(b"%d\0%s\n" % (time, b'\0'.join(sites)))
 
 
 def load_data(path):
-    lines = open(path, 'rb').read().splitlines()
+    lines = open(path, b'rb').read().splitlines()
 
     state.accumulated_time = [float(value) for value in lines[0].split()]
     state.samples = []
     for line in lines[1:]:
-        parts = line.split('\0')
+        parts = line.split(b'\0')
         time = float(parts[0])
         rawsites = parts[1:]
         sites = []
         for rawsite in rawsites:
-            siteparts = rawsite.split('\1')
+            siteparts = rawsite.split(b'\1')
             sites.append(
                 CodeSite.get(siteparts[0], int(siteparts[1]), siteparts[2])
             )
@@ -414,7 +415,7 @@
 
     The optional frequency argument specifies the number of samples to
     collect per second.'''
-    assert state.profile_level == 0, "Can't reset() while statprof is running"
+    assert state.profile_level == 0, b"Can't reset() while statprof is running"
     CodeSite.cache.clear()
     state.reset(frequency)
 
@@ -514,7 +515,7 @@
     elif format == DisplayFormats.Chrome:
         write_to_chrome(data, fp, **kwargs)
     else:
-        raise Exception("Invalid display format")
+        raise Exception(b"Invalid display format")
 
     if format not in (DisplayFormats.Json, DisplayFormats.Chrome):
         fp.write(b'---\n')
@@ -539,7 +540,7 @@
 
     for stat in stats:
         site = stat.site
-        sitelabel = '%s:%d:%s' % (site.filename(), site.lineno, site.function)
+        sitelabel = b'%s:%d:%s' % (site.filename(), site.lineno, site.function)
         fp.write(
             b'%6.2f %9.2f %9.2f  %s\n'
             % (
@@ -556,11 +557,12 @@
     as one row in a table.  Important lines within that function are
     output as nested rows.  Sorted by self-time per line.'''
     fp.write(
-        b'%5.5s %10.10s   %7.7s  %-8.8s\n' % ('%  ', 'cumulative', 'self', '')
+        b'%5.5s %10.10s   %7.7s  %-8.8s\n'
+        % (b'%  ', b'cumulative', b'self', b'')
     )
     fp.write(
         b'%5.5s  %9.9s  %8.8s  %-8.8s\n'
-        % ("time", "seconds", "seconds", "name")
+        % (b"time", b"seconds", b"seconds", b"name")
     )
 
     stats = SiteStats.buildstats(data.samples)
@@ -622,11 +624,11 @@
 
 def display_about_method(data, fp, function=None, **kwargs):
     if function is None:
-        raise Exception("Invalid function")
+        raise Exception(b"Invalid function")
 
     filename = None
-    if ':' in function:
-        filename, function = function.split(':')
+    if b':' in function:
+        filename, function = function.split(b':')
 
     relevant_samples = 0
     parents = {}
@@ -685,7 +687,7 @@
     fp.write(
         b'\n    %s:%s    Total: %0.2fs (%0.2f%%)    Self: %0.2fs (%0.2f%%)\n\n'
         % (
-            pycompat.sysbytes(filename or '___'),
+            pycompat.sysbytes(filename or b'___'),
             pycompat.sysbytes(function),
             total_cum_sec,
             total_cum_percent,
@@ -746,37 +748,41 @@
         ]
         if site:
             indent = depth * 2 - 1
-            filename = ''
-            function = ''
+            filename = b''
+            function = b''
             if len(node.children) > 0:
                 childsite = list(node.children.itervalues())[0].site
-                filename = (childsite.filename() + ':').ljust(15)
+                filename = (childsite.filename() + b':').ljust(15)
                 function = childsite.function
 
             # lots of string formatting
             listpattern = (
-                ''.ljust(indent)
-                + ('\\' if multiple_siblings else '|')
-                + ' %4.1f%%'
-                + (' %5.2fs' % node.count if showtime else '')
-                + '  %s %s'
+                b''.ljust(indent)
+                + (b'\\' if multiple_siblings else b'|')
+                + b' %4.1f%%'
+                + (b' %5.2fs' % node.count if showtime else b'')
+                + b'  %s %s'
             )
             liststring = listpattern % (
                 node.count / root.count * 100,
                 filename,
                 function,
             )
-            codepattern = '%' + ('%d' % (55 - len(liststring))) + 's %d:  %s'
-            codestring = codepattern % ('line', site.lineno, site.getsource(30))
+            codepattern = b'%' + (b'%d' % (55 - len(liststring))) + b's %d:  %s'
+            codestring = codepattern % (
+                b'line',
+                site.lineno,
+                site.getsource(30),
+            )
 
             finalstring = liststring + codestring
             childrensamples = sum([c.count for c in node.children.itervalues()])
             # Make frames that performed more than 10% of the operation red
             if node.count - childrensamples > (0.1 * root.count):
-                finalstring = '\033[91m' + finalstring + '\033[0m'
+                finalstring = b'\033[91m' + finalstring + b'\033[0m'
             # Make frames that didn't actually perform work dark grey
             elif node.count - childrensamples == 0:
-                finalstring = '\033[90m' + finalstring + '\033[0m'
+                finalstring = b'\033[90m' + finalstring + b'\033[0m'
             fp.write(finalstring + b'\n')
 
         newdepth = depth
@@ -793,7 +799,7 @@
 
 def write_to_flame(data, fp, scriptpath=None, outputfile=None, **kwargs):
     if scriptpath is None:
-        scriptpath = encoding.environ['HOME'] + '/flamegraph.pl'
+        scriptpath = encoding.environ[b'HOME'] + b'/flamegraph.pl'
     if not os.path.exists(scriptpath):
         fp.write(b'error: missing %s\n' % scriptpath)
         fp.write(b'get it here: https://github.com/brendangregg/FlameGraph\n')
@@ -803,7 +809,7 @@
     for sample in data.samples:
         sites = [s.function for s in sample.stack]
         sites.reverse()
-        line = ';'.join(sites)
+        line = b';'.join(sites)
         if line in lines:
             lines[line] = lines[line] + 1
         else:
@@ -811,14 +817,14 @@
 
     fd, path = pycompat.mkstemp()
 
-    with open(path, "w+") as file:
+    with open(path, b"w+") as file:
         for line, count in lines.iteritems():
-            file.write("%s %d\n" % (line, count))
+            file.write(b"%s %d\n" % (line, count))
 
     if outputfile is None:
-        outputfile = '~/flamegraph.svg'
+        outputfile = b'~/flamegraph.svg'
 
-    os.system("perl ~/flamegraph.pl %s > %s" % (path, outputfile))
+    os.system(b"perl ~/flamegraph.pl %s > %s" % (path, outputfile))
     fp.write(b'Written to %s\n' % outputfile)
 
 
@@ -983,7 +989,7 @@
     if not isinstance(data, bytes):
         data = data.encode('utf-8')
     fp.write(data)
-    fp.write('\n')
+    fp.write(b'\n')
 
 
 def printusage():
@@ -1020,19 +1026,19 @@
     displayargs = {}
 
     optstart = 2
-    displayargs['function'] = None
+    displayargs[b'function'] = None
     if argv[1] == r'hotpath':
-        displayargs['format'] = DisplayFormats.Hotpath
+        displayargs[b'format'] = DisplayFormats.Hotpath
     elif argv[1] == r'lines':
-        displayargs['format'] = DisplayFormats.ByLine
+        displayargs[b'format'] = DisplayFormats.ByLine
     elif argv[1] == r'functions':
-        displayargs['format'] = DisplayFormats.ByMethod
+        displayargs[b'format'] = DisplayFormats.ByMethod
     elif argv[1] == r'function':
-        displayargs['format'] = DisplayFormats.AboutMethod
-        displayargs['function'] = argv[2]
+        displayargs[b'format'] = DisplayFormats.AboutMethod
+        displayargs[b'function'] = argv[2]
         optstart = 3
     elif argv[1] == r'flame':
-        displayargs['format'] = DisplayFormats.FlameGraph
+        displayargs[b'format'] = DisplayFormats.FlameGraph
     else:
         printusage()
         return 0
@@ -1041,30 +1047,30 @@
     try:
         opts, args = pycompat.getoptb(
             sys.argv[optstart:],
-            "hl:f:o:p:",
-            ["help", "limit=", "file=", "output-file=", "script-path="],
+            b"hl:f:o:p:",
+            [b"help", b"limit=", b"file=", b"output-file=", b"script-path="],
         )
     except getopt.error as msg:
         print(msg)
         printusage()
         return 2
 
-    displayargs['limit'] = 0.05
+    displayargs[b'limit'] = 0.05
     path = None
     for o, value in opts:
         if o in (r"-l", r"--limit"):
-            displayargs['limit'] = float(value)
+            displayargs[b'limit'] = float(value)
         elif o in (r"-f", r"--file"):
             path = value
         elif o in (r"-o", r"--output-file"):
-            displayargs['outputfile'] = value
+            displayargs[b'outputfile'] = value
         elif o in (r"-p", r"--script-path"):
-            displayargs['scriptpath'] = value
+            displayargs[b'scriptpath'] = value
         elif o in (r"-h", r"help"):
             printusage()
             return 0
         else:
-            assert False, "unhandled option %s" % o
+            assert False, b"unhandled option %s" % o
 
     if not path:
         print(r'must specify --file to load')
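
save_data()/load_data() above share a line-oriented format: a header carrying
the two accumulated-time floats, then one line per sample -- the timestamp and
the stack separated by NUL bytes, each site packing path, line number, and
function with \1 bytes. A round-trip sketch with illustrative values:

    time, stack = 12, [(b'util.py', b'13', b'check'), (b'main.py', b'1', b'run')]
    line = b'%d\0%s\n' % (
        time,
        b'\0'.join(b'\1'.join(site) for site in stack),
    )

    parts = line.rstrip(b'\n').split(b'\0')
    assert int(parts[0]) == time
    assert [tuple(s.split(b'\1')) for s in parts[1:]] == stack
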
--- a/mercurial/store.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/store.py	Sun Oct 06 09:48:39 2019 -0400
@@ -40,12 +40,12 @@
     if matcher is None:
         return True
     path = decodedir(path)
-    if path.startswith('data/'):
-        return matcher(path[len('data/') : -len('.i')])
-    elif path.startswith('meta/'):
-        return matcher.visitdir(path[len('meta/') : -len('/00manifest.i')])
+    if path.startswith(b'data/'):
+        return matcher(path[len(b'data/') : -len(b'.i')])
+    elif path.startswith(b'meta/'):
+        return matcher.visitdir(path[len(b'meta/') : -len(b'/00manifest.i')])
 
-    raise error.ProgrammingError("cannot decode path %s" % path)
+    raise error.ProgrammingError(b"cannot decode path %s" % path)
 
 
 # This avoids a collision between a file named foo and a dir named
@@ -62,9 +62,9 @@
     'data/foo.i\\ndata/foo.i.hg/bla.i\\ndata/foo.i.hg.hg/bla.i\\n'
     '''
     return (
-        path.replace(".hg/", ".hg.hg/")
-        .replace(".i/", ".i.hg/")
-        .replace(".d/", ".d.hg/")
+        path.replace(b".hg/", b".hg.hg/")
+        .replace(b".i/", b".i.hg/")
+        .replace(b".d/", b".d.hg/")
     )
 
 
@@ -80,12 +80,12 @@
     >>> decodedir(b'data/foo.i.hg.hg/bla.i')
     'data/foo.i.hg/bla.i'
     '''
-    if ".hg/" not in path:
+    if b".hg/" not in path:
         return path
     return (
-        path.replace(".d.hg/", ".d/")
-        .replace(".i.hg/", ".i/")
-        .replace(".hg.hg/", ".hg/")
+        path.replace(b".d.hg/", b".d/")
+        .replace(b".i.hg/", b".i/")
+        .replace(b".hg.hg/", b".hg/")
     )
 
 
@@ -131,14 +131,14 @@
     >>> dec(b'the~07quick~adshot')
     'the\\x07quick\\xadshot'
     '''
-    e = '_'
+    e = b'_'
     xchr = pycompat.bytechr
     asciistr = list(map(xchr, range(127)))
-    capitals = list(range(ord("A"), ord("Z") + 1))
+    capitals = list(range(ord(b"A"), ord(b"Z") + 1))
 
     cmap = dict((x, x) for x in asciistr)
     for x in _reserved():
-        cmap[xchr(x)] = "~%02x" % x
+        cmap[xchr(x)] = b"~%02x" % x
     for x in capitals + [ord(e)]:
         cmap[xchr(x)] = e + xchr(x).lower()
 
@@ -160,10 +160,10 @@
                 raise KeyError
 
     return (
-        lambda s: ''.join(
+        lambda s: b''.join(
             [cmap[s[c : c + 1]] for c in pycompat.xrange(len(s))]
         ),
-        lambda s: ''.join(list(decode(s))),
+        lambda s: b''.join(list(decode(s))),
     )
 
 
@@ -201,12 +201,12 @@
     xchr = pycompat.bytechr
     cmap = dict([(xchr(x), xchr(x)) for x in pycompat.xrange(127)])
     for x in _reserved():
-        cmap[xchr(x)] = "~%02x" % x
-    for x in range(ord("A"), ord("Z") + 1):
+        cmap[xchr(x)] = b"~%02x" % x
+    for x in range(ord(b"A"), ord(b"Z") + 1):
         cmap[xchr(x)] = xchr(x).lower()
 
     def lowerencode(s):
-        return "".join([cmap[c] for c in pycompat.iterbytestr(s)])
+        return b"".join([cmap[c] for c in pycompat.iterbytestr(s)])
 
     return lowerencode
 
@@ -214,8 +214,8 @@
 lowerencode = getattr(parsers, 'lowerencode', None) or _buildlowerencodefun()
 
 # Windows reserved names: con, prn, aux, nul, com1..com9, lpt1..lpt9
-_winres3 = ('aux', 'con', 'prn', 'nul')  # length 3
-_winres4 = ('com', 'lpt')  # length 4 (with trailing 1..9)
+_winres3 = (b'aux', b'con', b'prn', b'nul')  # length 3
+_winres4 = (b'com', b'lpt')  # length 4 (with trailing 1..9)
 
 
 def _auxencode(path, dotencode):
@@ -243,23 +243,26 @@
     for i, n in enumerate(path):
         if not n:
             continue
-        if dotencode and n[0] in '. ':
-            n = "~%02x" % ord(n[0:1]) + n[1:]
+        if dotencode and n[0] in b'. ':
+            n = b"~%02x" % ord(n[0:1]) + n[1:]
             path[i] = n
         else:
-            l = n.find('.')
+            l = n.find(b'.')
             if l == -1:
                 l = len(n)
             if (l == 3 and n[:3] in _winres3) or (
-                l == 4 and n[3:4] <= '9' and n[3:4] >= '1' and n[:3] in _winres4
+                l == 4
+                and n[3:4] <= b'9'
+                and n[3:4] >= b'1'
+                and n[:3] in _winres4
             ):
                 # encode third letter ('aux' -> 'au~78')
-                ec = "~%02x" % ord(n[2:3])
+                ec = b"~%02x" % ord(n[2:3])
                 n = n[0:2] + ec + n[3:]
                 path[i] = n
-        if n[-1] in '. ':
+        if n[-1] in b'. ':
             # encode last period or space ('foo...' -> 'foo..~2e')
-            path[i] = n[:-1] + "~%02x" % ord(n[-1:])
+            path[i] = n[:-1] + b"~%02x" % ord(n[-1:])
     return path
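
A few concrete input/output pairs for _auxencode above, consistent with the
comments in the hunk (the function takes a list of path components plus the
dotencode flag; _auxencode is a private helper, imported here for
illustration only):

    from mercurial.store import _auxencode

    assert _auxencode([b'aux'], True) == [b'au~78']    # reserved name: third byte escaped
    assert _auxencode([b'.foo'], True) == [b'~2efoo']  # leading dot encoded
    assert _auxencode([b'foo.'], True) == [b'foo~2e']  # trailing dot encoded
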
 
 
@@ -270,7 +273,7 @@
 
 def _hashencode(path, dotencode):
     digest = node.hex(hashlib.sha1(path).digest())
-    le = lowerencode(path[5:]).split('/')  # skips prefix 'data/' or 'meta/'
+    le = lowerencode(path[5:]).split(b'/')  # skips prefix 'data/' or 'meta/'
     parts = _auxencode(le, dotencode)
     basename = parts[-1]
     _root, ext = os.path.splitext(basename)
@@ -278,9 +281,9 @@
     sdirslen = 0
     for p in parts[:-1]:
         d = p[:_dirprefixlen]
-        if d[-1] in '. ':
+        if d[-1] in b'. ':
             # Windows can't access dirs ending in period or space
-            d = d[:-1] + '_'
+            d = d[:-1] + b'_'
         if sdirslen == 0:
             t = len(d)
         else:
@@ -289,14 +292,14 @@
                 break
         sdirs.append(d)
         sdirslen = t
-    dirs = '/'.join(sdirs)
+    dirs = b'/'.join(sdirs)
     if len(dirs) > 0:
-        dirs += '/'
-    res = 'dh/' + dirs + digest + ext
+        dirs += b'/'
+    res = b'dh/' + dirs + digest + ext
     spaceleft = _maxstorepathlen - len(res)
     if spaceleft > 0:
         filler = basename[:spaceleft]
-        res = 'dh/' + dirs + filler + digest + ext
+        res = b'dh/' + dirs + filler + digest + ext
     return res
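
And once an encoded path would exceed _maxstorepathlen (120 by default), the
hashed form above takes over: the path collapses into the dh/ namespace with a
SHA-1 digest, keeping the extension. A sketch with an illustrative over-long
path (again importing a private helper purely for illustration):

    from mercurial.store import _hashencode

    path = b'data/' + b'x' * 200 + b'.i'  # deliberately over the limit
    res = _hashencode(path, True)
    assert res.startswith(b'dh/')
    assert res.endswith(b'.i')
    assert len(res) <= 120
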
 
 
@@ -332,8 +335,8 @@
     encoding was used.
     '''
     path = encodedir(path)
-    ef = _encodefname(path).split('/')
-    res = '/'.join(_auxencode(ef, dotencode))
+    ef = _encodefname(path).split(b'/')
+    res = b'/'.join(_auxencode(ef, dotencode))
     if len(res) > _maxstorepathlen:
         res = _hashencode(path, dotencode)
     return res
@@ -343,8 +346,8 @@
     de = encodedir(path)
     if len(path) > _maxstorepathlen:
         return _hashencode(de, True)
-    ef = _encodefname(de).split('/')
-    res = '/'.join(_auxencode(ef, True))
+    ef = _encodefname(de).split(b'/')
+    res = b'/'.join(_auxencode(ef, True))
     if len(res) > _maxstorepathlen:
         return _hashencode(de, True)
     return res
@@ -370,13 +373,13 @@
 
 
 _data = (
-    'bookmarks narrowspec data meta 00manifest.d 00manifest.i'
-    ' 00changelog.d 00changelog.i phaseroots obsstore'
+    b'bookmarks narrowspec data meta 00manifest.d 00manifest.i'
+    b' 00changelog.d 00changelog.i phaseroots obsstore'
 )
 
 
 def isrevlog(f, kind, st):
-    return kind == stat.S_IFREG and f[-2:] in ('.i', '.d')
+    return kind == stat.S_IFREG and f[-2:] in (b'.i', b'.d')
 
 
 class basicstore(object):
@@ -392,13 +395,13 @@
         self.opener = self.vfs
 
     def join(self, f):
-        return self.path + '/' + encodedir(f)
+        return self.path + b'/' + encodedir(f)
 
     def _walk(self, relpath, recurse, filefilter=isrevlog):
         '''yields (unencoded, encoded, size)'''
         path = self.path
         if relpath:
-            path += '/' + relpath
+            path += b'/' + relpath
         striplen = len(self.path) + 1
         l = []
         if self.rawvfs.isdir(path):
@@ -407,7 +410,7 @@
             while visit:
                 p = visit.pop()
                 for f, kind, st in readdir(p, stat=True):
-                    fp = p + '/' + f
+                    fp = p + b'/' + f
                     if filefilter(f, kind, st):
                         n = util.pconvert(fp[striplen:])
                         l.append((decodedir(n), n, st.st_size))
@@ -424,11 +427,11 @@
         return manifest.manifestlog(self.vfs, repo, rootstore, storenarrowmatch)
 
     def datafiles(self, matcher=None):
-        return self._walk('data', True) + self._walk('meta', True)
+        return self._walk(b'data', True) + self._walk(b'meta', True)
 
     def topfiles(self):
         # yield manifest before changelog
-        return reversed(self._walk('', False))
+        return reversed(self._walk(b'', False))
 
     def walk(self, matcher=None):
         '''yields (unencoded, encoded, size)
@@ -443,7 +446,7 @@
             yield x
 
     def copylist(self):
-        return ['requires'] + _data.split()
+        return [b'requires'] + _data.split()
 
     def write(self, tr):
         pass
@@ -456,19 +459,19 @@
 
     def __contains__(self, path):
         '''Checks if the store contains path'''
-        path = "/".join(("data", path))
+        path = b"/".join((b"data", path))
         # file?
-        if self.vfs.exists(path + ".i"):
+        if self.vfs.exists(path + b".i"):
             return True
         # dir?
-        if not path.endswith("/"):
-            path = path + "/"
+        if not path.endswith(b"/"):
+            path = path + b"/"
         return self.vfs.exists(path)
 
 
 class encodedstore(basicstore):
     def __init__(self, path, vfstype):
-        vfs = vfstype(path + '/store')
+        vfs = vfstype(path + b'/store')
         self.path = vfs.base
         self.createmode = _calcmode(vfs)
         vfs.createmode = self.createmode
@@ -487,11 +490,11 @@
             yield a, b, size
 
     def join(self, f):
-        return self.path + '/' + encodefilename(f)
+        return self.path + b'/' + encodefilename(f)
 
     def copylist(self):
-        return ['requires', '00changelog.i'] + [
-            'store/' + f for f in _data.split()
+        return [b'requires', b'00changelog.i'] + [
+            b'store/' + f for f in _data.split()
         ]
 
 
@@ -517,7 +520,7 @@
         '''fill the entries from the fncache file'''
         self._dirty = False
         try:
-            fp = self.vfs('fncache', mode='rb')
+            fp = self.vfs(b'fncache', mode=b'rb')
         except IOError:
             # skip nonexistent file
             self.entries = set()
@@ -537,14 +540,15 @@
                 pass
 
         if chunk:
-            msg = _("fncache does not ends with a newline")
+            msg = _(b"fncache does not ends with a newline")
             if warn:
-                warn(msg + '\n')
+                warn(msg + b'\n')
             else:
                 raise error.Abort(
                     msg,
                     hint=_(
-                        "use 'hg debugrebuildfncache' to " "rebuild the fncache"
+                        b"use 'hg debugrebuildfncache' to "
+                        b"rebuild the fncache"
                     ),
                 )
         self._checkentries(fp, warn)
@@ -552,13 +556,13 @@
 
     def _checkentries(self, fp, warn):
         """ make sure there is no empty string in entries """
-        if '' in self.entries:
+        if b'' in self.entries:
             fp.seek(0)
             for n, line in enumerate(util.iterfile(fp)):
-                if not line.rstrip('\n'):
-                    t = _('invalid entry in fncache, line %d') % (n + 1)
+                if not line.rstrip(b'\n'):
+                    t = _(b'invalid entry in fncache, line %d') % (n + 1)
                     if warn:
-                        warn(t + '\n')
+                        warn(t + b'\n')
                     else:
                         raise error.Abort(t)
 
@@ -567,18 +571,18 @@
             assert self.entries is not None
             self.entries = self.entries | self.addls
             self.addls = set()
-            tr.addbackup('fncache')
-            fp = self.vfs('fncache', mode='wb', atomictemp=True)
+            tr.addbackup(b'fncache')
+            fp = self.vfs(b'fncache', mode=b'wb', atomictemp=True)
             if self.entries:
-                fp.write(encodedir('\n'.join(self.entries) + '\n'))
+                fp.write(encodedir(b'\n'.join(self.entries) + b'\n'))
             fp.close()
             self._dirty = False
         if self.addls:
             # if we have just new entries, let's append them to the fncache
-            tr.addbackup('fncache')
-            fp = self.vfs('fncache', mode='ab', atomictemp=True)
+            tr.addbackup(b'fncache')
+            fp = self.vfs(b'fncache', mode=b'ab', atomictemp=True)
             if self.addls:
-                fp.write(encodedir('\n'.join(self.addls) + '\n'))
+                fp.write(encodedir(b'\n'.join(self.addls) + b'\n'))
             fp.close()
             self.entries = None
             self.addls = set()
@@ -620,15 +624,15 @@
         self.fncache = fnc
         self.encode = encode
 
-    def __call__(self, path, mode='r', *args, **kw):
+    def __call__(self, path, mode=b'r', *args, **kw):
         encoded = self.encode(path)
-        if mode not in ('r', 'rb') and (
-            path.startswith('data/') or path.startswith('meta/')
+        if mode not in (b'r', b'rb') and (
+            path.startswith(b'data/') or path.startswith(b'meta/')
         ):
             # do not trigger a fncache load when adding a file that already is
             # known to exist.
             notload = self.fncache.entries is None and self.vfs.exists(encoded)
-            if notload and 'a' in mode and not self.vfs.stat(encoded).st_size:
+            if notload and b'a' in mode and not self.vfs.stat(encoded).st_size:
                 # when appending to an existing file, if the file has size zero,
                 # it should be considered as missing. Such zero-size files are
                 # the result of truncation when a transaction is aborted.
@@ -651,9 +655,9 @@
         else:
             encode = _plainhybridencode
         self.encode = encode
-        vfs = vfstype(path + '/store')
+        vfs = vfstype(path + b'/store')
         self.path = vfs.base
-        self.pathsep = self.path + '/'
+        self.pathsep = self.path + b'/'
         self.createmode = _calcmode(vfs)
         vfs.createmode = self.createmode
         self.rawvfs = vfs
@@ -681,10 +685,12 @@
 
     def copylist(self):
         d = (
-            'bookmarks narrowspec data meta dh fncache phaseroots obsstore'
-            ' 00manifest.d 00manifest.i 00changelog.d 00changelog.i'
+            b'bookmarks narrowspec data meta dh fncache phaseroots obsstore'
+            b' 00manifest.d 00manifest.i 00changelog.d 00changelog.i'
         )
-        return ['requires', '00changelog.i'] + ['store/' + f for f in d.split()]
+        return [b'requires', b'00changelog.i'] + [
+            b'store/' + f for f in d.split()
+        ]
 
     def write(self, tr):
         self.fncache.write(tr)
@@ -709,14 +715,14 @@
 
     def __contains__(self, path):
         '''Checks if the store contains path'''
-        path = "/".join(("data", path))
+        path = b"/".join((b"data", path))
         # check for files (exact match)
-        e = path + '.i'
+        e = path + b'.i'
         if e in self.fncache and self._exists(e):
             return True
         # now check for directories (prefix match)
-        if not path.endswith('/'):
-            path += '/'
+        if not path.endswith(b'/'):
+            path += b'/'
         for e in self.fncache:
             if e.startswith(path) and self._exists(e):
                 return True
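
Stepping back, the reason every literal here gains a b prefix: on Python 3,
str and bytes do not mix, so paths held as bytes must be built from bytes
literals. A minimal illustration (not Mercurial code):

    path = b'foo/bar'
    joined = b'/'.join((b'data', path))  # fine: all operands are bytes
    assert joined == b'data/foo/bar'
    try:
        '/'.join(('data', path))  # str separator with a bytes element
    except TypeError as exc:
        print('mixed str/bytes:', exc)
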
--- a/mercurial/streamclone.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/streamclone.py	Sun Oct 06 09:48:39 2019 -0400
@@ -40,7 +40,7 @@
 
     bundle2supported = False
     if pullop.canusebundle2:
-        if 'v2' in pullop.remotebundle2caps.get('stream', []):
+        if b'v2' in pullop.remotebundle2caps.get(b'stream', []):
             bundle2supported = True
         # else
         # Server doesn't support bundle2 stream clone or doesn't support
@@ -67,7 +67,7 @@
     # likely only comes into play in LANs.
     if streamrequested is None:
         # The server can advertise whether to prefer streaming clone.
-        streamrequested = remote.capable('stream-preferred')
+        streamrequested = remote.capable(b'stream-preferred')
 
     if not streamrequested:
         return False, None
@@ -80,35 +80,35 @@
     # if the only requirement is "revlogv1." Else, the "streamreqs" capability
     # is advertised and contains a comma-delimited list of requirements.
     requirements = set()
-    if remote.capable('stream'):
-        requirements.add('revlogv1')
+    if remote.capable(b'stream'):
+        requirements.add(b'revlogv1')
     else:
-        streamreqs = remote.capable('streamreqs')
+        streamreqs = remote.capable(b'streamreqs')
         # This is weird and shouldn't happen with modern servers.
         if not streamreqs:
             pullop.repo.ui.warn(
                 _(
-                    'warning: stream clone requested but server has them '
-                    'disabled\n'
+                    b'warning: stream clone requested but server has it '
+                    b'disabled\n'
                 )
             )
             return False, None
 
-        streamreqs = set(streamreqs.split(','))
+        streamreqs = set(streamreqs.split(b','))
         # Server requires something we don't support. Bail.
         missingreqs = streamreqs - repo.supportedformats
         if missingreqs:
             pullop.repo.ui.warn(
                 _(
-                    'warning: stream clone requested but client is missing '
-                    'requirements: %s\n'
+                    b'warning: stream clone requested but client is missing '
+                    b'requirements: %s\n'
                 )
-                % ', '.join(sorted(missingreqs))
+                % b', '.join(sorted(missingreqs))
             )
             pullop.repo.ui.warn(
                 _(
-                    '(see https://www.mercurial-scm.org/wiki/MissingRequirement '
-                    'for more information)\n'
+                    b'(see https://www.mercurial-scm.org/wiki/MissingRequirement '
+                    b'for more information)\n'
                 )
             )
             return False, None
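
The negotiation in this hunk reduces to a set comparison: the legacy
'stream' capability implies revlogv1 only, while 'streamreqs' advertises a
comma-separated list that must be a subset of what the client supports. A
rough sketch (capability strings invented):

    def negotiate(streamreqs_cap, supportedformats):
        if streamreqs_cap is None:  # old server: bare 'stream' capability
            return {b'revlogv1'}, set()
        streamreqs = set(streamreqs_cap.split(b','))
        return streamreqs, streamreqs - supportedformats

    reqs, missing = negotiate(b'revlogv1,generaldelta', {b'revlogv1'})
    assert missing == {b'generaldelta'}  # client must bail out
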
@@ -139,14 +139,14 @@
     # Save remote branchmap. We will use it later to speed up branchcache
     # creation.
     rbranchmap = None
-    if remote.capable('branchmap'):
+    if remote.capable(b'branchmap'):
         with remote.commandexecutor() as e:
-            rbranchmap = e.callcommand('branchmap', {}).result()
+            rbranchmap = e.callcommand(b'branchmap', {}).result()
 
-    repo.ui.status(_('streaming all changes\n'))
+    repo.ui.status(_(b'streaming all changes\n'))
 
     with remote.commandexecutor() as e:
-        fp = e.callcommand('stream_out', {}).result()
+        fp = e.callcommand(b'stream_out', {}).result()
 
     # TODO strictly speaking, this code should all be inside the context
     # manager because the context manager is supposed to ensure all wire state
@@ -157,21 +157,21 @@
         resp = int(l)
     except ValueError:
         raise error.ResponseError(
-            _('unexpected response from remote server:'), l
+            _(b'unexpected response from remote server:'), l
         )
     if resp == 1:
-        raise error.Abort(_('operation forbidden by server'))
+        raise error.Abort(_(b'operation forbidden by server'))
     elif resp == 2:
-        raise error.Abort(_('locking the remote repository failed'))
+        raise error.Abort(_(b'locking the remote repository failed'))
     elif resp != 0:
-        raise error.Abort(_('the server sent an unknown error code'))
+        raise error.Abort(_(b'the server sent an unknown error code'))
 
     l = fp.readline()
     try:
-        filecount, bytecount = map(int, l.split(' ', 1))
+        filecount, bytecount = map(int, l.split(b' ', 1))
     except (ValueError, TypeError):
         raise error.ResponseError(
-            _('unexpected response from remote server:'), l
+            _(b'unexpected response from remote server:'), l
         )
 
     with repo.lock():
@@ -199,14 +199,14 @@
     if repository.REPO_FEATURE_STREAM_CLONE not in repo.features:
         return False
 
-    if not repo.ui.configbool('server', 'uncompressed', untrusted=True):
+    if not repo.ui.configbool(b'server', b'uncompressed', untrusted=True):
         return False
 
     # The way stream clone works makes it impossible to hide secret changesets.
     # So don't allow this by default.
     secret = phases.hassecret(repo)
     if secret:
-        return repo.ui.configbool('server', 'uncompressedallowsecret')
+        return repo.ui.configbool(b'server', b'uncompressedallowsecret')
 
     return True
 
@@ -239,14 +239,14 @@
     total_bytes = 0
     # Get consistent snapshot of repo, lock during scan.
     with repo.lock():
-        repo.ui.debug('scanning\n')
+        repo.ui.debug(b'scanning\n')
         for name, ename, size in _walkstreamfiles(repo):
             if size:
                 entries.append((name, size))
                 total_bytes += size
 
     repo.ui.debug(
-        '%d files, %d bytes to transfer\n' % (len(entries), total_bytes)
+        b'%d files, %d bytes to transfer\n' % (len(entries), total_bytes)
     )
 
     svfs = repo.svfs
@@ -255,12 +255,12 @@
     def emitrevlogdata():
         for name, size in entries:
             if debugflag:
-                repo.ui.debug('sending %s (%d bytes)\n' % (name, size))
+                repo.ui.debug(b'sending %s (%d bytes)\n' % (name, size))
             # partially encode name over the wire for backwards compat
-            yield '%s\0%d\n' % (store.encodedir(name), size)
+            yield b'%s\0%d\n' % (store.encodedir(name), size)
             # auditing at this stage is both pointless (paths are already
             # trusted by the local repo) and expensive
-            with svfs(name, 'rb', auditpath=False) as fp:
+            with svfs(name, b'rb', auditpath=False) as fp:
                 if size <= 65536:
                     yield fp.read(size)
                 else:
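
Each entry emitted above is framed as '<encoded name>\0<size>\n' followed by
exactly size raw bytes, and consumev1() below parses the same framing. A
self-contained sketch with made-up file names:

    import io

    def frames(entries):
        for name, data in entries:
            yield b'%s\0%d\n' % (name, len(data))
            yield data

    buf = io.BytesIO(b''.join(frames([(b'data/foo.i', b'abc'),
                                      (b'00changelog.i', b'xyzzy')])))
    while True:
        header = buf.readline()
        if not header:
            break
        name, size = header.rstrip(b'\n').split(b'\0', 1)
        payload = buf.read(int(size))
        assert len(payload) == int(size)
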
@@ -282,23 +282,23 @@
     a permissions error for the server process).
     """
     if not allowservergeneration(repo):
-        yield '1\n'
+        yield b'1\n'
         return
 
     try:
         filecount, bytecount, it = generatev1(repo)
     except error.LockError:
-        yield '2\n'
+        yield b'2\n'
         return
 
     # Indicates successful response.
-    yield '0\n'
-    yield '%d %d\n' % (filecount, bytecount)
+    yield b'0\n'
+    yield b'%d %d\n' % (filecount, bytecount)
     for chunk in it:
         yield chunk
 
 
-def generatebundlev1(repo, compression='UN'):
+def generatebundlev1(repo, compression=b'UN'):
     """Emit content for version 1 of a stream clone bundle.
 
     The first 4 bytes of the output ("HGS1") denote this as stream clone
@@ -320,30 +320,30 @@
 
     Returns a tuple of (requirements, data generator).
     """
-    if compression != 'UN':
-        raise ValueError('we do not support the compression argument yet')
+    if compression != b'UN':
+        raise ValueError(b'we do not support the compression argument yet')
 
     requirements = repo.requirements & repo.supportedformats
-    requires = ','.join(sorted(requirements))
+    requires = b','.join(sorted(requirements))
 
     def gen():
-        yield 'HGS1'
+        yield b'HGS1'
         yield compression
 
         filecount, bytecount, it = generatev1(repo)
         repo.ui.status(
-            _('writing %d bytes for %d files\n') % (bytecount, filecount)
+            _(b'writing %d bytes for %d files\n') % (bytecount, filecount)
         )
 
-        yield struct.pack('>QQ', filecount, bytecount)
-        yield struct.pack('>H', len(requires) + 1)
-        yield requires + '\0'
+        yield struct.pack(b'>QQ', filecount, bytecount)
+        yield struct.pack(b'>H', len(requires) + 1)
+        yield requires + b'\0'
 
         # This is where we'll add compression in the future.
-        assert compression == 'UN'
+        assert compression == b'UN'
 
         progress = repo.ui.makeprogress(
-            _('bundle'), total=bytecount, unit=_('bytes')
+            _(b'bundle'), total=bytecount, unit=_(b'bytes')
         )
         progress.update(0)
 
@@ -367,11 +367,11 @@
     """
     with repo.lock():
         repo.ui.status(
-            _('%d files to transfer, %s of data\n')
+            _(b'%d files to transfer, %s of data\n')
             % (filecount, util.bytecount(bytecount))
         )
         progress = repo.ui.makeprogress(
-            _('clone'), total=bytecount, unit=_('bytes')
+            _(b'clone'), total=bytecount, unit=_(b'bytes')
         )
         progress.update(0)
         start = util.timer()
@@ -390,25 +390,25 @@
         # nesting occurs also in ordinary case (e.g. enabling
         # clonebundles).
 
-        with repo.transaction('clone'):
+        with repo.transaction(b'clone'):
             with repo.svfs.backgroundclosing(repo.ui, expectedcount=filecount):
                 for i in pycompat.xrange(filecount):
                     # XXX doesn't support '\n' or '\r' in filenames
                     l = fp.readline()
                     try:
-                        name, size = l.split('\0', 1)
+                        name, size = l.split(b'\0', 1)
                         size = int(size)
                     except (ValueError, TypeError):
                         raise error.ResponseError(
-                            _('unexpected response from remote server:'), l
+                            _(b'unexpected response from remote server:'), l
                         )
                     if repo.ui.debugflag:
                         repo.ui.debug(
-                            'adding %s (%s)\n' % (name, util.bytecount(size))
+                            b'adding %s (%s)\n' % (name, util.bytecount(size))
                         )
                     # for backwards compat, name was partially encoded
                     path = store.decodedir(name)
-                    with repo.svfs(path, 'w', backgroundclose=True) as ofp:
+                    with repo.svfs(path, b'w', backgroundclose=True) as ofp:
                         for chunk in util.filechunkiter(fp, limit=size):
                             progress.increment(step=len(chunk))
                             ofp.write(chunk)
@@ -422,7 +422,7 @@
             elapsed = 0.001
         progress.complete()
         repo.ui.status(
-            _('transferred %s in %.1f seconds (%s/sec)\n')
+            _(b'transferred %s in %.1f seconds (%s/sec)\n')
             % (
                 util.bytecount(bytecount),
                 elapsed,
@@ -433,25 +433,28 @@
 
 def readbundle1header(fp):
     compression = fp.read(2)
-    if compression != 'UN':
+    if compression != b'UN':
         raise error.Abort(
-            _('only uncompressed stream clone bundles are ' 'supported; got %s')
+            _(
+                b'only uncompressed stream clone bundles are '
+                b'supported; got %s'
+            )
             % compression
         )
 
-    filecount, bytecount = struct.unpack('>QQ', fp.read(16))
-    requireslen = struct.unpack('>H', fp.read(2))[0]
+    filecount, bytecount = struct.unpack(b'>QQ', fp.read(16))
+    requireslen = struct.unpack(b'>H', fp.read(2))[0]
     requires = fp.read(requireslen)
 
-    if not requires.endswith('\0'):
+    if not requires.endswith(b'\0'):
         raise error.Abort(
             _(
-                'malformed stream clone bundle: '
-                'requirements not properly encoded'
+                b'malformed stream clone bundle: '
+                b'requirements not properly encoded'
             )
         )
 
-    requirements = set(requires.rstrip('\0').split(','))
+    requirements = set(requires.rstrip(b'\0').split(b','))
 
     return filecount, bytecount, requirements
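
Putting generatebundlev1() and readbundle1header() side by side, the bundle
header is: the 4-byte magic 'HGS1', 2 bytes of compression ('UN'), file and
byte counts packed '>QQ', a '>H' length, then the NUL-terminated
requirements string. A round-trip sketch with invented counts:

    import struct

    requires = b','.join(sorted({b'revlogv1', b'store'}))
    header = (
        b'HGS1'                                 # magic
        + b'UN'                                 # only 'UN' is supported
        + struct.pack('>QQ', 2, 4096)           # file count, byte count
        + struct.pack('>H', len(requires) + 1)  # requirements length
        + requires + b'\0'
    )

    assert header[:4] == b'HGS1' and header[4:6] == b'UN'
    filecount, bytecount = struct.unpack('>QQ', header[6:22])
    (requireslen,) = struct.unpack('>H', header[22:24])
    blob = header[24:24 + requireslen]
    assert blob.endswith(b'\0')
    assert set(blob.rstrip(b'\0').split(b',')) == {b'revlogv1', b'store'}
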
 
@@ -464,15 +467,15 @@
     """
     if len(repo):
         raise error.Abort(
-            _('cannot apply stream clone bundle on non-empty ' 'repo')
+            _(b'cannot apply stream clone bundle on non-empty ' b'repo')
         )
 
     filecount, bytecount, requirements = readbundle1header(fp)
     missingreqs = requirements - repo.supportedformats
     if missingreqs:
         raise error.Abort(
-            _('unable to apply stream clone: ' 'unsupported format: %s')
-            % ', '.join(sorted(missingreqs))
+            _(b'unable to apply stream clone: ' b'unsupported format: %s')
+            % b', '.join(sorted(missingreqs))
         )
 
     consumev1(repo, fp, filecount, bytecount)
@@ -497,15 +500,15 @@
 _filefull = 1  # full snapshot file
 
 # Source of the file
-_srcstore = 's'  # store (svfs)
-_srccache = 'c'  # cache (cache)
+_srcstore = b's'  # store (svfs)
+_srccache = b'c'  # cache (cache)
 
 # This is its own function so extensions can override it.
 def _walkstreamfullstorefiles(repo):
     """list snapshot file from the store"""
     fnames = []
     if not repo.publishing():
-        fnames.append('phaseroots')
+        fnames.append(b'phaseroots')
     return fnames
 
 
@@ -553,7 +556,7 @@
     """actually emit the stream bundle"""
     vfsmap = _makemap(repo)
     progress = repo.ui.makeprogress(
-        _('bundle'), total=totalfilesize, unit=_('bytes')
+        _(b'bundle'), total=totalfilesize, unit=_(b'bytes')
     )
     progress.update(0)
     with maketempcopies() as copy, progress:
@@ -570,7 +573,7 @@
                 fp = vfs(name)
                 size = data
             elif ftype == _filefull:
-                fp = open(data, 'rb')
+                fp = open(data, b'rb')
                 size = util.fstat(fp).st_size
             try:
                 yield util.uvarintencode(size)
@@ -609,7 +612,7 @@
         if includes or excludes:
             matcher = narrowspec.match(repo.root, includes, excludes)
 
-        repo.ui.debug('scanning\n')
+        repo.ui.debug(b'scanning\n')
         for name, ename, size in _walkstreamfiles(repo, matcher):
             if size:
                 entries.append((_srcstore, name, _fileappend, size))
@@ -618,9 +621,9 @@
             if repo.svfs.exists(name):
                 totalfilesize += repo.svfs.lstat(name).st_size
                 entries.append((_srcstore, name, _filefull, None))
-        if includeobsmarkers and repo.svfs.exists('obsstore'):
-            totalfilesize += repo.svfs.lstat('obsstore').st_size
-            entries.append((_srcstore, 'obsstore', _filefull, None))
+        if includeobsmarkers and repo.svfs.exists(b'obsstore'):
+            totalfilesize += repo.svfs.lstat(b'obsstore').st_size
+            entries.append((_srcstore, b'obsstore', _filefull, None))
         for name in cacheutil.cachetocopy(repo):
             if repo.cachevfs.exists(name):
                 totalfilesize += repo.cachevfs.lstat(name).st_size
@@ -653,19 +656,19 @@
     """
     with repo.lock():
         repo.ui.status(
-            _('%d files to transfer, %s of data\n')
+            _(b'%d files to transfer, %s of data\n')
             % (filecount, util.bytecount(filesize))
         )
 
         start = util.timer()
         progress = repo.ui.makeprogress(
-            _('clone'), total=filesize, unit=_('bytes')
+            _(b'clone'), total=filesize, unit=_(b'bytes')
         )
         progress.update(0)
 
         vfsmap = _makemap(repo)
 
-        with repo.transaction('clone'):
+        with repo.transaction(b'clone'):
             ctxs = (vfs.backgroundclosing(repo.ui) for vfs in vfsmap.values())
             with nested(*ctxs):
                 for i in range(filecount):
@@ -678,11 +681,11 @@
 
                     if repo.ui.debugflag:
                         repo.ui.debug(
-                            'adding [%s] %s (%s)\n'
+                            b'adding [%s] %s (%s)\n'
                             % (src, name, util.bytecount(datalen))
                         )
 
-                    with vfs(name, 'w') as ofp:
+                    with vfs(name, b'w') as ofp:
                         for chunk in util.filechunkiter(fp, limit=datalen):
                             progress.increment(step=len(chunk))
                             ofp.write(chunk)
@@ -695,7 +698,7 @@
         if elapsed <= 0:
             elapsed = 0.001
         repo.ui.status(
-            _('transferred %s in %.1f seconds (%s/sec)\n')
+            _(b'transferred %s in %.1f seconds (%s/sec)\n')
             % (
                 util.bytecount(progress.pos),
                 elapsed,
@@ -711,8 +714,8 @@
     missingreqs = [r for r in requirements if r not in repo.supported]
     if missingreqs:
         raise error.Abort(
-            _('unable to apply stream clone: ' 'unsupported format: %s')
-            % ', '.join(sorted(missingreqs))
+            _(b'unable to apply stream clone: ' b'unsupported format: %s')
+            % b', '.join(sorted(missingreqs))
         )
 
     consumev2(repo, fp, filecount, filesize)
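
The v2 format above length-prefixes each name and payload with
util.uvarintencode(). Assuming the usual base-128 varint (seven payload bits
per byte, high bit set on continuation bytes), an illustrative encode/decode
pair:

    def uvarintencode(value):
        out = bytearray()
        while value > 0x7f:
            out.append(0x80 | (value & 0x7f))
            value >>= 7
        out.append(value)
        return bytes(out)

    def uvarintdecode(data):
        result = shift = 0
        for i, byte in enumerate(data):
            result |= (byte & 0x7f) << shift
            if not byte & 0x80:
                return result, i + 1  # value, bytes consumed
            shift += 7
        raise ValueError('incomplete varint')

    assert uvarintencode(300) == b'\xac\x02'
    assert uvarintdecode(b'\xac\x02') == (300, 2)
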
--- a/mercurial/subrepo.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/subrepo.py	Sun Oct 06 09:48:39 2019 -0400
@@ -84,8 +84,8 @@
             subrepo = subrelpath(self)
             errormsg = (
                 stringutil.forcebytestr(ex)
-                + ' '
-                + _('(in subrepository "%s")') % subrepo
+                + b' '
+                + _(b'(in subrepository "%s")') % subrepo
             )
             # avoid handling this exception by raising a SubrepoAbort exception
             raise SubrepoAbort(
@@ -99,18 +99,18 @@
 def _updateprompt(ui, sub, dirty, local, remote):
     if dirty:
         msg = _(
-            ' subrepository sources for %s differ\n'
-            'you can use (l)ocal source (%s) or (r)emote source (%s).\n'
-            'what do you want to do?'
-            '$$ &Local $$ &Remote'
+            b' subrepository sources for %s differ\n'
+            b'you can use (l)ocal source (%s) or (r)emote source (%s).\n'
+            b'what do you want to do?'
+            b'$$ &Local $$ &Remote'
         ) % (subrelpath(sub), local, remote)
     else:
         msg = _(
-            ' subrepository sources for %s differ (in checked out '
-            'version)\n'
-            'you can use (l)ocal source (%s) or (r)emote source (%s).\n'
-            'what do you want to do?'
-            '$$ &Local $$ &Remote'
+            b' subrepository sources for %s differ (in checked out '
+            b'version)\n'
+            b'you can use (l)ocal source (%s) or (r)emote source (%s).\n'
+            b'what do you want to do?'
+            b'$$ &Local $$ &Remote'
         ) % (subrelpath(sub), local, remote)
     return ui.promptchoice(msg, 0)
 
@@ -121,14 +121,14 @@
             if d.lower() == ignore:
                 del dirs[i]
                 break
-        if vfs.basename(dirname).lower() != '.hg':
+        if vfs.basename(dirname).lower() != b'.hg':
             continue
         for f in names:
-            if f.lower() == 'hgrc':
+            if f.lower() == b'hgrc':
                 ui.warn(
                     _(
-                        "warning: removing potentially hostile 'hgrc' "
-                        "in '%s'\n"
+                        b"warning: removing potentially hostile 'hgrc' "
+                        b"in '%s'\n"
                     )
                     % vfs.join(dirname)
                 )
@@ -137,41 +137,41 @@
 
 def _auditsubrepopath(repo, path):
     # sanity check for potentially unsafe paths such as '~' and '$FOO'
-    if path.startswith('~') or '$' in path or util.expandpath(path) != path:
+    if path.startswith(b'~') or b'$' in path or util.expandpath(path) != path:
         raise error.Abort(
-            _('subrepo path contains illegal component: %s') % path
+            _(b'subrepo path contains illegal component: %s') % path
         )
     # auditor doesn't check if the path itself is a symlink
     pathutil.pathauditor(repo.root)(path)
     if repo.wvfs.islink(path):
-        raise error.Abort(_("subrepo '%s' traverses symbolic link") % path)
+        raise error.Abort(_(b"subrepo '%s' traverses symbolic link") % path)
 
 
 SUBREPO_ALLOWED_DEFAULTS = {
-    'hg': True,
-    'git': False,
-    'svn': False,
+    b'hg': True,
+    b'git': False,
+    b'svn': False,
 }
 
 
 def _checktype(ui, kind):
     # subrepos.allowed is a master kill switch. If disabled, subrepos are
     # disabled period.
-    if not ui.configbool('subrepos', 'allowed', True):
+    if not ui.configbool(b'subrepos', b'allowed', True):
         raise error.Abort(
-            _('subrepos not enabled'),
-            hint=_("see 'hg help config.subrepos' for details"),
+            _(b'subrepos not enabled'),
+            hint=_(b"see 'hg help config.subrepos' for details"),
         )
 
     default = SUBREPO_ALLOWED_DEFAULTS.get(kind, False)
-    if not ui.configbool('subrepos', '%s:allowed' % kind, default):
+    if not ui.configbool(b'subrepos', b'%s:allowed' % kind, default):
         raise error.Abort(
-            _('%s subrepos not allowed') % kind,
-            hint=_("see 'hg help config.subrepos' for details"),
+            _(b'%s subrepos not allowed') % kind,
+            hint=_(b"see 'hg help config.subrepos' for details"),
         )
 
     if kind not in types:
-        raise error.Abort(_('unknown subrepo type %s') % kind)
+        raise error.Abort(_(b'unknown subrepo type %s') % kind)
 
 
 def subrepo(ctx, path, allowwdir=False, allowcreate=True):
@@ -209,9 +209,9 @@
     _auditsubrepopath(repo, path)
     state = ctx.substate[path]
     _checktype(repo.ui, state[2])
-    subrev = ''
-    if state[2] == 'hg':
-        subrev = "0" * 40
+    subrev = b''
+    if state[2] == b'hg':
+        subrev = b"0" * 40
     return types[state[2]](pctx, path, (state[0], subrev), True)
 
 
@@ -265,7 +265,7 @@
         This returns None, otherwise.
         """
         if self.dirty(ignoreupdate=ignoreupdate, missing=missing):
-            return _('uncommitted changes in subrepository "%s"') % subrelpath(
+            return _(b'uncommitted changes in subrepository "%s"') % subrelpath(
                 self
             )
 
@@ -325,7 +325,7 @@
         return []
 
     def addremove(self, matcher, prefix, uipathfn, opts):
-        self.ui.warn("%s: %s" % (prefix, _("addremove is not supported")))
+        self.ui.warn(b"%s: %s" % (prefix, _(b"addremove is not supported")))
         return 1
 
     def cat(self, match, fm, fntemplate, prefix, **opts):
@@ -353,7 +353,7 @@
 
     def fileflags(self, name):
         """return file flags"""
-        return ''
+        return b''
 
     def matchfileset(self, expr, badfn=None):
         """Resolve the fileset expression for this repo"""
@@ -371,13 +371,13 @@
         total = len(files)
         relpath = subrelpath(self)
         progress = self.ui.makeprogress(
-            _('archiving (%s)') % relpath, unit=_('files'), total=total
+            _(b'archiving (%s)') % relpath, unit=_(b'files'), total=total
         )
         progress.update(0)
         for name in files:
             flags = self.fileflags(name)
-            mode = 'x' in flags and 0o755 or 0o644
-            symlink = 'l' in flags
+            mode = b'x' in flags and 0o755 or 0o644
+            symlink = b'l' in flags
             archiver.addfile(
                 prefix + name, mode, symlink, self.filedata(name, decode)
             )
@@ -410,13 +410,13 @@
         filesystem.  Return 0 on success, 1 on any warning.
         """
         warnings.append(
-            _("warning: removefiles not implemented (%s)") % self._path
+            _(b"warning: removefiles not implemented (%s)") % self._path
         )
         return 1
 
     def revert(self, substate, *pats, **opts):
         self.ui.warn(
-            _('%s: reverting %s subrepos is unsupported\n')
+            _(b'%s: reverting %s subrepos is unsupported\n')
             % (substate[0], substate[2])
         )
         return []
@@ -454,18 +454,18 @@
         self._state = state
         r = ctx.repo()
         root = r.wjoin(util.localpath(path))
-        create = allowcreate and not r.wvfs.exists('%s/.hg' % path)
+        create = allowcreate and not r.wvfs.exists(b'%s/.hg' % path)
         # repository constructor does expand variables in path, which is
         # unsafe since subrepo path might come from untrusted source.
         if os.path.realpath(util.expandpath(root)) != root:
             raise error.Abort(
-                _('subrepo path contains illegal component: %s') % path
+                _(b'subrepo path contains illegal component: %s') % path
             )
         self._repo = hg.repository(r.baseui, root, create=create)
         if self._repo.root != root:
             raise error.ProgrammingError(
-                'failed to reject unsafe subrepo '
-                'path: %s (expanded to %s)' % (root, self._repo.root)
+                b'failed to reject unsafe subrepo '
+                b'path: %s (expanded to %s)' % (root, self._repo.root)
             )
 
         # Propagate the parent's --hidden option
@@ -473,12 +473,12 @@
             self._repo = self._repo.unfiltered()
 
         self.ui = self._repo.ui
-        for s, k in [('ui', 'commitsubrepos')]:
+        for s, k in [(b'ui', b'commitsubrepos')]:
             v = r.ui.config(s, k)
             if v:
-                self.ui.setconfig(s, k, v, 'subrepo')
+                self.ui.setconfig(s, k, v, b'subrepo')
         # internal config: ui._usedassubrepo
-        self.ui.setconfig('ui', '_usedassubrepo', 'True', 'subrepo')
+        self.ui.setconfig(b'ui', b'_usedassubrepo', b'True', b'subrepo')
         self._initrepo(r, state[0], create)
 
     @annotatesubrepoerror
@@ -508,21 +508,21 @@
         This method is used to detect when there are changes that may
         require a push to a given remote path.'''
         # sort the files that will be hashed in increasing (likely) file size
-        filelist = ('bookmarks', 'store/phaseroots', 'store/00changelog.i')
-        yield '# %s\n' % _expandedabspath(remotepath)
+        filelist = (b'bookmarks', b'store/phaseroots', b'store/00changelog.i')
+        yield b'# %s\n' % _expandedabspath(remotepath)
         vfs = self._repo.vfs
         for relname in filelist:
             filehash = node.hex(hashlib.sha1(vfs.tryread(relname)).digest())
-            yield '%s = %s\n' % (relname, filehash)
+            yield b'%s = %s\n' % (relname, filehash)
 
     @propertycache
     def _cachestorehashvfs(self):
-        return vfsmod.vfs(self._repo.vfs.join('cache/storehash'))
+        return vfsmod.vfs(self._repo.vfs.join(b'cache/storehash'))
 
     def _readstorehashcache(self, remotepath):
         '''read the store hash cache for a given remote repository'''
         cachefile = _getstorehashcachename(remotepath)
-        return self._cachestorehashvfs.tryreadlines(cachefile, 'r')
+        return self._cachestorehashvfs.tryreadlines(cachefile, b'r')
 
     def _cachestorehash(self, remotepath):
         '''cache the current store hash
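
For context, _calcstorehash() above emits one 'name = sha1hex' line per
store file, headed by the expanded remote path, so a subrepo can cheaply
decide whether its store changed since the last push. A stand-in with
fabricated contents:

    import hashlib

    def calcstorehash(remotepath, files):
        yield b'# %s\n' % remotepath
        for relname, content in files:
            digest = hashlib.sha1(content).hexdigest().encode('ascii')
            yield b'%s = %s\n' % (relname, digest)

    lines = list(calcstorehash(b'/tmp/remote',
                               [(b'bookmarks', b''),
                                (b'store/00changelog.i', b'\0' * 64)]))
    assert lines[0] == b'# /tmp/remote\n'
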
@@ -534,7 +534,7 @@
         with self._repo.lock():
             storehash = list(self._calcstorehash(remotepath))
             vfs = self._cachestorehashvfs
-            vfs.writelines(cachefile, storehash, mode='wb', notindexed=True)
+            vfs.writelines(cachefile, storehash, mode=b'wb', notindexed=True)
 
     def _getctx(self):
         '''fetch the context for this subrepo revision, possibly a workingctx
@@ -551,20 +551,20 @@
         self._repo._subsource = source
 
         if create:
-            lines = ['[paths]\n']
+            lines = [b'[paths]\n']
 
             def addpathconfig(key, value):
                 if value:
-                    lines.append('%s = %s\n' % (key, value))
-                    self.ui.setconfig('paths', key, value, 'subrepo')
+                    lines.append(b'%s = %s\n' % (key, value))
+                    self.ui.setconfig(b'paths', key, value, b'subrepo')
 
             defpath = _abssource(self._repo, abort=False)
             defpushpath = _abssource(self._repo, True, abort=False)
-            addpathconfig('default', defpath)
+            addpathconfig(b'default', defpath)
             if defpath != defpushpath:
-                addpathconfig('default-push', defpushpath)
+                addpathconfig(b'default-push', defpushpath)
 
-            self._repo.vfs.write('hgrc', util.tonativeeol(''.join(lines)))
+            self._repo.vfs.write(b'hgrc', util.tonativeeol(b''.join(lines)))
 
     @annotatesubrepoerror
     def add(self, ui, match, prefix, uipathfn, explicitonly, **opts):
@@ -578,7 +578,7 @@
         # always enter any of its subrepos.  Don't corrupt the options that will
         # be used to process sibling subrepos however.
         opts = copy.copy(opts)
-        opts['subrepos'] = True
+        opts[b'subrepos'] = True
         return scmutil.addremove(self._repo, m, prefix, uipathfn, opts)
 
     @annotatesubrepoerror
@@ -598,7 +598,7 @@
             return self._repo.status(ctx1, ctx2, **opts)
         except error.RepoLookupError as inst:
             self.ui.warn(
-                _('warning: error "%s" in subrepository "%s"\n')
+                _(b'warning: error "%s" in subrepository "%s"\n')
                 % (inst, subrelpath(self))
             )
             return scmutil.status([], [], [], [], [], [], [])
@@ -624,13 +624,13 @@
             )
         except error.RepoLookupError as inst:
             self.ui.warn(
-                _('warning: error "%s" in subrepository "%s"\n')
+                _(b'warning: error "%s" in subrepository "%s"\n')
                 % (inst, subrelpath(self))
             )
 
     @annotatesubrepoerror
     def archive(self, archiver, prefix, match=None, decode=True):
-        self._get(self._state + ('hg',))
+        self._get(self._state + (b'hg',))
         files = self.files()
         if match:
             files = [f for f in files if match(f)]
@@ -643,14 +643,14 @@
         for subpath in ctx.substate:
             s = subrepo(ctx, subpath, True)
             submatch = matchmod.subdirmatcher(subpath, match)
-            subprefix = prefix + subpath + '/'
+            subprefix = prefix + subpath + b'/'
             total += s.archive(archiver, subprefix, submatch, decode)
         return total
 
     @annotatesubrepoerror
     def dirty(self, ignoreupdate=False, missing=False):
         r = self._state[1]
-        if r == '' and not ignoreupdate:  # no state recorded
+        if r == b'' and not ignoreupdate:  # no state recorded
             return True
         w = self._repo[None]
         if r != w.p1().hex() and not ignoreupdate:
@@ -659,7 +659,7 @@
         return w.dirty(missing=missing)  # working directory changed
 
     def basestate(self):
-        return self._repo['.'].hex()
+        return self._repo[b'.'].hex()
 
     def checknested(self, path):
         return self._repo._checknested(self._repo.wjoin(path))
@@ -669,22 +669,22 @@
         # don't bother committing in the subrepo if it's only been
         # updated
         if not self.dirty(True):
-            return self._repo['.'].hex()
-        self.ui.debug("committing subrepo %s\n" % subrelpath(self))
+            return self._repo[b'.'].hex()
+        self.ui.debug(b"committing subrepo %s\n" % subrelpath(self))
         n = self._repo.commit(text, user, date)
         if not n:
-            return self._repo['.'].hex()  # different version checked out
+            return self._repo[b'.'].hex()  # different version checked out
         return node.hex(n)
 
     @annotatesubrepoerror
     def phase(self, state):
-        return self._repo[state or '.'].phase()
+        return self._repo[state or b'.'].phase()
 
     @annotatesubrepoerror
     def remove(self):
         # we can't fully delete the repository as it may contain
         # local-only history
-        self.ui.note(_('removing subrepo %s\n') % subrelpath(self))
+        self.ui.note(_(b'removing subrepo %s\n') % subrelpath(self))
         hg.clean(self._repo, node.nullid, False)
 
     def _get(self, state):
@@ -713,7 +713,7 @@
             # work with that.
             if parentrepo.shared() and hg.islocal(srcurl):
                 self.ui.status(
-                    _('sharing subrepo %s from %s\n')
+                    _(b'sharing subrepo %s from %s\n')
                     % (subrelpath(self), srcurl)
                 )
                 shared = hg.share(
@@ -728,19 +728,19 @@
                 # TODO: find a common place for this and this code in the
                 # share.py wrap of the clone command.
                 if parentrepo.shared():
-                    pool = self.ui.config('share', 'pool')
+                    pool = self.ui.config(b'share', b'pool')
                     if pool:
                         pool = util.expandpath(pool)
 
                     shareopts = {
-                        'pool': pool,
-                        'mode': self.ui.config('share', 'poolnaming'),
+                        b'pool': pool,
+                        b'mode': self.ui.config(b'share', b'poolnaming'),
                     }
                 else:
                     shareopts = {}
 
                 self.ui.status(
-                    _('cloning subrepo %s from %s\n')
+                    _(b'cloning subrepo %s from %s\n')
                     % (subrelpath(self), util.hidepassword(srcurl))
                 )
                 other, cloned = hg.clone(
@@ -756,7 +756,7 @@
             self._cachestorehash(srcurl)
         else:
             self.ui.status(
-                _('pulling subrepo %s from %s\n')
+                _(b'pulling subrepo %s from %s\n')
                 % (subrelpath(self), util.hidepassword(srcurl))
             )
             cleansub = self.storeclean(srcurl)
@@ -771,13 +771,13 @@
         inrepo = self._get(state)
         source, revision, kind = state
         repo = self._repo
-        repo.ui.debug("getting subrepo %s\n" % self._path)
+        repo.ui.debug(b"getting subrepo %s\n" % self._path)
         if inrepo:
             urepo = repo.unfiltered()
             ctx = urepo[revision]
             if ctx.hidden():
                 urepo.ui.warn(
-                    _('revision %s in subrepository "%s" is hidden\n')
+                    _(b'revision %s in subrepository "%s" is hidden\n')
                     % (revision[0:12], self._path)
                 )
                 repo = urepo
@@ -786,22 +786,24 @@
     @annotatesubrepoerror
     def merge(self, state):
         self._get(state)
-        cur = self._repo['.']
+        cur = self._repo[b'.']
         dst = self._repo[state[1]]
         anc = dst.ancestor(cur)
 
         def mergefunc():
             if anc == cur and dst.branch() == cur.branch():
                 self.ui.debug(
-                    'updating subrepository "%s"\n' % subrelpath(self)
+                    b'updating subrepository "%s"\n' % subrelpath(self)
                 )
                 hg.update(self._repo, state[1])
             elif anc == dst:
                 self.ui.debug(
-                    'skipping subrepository "%s"\n' % subrelpath(self)
+                    b'skipping subrepository "%s"\n' % subrelpath(self)
                 )
             else:
-                self.ui.debug('merging subrepository "%s"\n' % subrelpath(self))
+                self.ui.debug(
+                    b'merging subrepository "%s"\n' % subrelpath(self)
+                )
                 hg.merge(self._repo, state[1], remind=False)
 
         wctx = self._repo[None]
@@ -816,12 +818,12 @@
 
     @annotatesubrepoerror
     def push(self, opts):
-        force = opts.get('force')
-        newbranch = opts.get('new_branch')
-        ssh = opts.get('ssh')
+        force = opts.get(b'force')
+        newbranch = opts.get(b'new_branch')
+        ssh = opts.get(b'ssh')
 
         # push subrepos depth-first for coherent ordering
-        c = self._repo['.']
+        c = self._repo[b'.']
         subs = c.substate  # only repos that are committed
         for s in sorted(subs):
             if c.sub(s).push(opts) == 0:
@@ -831,15 +833,15 @@
         if not force:
             if self.storeclean(dsturl):
                 self.ui.status(
-                    _('no changes made to subrepo %s since last push to %s\n')
+                    _(b'no changes made to subrepo %s since last push to %s\n')
                     % (subrelpath(self), util.hidepassword(dsturl))
                 )
                 return None
         self.ui.status(
-            _('pushing subrepo %s to %s\n')
+            _(b'pushing subrepo %s to %s\n')
             % (subrelpath(self), util.hidepassword(dsturl))
         )
-        other = hg.peer(self._repo, {'ssh': ssh}, dsturl)
+        other = hg.peer(self._repo, {b'ssh': ssh}, dsturl)
         res = exchange.push(self._repo, other, force, newbranch=newbranch)
 
         # the repo is now clean
@@ -848,18 +850,18 @@
 
     @annotatesubrepoerror
     def outgoing(self, ui, dest, opts):
-        if 'rev' in opts or 'branch' in opts:
+        if b'rev' in opts or b'branch' in opts:
             opts = copy.copy(opts)
-            opts.pop('rev', None)
-            opts.pop('branch', None)
+            opts.pop(b'rev', None)
+            opts.pop(b'branch', None)
         return hg.outgoing(ui, self._repo, _abssource(self._repo, True), opts)
 
     @annotatesubrepoerror
     def incoming(self, ui, source, opts):
-        if 'rev' in opts or 'branch' in opts:
+        if b'rev' in opts or b'branch' in opts:
             opts = copy.copy(opts)
-            opts.pop('rev', None)
-            opts.pop('branch', None)
+            opts.pop(b'rev', None)
+            opts.pop(b'branch', None)
         return hg.incoming(ui, self._repo, _abssource(self._repo, False), opts)
 
     @annotatesubrepoerror
@@ -910,7 +912,7 @@
                 matchers.append(pm)
             except error.LookupError:
                 self.ui.status(
-                    _("skipping missing subrepository: %s\n")
+                    _(b"skipping missing subrepository: %s\n")
                     % self.wvfs.reljoin(reporelpath(self), subpath)
                 )
         if len(matchers) == 1:
@@ -965,7 +967,7 @@
         #    files inside the subrepo
         # 2. update the subrepo to the revision specified in
         #    the corresponding substate dictionary
-        self.ui.status(_('reverting subrepo %s\n') % substate[0])
+        self.ui.status(_(b'reverting subrepo %s\n') % substate[0])
         if not opts.get(r'no_backup'):
             # Revert all files on the subrepo, creating backups
             # Note that this will not recursively revert subrepos
@@ -984,7 +986,7 @@
         ctx = self._repo[opts[r'rev']]
         parents = self._repo.dirstate.parents()
         if opts.get(r'all'):
-            pats = ['set:modified()']
+            pats = [b'set:modified()']
         else:
             pats = []
         cmdutil.revert(self.ui, self._repo, ctx, parents, *pats, **opts)
@@ -1007,7 +1009,7 @@
         # subrepo.  Alternately, the previous unshare attempt may have failed
         # part way through.  So recurse whether or not this layer is shared.
         if self._repo.shared():
-            self.ui.status(_("unsharing subrepo '%s'\n") % self._relpath)
+            self.ui.status(_(b"unsharing subrepo '%s'\n") % self._relpath)
 
         hg.unshare(self.ui, self._repo)
 
@@ -1020,7 +1022,7 @@
                 # explicit warning.
                 ui = self._repo.ui
                 ui.warn(
-                    _("subrepo '%s' is hidden in revision %s\n")
+                    _(b"subrepo '%s' is hidden in revision %s\n")
                     % (self._relpath, node.short(self._ctx.node()))
                 )
             return 0
@@ -1028,7 +1030,7 @@
             # A missing subrepo revision may be a case of needing to pull it, so
             # don't treat this as an error.
             self._repo.ui.warn(
-                _("subrepo '%s' not found in revision %s\n")
+                _(b"subrepo '%s' not found in revision %s\n")
                 % (self._relpath, node.short(self._ctx.node()))
             )
             return 0
@@ -1051,13 +1053,13 @@
     def __init__(self, ctx, path, state, allowcreate):
         super(svnsubrepo, self).__init__(ctx, path)
         self._state = state
-        self._exe = procutil.findexe('svn')
+        self._exe = procutil.findexe(b'svn')
         if not self._exe:
             raise error.Abort(
-                _("'svn' executable not found for subrepo '%s'") % self._path
+                _(b"'svn' executable not found for subrepo '%s'") % self._path
             )
 
-    def _svncommand(self, commands, filename='', failok=False):
+    def _svncommand(self, commands, filename=b'', failok=False):
         cmd = [self._exe]
         extrakw = {}
         if not self.ui.interactive():
@@ -1068,8 +1070,8 @@
             # instead of being per-command, but we need to support 1.4 so
             # we have to be intelligent about what commands take
             # --non-interactive.
-            if commands[0] in ('update', 'checkout', 'commit'):
-                cmd.append('--non-interactive')
+            if commands[0] in (b'update', b'checkout', b'commit'):
+                cmd.append(b'--non-interactive')
         cmd.extend(commands)
         if filename is not None:
             path = self.wvfs.reljoin(
@@ -1078,11 +1080,11 @@
             cmd.append(path)
         env = dict(encoding.environ)
         # Avoid localized output, preserve current locale for everything else.
-        lc_all = env.get('LC_ALL')
+        lc_all = env.get(b'LC_ALL')
         if lc_all:
-            env['LANG'] = lc_all
-            del env['LC_ALL']
-        env['LC_MESSAGES'] = 'C'
+            env[b'LANG'] = lc_all
+            del env[b'LC_ALL']
+        env[b'LC_MESSAGES'] = b'C'
         p = subprocess.Popen(
             pycompat.rapply(procutil.tonativestr, cmd),
             bufsize=-1,
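
The environment juggling above relies on POSIX locale precedence (LC_ALL
overrides every LC_* variable, which in turn override LANG): setting
LC_MESSAGES alone would be ignored while LC_ALL is set, so LC_ALL is demoted
to LANG first. In isolation:

    import os

    env = dict(os.environ)
    lc_all = env.get('LC_ALL')
    if lc_all:
        env['LANG'] = lc_all  # keep the user's locale for everything else
        del env['LC_ALL']     # LC_ALL would mask LC_MESSAGES entirely
    env['LC_MESSAGES'] = 'C'  # untranslated, parseable svn output
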
@@ -1097,38 +1099,40 @@
         if not failok:
             if p.returncode:
                 raise error.Abort(
-                    stderr or 'exited with code %d' % p.returncode
+                    stderr or b'exited with code %d' % p.returncode
                 )
             if stderr:
-                self.ui.warn(stderr + '\n')
+                self.ui.warn(stderr + b'\n')
         return stdout, stderr
 
     @propertycache
     def _svnversion(self):
-        output, err = self._svncommand(['--version', '--quiet'], filename=None)
+        output, err = self._svncommand(
+            [b'--version', b'--quiet'], filename=None
+        )
         m = re.search(br'^(\d+)\.(\d+)', output)
         if not m:
-            raise error.Abort(_('cannot retrieve svn tool version'))
+            raise error.Abort(_(b'cannot retrieve svn tool version'))
         return (int(m.group(1)), int(m.group(2)))
 
     def _svnmissing(self):
-        return not self.wvfs.exists('.svn')
+        return not self.wvfs.exists(b'.svn')
 
     def _wcrevs(self):
         # Get the working directory revision as well as the last
         # commit revision so we can compare the subrepo state with
         # both. We used to store the working directory one.
-        output, err = self._svncommand(['info', '--xml'])
+        output, err = self._svncommand([b'info', b'--xml'])
         doc = xml.dom.minidom.parseString(output)
         entries = doc.getElementsByTagName(r'entry')
-        lastrev, rev = '0', '0'
+        lastrev, rev = b'0', b'0'
         if entries:
-            rev = pycompat.bytestr(entries[0].getAttribute(r'revision')) or '0'
+            rev = pycompat.bytestr(entries[0].getAttribute(r'revision')) or b'0'
             commits = entries[0].getElementsByTagName(r'commit')
             if commits:
                 lastrev = (
                     pycompat.bytestr(commits[0].getAttribute(r'revision'))
-                    or '0'
+                    or b'0'
                 )
         return (lastrev, rev)
 
@@ -1141,7 +1145,7 @@
         True if any of these changes concern an external entry and missing
         is True if any change is a missing entry.
         """
-        output, err = self._svncommand(['status', '--xml'])
+        output, err = self._svncommand([b'status', b'--xml'])
         externals, changes, missing = [], [], []
         doc = xml.dom.minidom.parseString(output)
         for e in doc.getElementsByTagName(r'entry'):
@@ -1171,7 +1175,7 @@
     @annotatesubrepoerror
     def dirty(self, ignoreupdate=False, missing=False):
         if self._svnmissing():
-            return self._state[1] != ''
+            return self._state[1] != b''
         wcchanged = self._wcchanged()
         changed = wcchanged[0] or (missing and wcchanged[2])
         if not changed:
@@ -1187,7 +1191,9 @@
             # URL exists at lastrev.  Test it and fallback to rev it
             # is not there.
             try:
-                self._svncommand(['list', '%s@%s' % (self._state[0], lastrev)])
+                self._svncommand(
+                    [b'list', b'%s@%s' % (self._state[0], lastrev)]
+                )
                 return lastrev
             except error.Abort:
                 pass
@@ -1201,35 +1207,35 @@
             return self.basestate()
         if extchanged:
             # Do not try to commit externals
-            raise error.Abort(_('cannot commit svn externals'))
+            raise error.Abort(_(b'cannot commit svn externals'))
         if missing:
             # svn can commit with missing entries but aborting like hg
             # seems a better approach.
-            raise error.Abort(_('cannot commit missing svn entries'))
-        commitinfo, err = self._svncommand(['commit', '-m', text])
+            raise error.Abort(_(b'cannot commit missing svn entries'))
+        commitinfo, err = self._svncommand([b'commit', b'-m', text])
         self.ui.status(commitinfo)
-        newrev = re.search('Committed revision ([0-9]+).', commitinfo)
+        newrev = re.search(b'Committed revision ([0-9]+).', commitinfo)
         if not newrev:
             if not commitinfo.strip():
                 # Sometimes, our definition of "changed" differs from
                 # svn's. For instance, svn ignores missing files
                 # when committing. If there are only missing files, no
                 # commit is made, no output and no error code.
-                raise error.Abort(_('failed to commit svn changes'))
+                raise error.Abort(_(b'failed to commit svn changes'))
             raise error.Abort(commitinfo.splitlines()[-1])
         newrev = newrev.groups()[0]
-        self.ui.status(self._svncommand(['update', '-r', newrev])[0])
+        self.ui.status(self._svncommand([b'update', b'-r', newrev])[0])
         return newrev
 
     @annotatesubrepoerror
     def remove(self):
         if self.dirty():
             self.ui.warn(
-                _('not removing repo %s because ' 'it has changes.\n')
+                _(b'not removing repo %s because ' b'it has changes.\n')
                 % self._path
             )
             return
-        self.ui.note(_('removing subrepo %s\n') % self._path)
+        self.ui.note(_(b'removing subrepo %s\n') % self._path)
 
         self.wvfs.rmtree(forcibly=True)
         try:
@@ -1241,21 +1247,21 @@
     @annotatesubrepoerror
     def get(self, state, overwrite=False):
         if overwrite:
-            self._svncommand(['revert', '--recursive'])
-        args = ['checkout']
+            self._svncommand([b'revert', b'--recursive'])
+        args = [b'checkout']
         if self._svnversion >= (1, 5):
-            args.append('--force')
+            args.append(b'--force')
         # The revision must be specified at the end of the URL to properly
         # update to a directory which has since been deleted and recreated.
-        args.append('%s@%s' % (state[0], state[1]))
+        args.append(b'%s@%s' % (state[0], state[1]))
 
         # SEC: check that the ssh url is safe
         util.checksafessh(state[0])
 
         status, err = self._svncommand(args, failok=True)
-        _sanitize(self.ui, self.wvfs, '.svn')
-        if not re.search('Checked out revision [0-9]+.', status):
-            if 'is already a working copy for a different URL' in err and (
+        _sanitize(self.ui, self.wvfs, b'.svn')
+        if not re.search(b'Checked out revision [0-9]+.', status):
+            if b'is already a working copy for a different URL' in err and (
                 self._wcchanged()[:2] == (False, False)
             ):
                 # obstructed but clean working copy, so just blow it away.
@@ -1281,12 +1287,12 @@
 
     @annotatesubrepoerror
     def files(self):
-        output = self._svncommand(['list', '--recursive', '--xml'])[0]
+        output = self._svncommand([b'list', b'--recursive', b'--xml'])[0]
         doc = xml.dom.minidom.parseString(output)
         paths = []
         for e in doc.getElementsByTagName(r'entry'):
             kind = pycompat.bytestr(e.getAttribute(r'kind'))
-            if kind != 'file':
+            if kind != b'file':
                 continue
             name = r''.join(
                 c.data
@@ -1297,7 +1303,7 @@
         return paths
 
     def filedata(self, name, decode):
-        return self._svncommand(['cat'], name)[0]
+        return self._svncommand([b'cat'], name)[0]
 
 
 class gitsubrepo(abstractsubrepo):
@@ -1310,25 +1316,25 @@
 
     def _ensuregit(self):
         try:
-            self._gitexecutable = 'git'
-            out, err = self._gitnodir(['--version'])
+            self._gitexecutable = b'git'
+            out, err = self._gitnodir([b'--version'])
         except OSError as e:
-            genericerror = _("error executing git for subrepo '%s': %s")
-            notfoundhint = _("check git is installed and in your PATH")
+            genericerror = _(b"error executing git for subrepo '%s': %s")
+            notfoundhint = _(b"check git is installed and in your PATH")
             if e.errno != errno.ENOENT:
                 raise error.Abort(
                     genericerror % (self._path, encoding.strtolocal(e.strerror))
                 )
             elif pycompat.iswindows:
                 try:
-                    self._gitexecutable = 'git.cmd'
-                    out, err = self._gitnodir(['--version'])
+                    self._gitexecutable = b'git.cmd'
+                    out, err = self._gitnodir([b'--version'])
                 except OSError as e2:
                     if e2.errno == errno.ENOENT:
                         raise error.Abort(
                             _(
-                                "couldn't find 'git' or 'git.cmd'"
-                                " for subrepo '%s'"
+                                b"couldn't find 'git' or 'git.cmd'"
+                                b" for subrepo '%s'"
                             )
                             % self._path,
                             hint=notfoundhint,
@@ -1340,16 +1346,18 @@
                         )
             else:
                 raise error.Abort(
-                    _("couldn't find git for subrepo '%s'") % self._path,
+                    _(b"couldn't find git for subrepo '%s'") % self._path,
                     hint=notfoundhint,
                 )
         versionstatus = self._checkversion(out)
-        if versionstatus == 'unknown':
-            self.ui.warn(_('cannot retrieve git version\n'))
-        elif versionstatus == 'abort':
-            raise error.Abort(_('git subrepo requires at least 1.6.0 or later'))
-        elif versionstatus == 'warning':
-            self.ui.warn(_('git subrepo requires at least 1.6.0 or later\n'))
+        if versionstatus == b'unknown':
+            self.ui.warn(_(b'cannot retrieve git version\n'))
+        elif versionstatus == b'abort':
+            raise error.Abort(
+                _(b'git subrepo requires at least 1.6.0 or later')
+            )
+        elif versionstatus == b'warning':
+            self.ui.warn(_(b'git subrepo requires at least 1.6.0 or later\n'))
 
     @staticmethod
     def _gitversion(out):
@@ -1392,12 +1400,12 @@
         # despite the docstring comment.  For now, error on 1.4.0, warn on
         # 1.5.0 but attempt to continue.
         if version == -1:
-            return 'unknown'
+            return b'unknown'
         if version < (1, 5, 0):
-            return 'abort'
+            return b'abort'
         elif version < (1, 6, 0):
-            return 'warning'
-        return 'ok'
+            return b'warning'
+        return b'ok'
 
     def _gitcommand(self, commands, env=None, stream=False):
         return self._gitdir(commands, env=env, stream=stream)[0]
@@ -1413,23 +1421,23 @@
         The methods tries to call the git command. versions prior to 1.6.0
         are not supported and very probably fail.
         """
-        self.ui.debug('%s: git %s\n' % (self._relpath, ' '.join(commands)))
+        self.ui.debug(b'%s: git %s\n' % (self._relpath, b' '.join(commands)))
         if env is None:
             env = encoding.environ.copy()
         # disable localization for Git output (issue5176)
-        env['LC_ALL'] = 'C'
+        env[b'LC_ALL'] = b'C'
         # fix for Git CVE-2015-7545
-        if 'GIT_ALLOW_PROTOCOL' not in env:
-            env['GIT_ALLOW_PROTOCOL'] = 'file:git:http:https:ssh'
+        if b'GIT_ALLOW_PROTOCOL' not in env:
+            env[b'GIT_ALLOW_PROTOCOL'] = b'file:git:http:https:ssh'
         # unless ui.quiet is set, print git's stderr,
         # which is mostly progress and useful info
         errpipe = None
         if self.ui.quiet:
-            errpipe = open(os.devnull, 'w')
-        if self.ui._colormode and len(commands) and commands[0] == "diff":
+            errpipe = open(os.devnull, b'w')
+        if self.ui._colormode and len(commands) and commands[0] == b"diff":
             # insert the argument in the front,
             # the end of git diff arguments is used for paths
-            commands.insert(1, '--color')
+            commands.insert(1, b'--color')
         p = subprocess.Popen(
             pycompat.rapply(
                 procutil.tonativestr, [self._gitexecutable] + commands
@@ -1451,50 +1459,50 @@
         if p.returncode != 0 and p.returncode != 1:
             # there are certain error codes that are ok
             command = commands[0]
-            if command in ('cat-file', 'symbolic-ref'):
+            if command in (b'cat-file', b'symbolic-ref'):
                 return retdata, p.returncode
             # for all others, abort
             raise error.Abort(
-                _('git %s error %d in %s')
+                _(b'git %s error %d in %s')
                 % (command, p.returncode, self._relpath)
             )
 
         return retdata, p.returncode
 
     def _gitmissing(self):
-        return not self.wvfs.exists('.git')
+        return not self.wvfs.exists(b'.git')
 
     def _gitstate(self):
-        return self._gitcommand(['rev-parse', 'HEAD'])
+        return self._gitcommand([b'rev-parse', b'HEAD'])
 
     def _gitcurrentbranch(self):
-        current, err = self._gitdir(['symbolic-ref', 'HEAD', '--quiet'])
+        current, err = self._gitdir([b'symbolic-ref', b'HEAD', b'--quiet'])
         if err:
             current = None
         return current
 
     def _gitremote(self, remote):
-        out = self._gitcommand(['remote', 'show', '-n', remote])
-        line = out.split('\n')[1]
-        i = line.index('URL: ') + len('URL: ')
+        out = self._gitcommand([b'remote', b'show', b'-n', remote])
+        line = out.split(b'\n')[1]
+        i = line.index(b'URL: ') + len(b'URL: ')
         return line[i:]
 
     def _githavelocally(self, revision):
-        out, code = self._gitdir(['cat-file', '-e', revision])
+        out, code = self._gitdir([b'cat-file', b'-e', revision])
         return code == 0
 
     def _gitisancestor(self, r1, r2):
-        base = self._gitcommand(['merge-base', r1, r2])
+        base = self._gitcommand([b'merge-base', r1, r2])
         return base == r1
 
     def _gitisbare(self):
-        return self._gitcommand(['config', '--bool', 'core.bare']) == 'true'
+        return self._gitcommand([b'config', b'--bool', b'core.bare']) == b'true'
 
     def _gitupdatestat(self):
         """This must be run before git diff-index.
         diff-index only looks at changes to file stat;
         this command looks at file contents and updates the stat."""
-        self._gitcommand(['update-index', '-q', '--refresh'])
+        self._gitcommand([b'update-index', b'-q', b'--refresh'])
 
     def _gitbranchmap(self):
         '''returns 2 things:
@@ -1504,41 +1512,41 @@
         rev2branch = {}
 
         out = self._gitcommand(
-            ['for-each-ref', '--format', '%(objectname) %(refname)']
+            [b'for-each-ref', b'--format', b'%(objectname) %(refname)']
         )
-        for line in out.split('\n'):
-            revision, ref = line.split(' ')
-            if not ref.startswith('refs/heads/') and not ref.startswith(
-                'refs/remotes/'
+        for line in out.split(b'\n'):
+            revision, ref = line.split(b' ')
+            if not ref.startswith(b'refs/heads/') and not ref.startswith(
+                b'refs/remotes/'
             ):
                 continue
-            if ref.startswith('refs/remotes/') and ref.endswith('/HEAD'):
+            if ref.startswith(b'refs/remotes/') and ref.endswith(b'/HEAD'):
                 continue  # ignore remote/HEAD redirects
             branch2rev[ref] = revision
             rev2branch.setdefault(revision, []).append(ref)
         return branch2rev, rev2branch
 
     def _gittracking(self, branches):
-        'return map of remote branch to local tracking branch'
+        b'return map of remote branch to local tracking branch'
         # assumes no more than one local tracking branch for each remote
         tracking = {}
         for b in branches:
-            if b.startswith('refs/remotes/'):
+            if b.startswith(b'refs/remotes/'):
                 continue
-            bname = b.split('/', 2)[2]
-            remote = self._gitcommand(['config', 'branch.%s.remote' % bname])
+            bname = b.split(b'/', 2)[2]
+            remote = self._gitcommand([b'config', b'branch.%s.remote' % bname])
             if remote:
-                ref = self._gitcommand(['config', 'branch.%s.merge' % bname])
+                ref = self._gitcommand([b'config', b'branch.%s.merge' % bname])
                 tracking[
-                    'refs/remotes/%s/%s' % (remote, ref.split('/', 2)[2])
+                    b'refs/remotes/%s/%s' % (remote, ref.split(b'/', 2)[2])
                 ] = b
         return tracking
 
     def _abssource(self, source):
-        if '://' not in source:
+        if b'://' not in source:
             # recognize the scp syntax as an absolute source
-            colon = source.find(':')
-            if colon != -1 and '/' not in source[:colon]:
+            colon = source.find(b':')
+            if colon != -1 and b'/' not in source[:colon]:
                 return source
         self._subsource = source
         return _abssource(self)
@@ -1550,27 +1558,27 @@
 
             source = self._abssource(source)
             self.ui.status(
-                _('cloning subrepo %s from %s\n') % (self._relpath, source)
+                _(b'cloning subrepo %s from %s\n') % (self._relpath, source)
             )
-            self._gitnodir(['clone', source, self._abspath])
+            self._gitnodir([b'clone', source, self._abspath])
         if self._githavelocally(revision):
             return
         self.ui.status(
-            _('pulling subrepo %s from %s\n')
-            % (self._relpath, self._gitremote('origin'))
+            _(b'pulling subrepo %s from %s\n')
+            % (self._relpath, self._gitremote(b'origin'))
         )
         # try only origin: the originally cloned repo
-        self._gitcommand(['fetch'])
+        self._gitcommand([b'fetch'])
         if not self._githavelocally(revision):
             raise error.Abort(
-                _('revision %s does not exist in subrepository ' '"%s"\n')
+                _(b'revision %s does not exist in subrepository ' b'"%s"\n')
                 % (revision, self._relpath)
             )
 
     @annotatesubrepoerror
     def dirty(self, ignoreupdate=False, missing=False):
         if self._gitmissing():
-            return self._state[1] != ''
+            return self._state[1] != b''
         if self._gitisbare():
             return True
         if not ignoreupdate and self._state[1] != self._gitstate():
@@ -1578,7 +1586,7 @@
             return True
         # check for staged changes or modified files; ignore untracked files
         self._gitupdatestat()
-        out, code = self._gitdir(['diff-index', '--quiet', 'HEAD'])
+        out, code = self._gitdir([b'diff-index', b'--quiet', b'HEAD'])
         return code == 1
 
     def basestate(self):
@@ -1593,41 +1601,41 @@
         self._fetch(source, revision)
         # if the repo was set to be bare, unbare it
         if self._gitisbare():
-            self._gitcommand(['config', 'core.bare', 'false'])
+            self._gitcommand([b'config', b'core.bare', b'false'])
             if self._gitstate() == revision:
-                self._gitcommand(['reset', '--hard', 'HEAD'])
+                self._gitcommand([b'reset', b'--hard', b'HEAD'])
                 return
         elif self._gitstate() == revision:
             if overwrite:
                 # first reset the index to unmark new files for commit, because
                 # reset --hard will otherwise throw away files added for commit,
                 # not just unmark them.
-                self._gitcommand(['reset', 'HEAD'])
-                self._gitcommand(['reset', '--hard', 'HEAD'])
+                self._gitcommand([b'reset', b'HEAD'])
+                self._gitcommand([b'reset', b'--hard', b'HEAD'])
             return
         branch2rev, rev2branch = self._gitbranchmap()
 
         def checkout(args):
-            cmd = ['checkout']
+            cmd = [b'checkout']
             if overwrite:
                 # first reset the index to unmark new files for commit, because
                 # the -f option will otherwise throw away files added for
                 # commit, not just unmark them.
-                self._gitcommand(['reset', 'HEAD'])
-                cmd.append('-f')
+                self._gitcommand([b'reset', b'HEAD'])
+                cmd.append(b'-f')
             self._gitcommand(cmd + args)
-            _sanitize(self.ui, self.wvfs, '.git')
+            _sanitize(self.ui, self.wvfs, b'.git')
 
         def rawcheckout():
             # no branch to checkout, check it out with no branch
             self.ui.warn(
-                _('checking out detached HEAD in ' 'subrepository "%s"\n')
+                _(b'checking out detached HEAD in ' b'subrepository "%s"\n')
                 % self._relpath
             )
             self.ui.warn(
-                _('check out a git branch if you intend ' 'to make changes\n')
+                _(b'check out a git branch if you intend ' b'to make changes\n')
             )
-            checkout(['-q', revision])
+            checkout([b'-q', revision])
 
         if revision not in rev2branch:
             rawcheckout()
@@ -1635,11 +1643,11 @@
         branches = rev2branch[revision]
         firstlocalbranch = None
         for b in branches:
-            if b == 'refs/heads/master':
+            if b == b'refs/heads/master':
                 # master trumps all other branches
-                checkout(['refs/heads/master'])
+                checkout([b'refs/heads/master'])
                 return
-            if not firstlocalbranch and not b.startswith('refs/remotes/'):
+            if not firstlocalbranch and not b.startswith(b'refs/remotes/'):
                 firstlocalbranch = b
         if firstlocalbranch:
             checkout([firstlocalbranch])
@@ -1656,8 +1664,8 @@
 
         if remote not in tracking:
             # create a new local tracking branch
-            local = remote.split('/', 3)[3]
-            checkout(['-b', local, remote])
+            local = remote.split(b'/', 3)[3]
+            checkout([b'-b', local, remote])
         elif self._gitisancestor(branch2rev[tracking[remote]], remote):
             # When updating to a tracked remote branch,
             # if the local tracking branch is downstream of it,
@@ -1667,8 +1675,8 @@
             # detect this situation and perform this action lazily.
             if tracking[remote] != self._gitcurrentbranch():
                 checkout([tracking[remote]])
-            self._gitcommand(['merge', '--ff', remote])
-            _sanitize(self.ui, self.wvfs, '.git')
+            self._gitcommand([b'merge', b'--ff', remote])
+            _sanitize(self.ui, self.wvfs, b'.git')
         else:
             # a real merge would be required, just checkout the revision
             rawcheckout()
@@ -1676,16 +1684,16 @@
     @annotatesubrepoerror
     def commit(self, text, user, date):
         if self._gitmissing():
-            raise error.Abort(_("subrepo %s is missing") % self._relpath)
-        cmd = ['commit', '-a', '-m', text]
+            raise error.Abort(_(b"subrepo %s is missing") % self._relpath)
+        cmd = [b'commit', b'-a', b'-m', text]
         env = encoding.environ.copy()
         if user:
-            cmd += ['--author', user]
+            cmd += [b'--author', user]
         if date:
             # git's date parser silently ignores when seconds < 1e9
             # convert to ISO8601
-            env['GIT_AUTHOR_DATE'] = dateutil.datestr(
-                date, '%Y-%m-%dT%H:%M:%S %1%2'
+            env[b'GIT_AUTHOR_DATE'] = dateutil.datestr(
+                date, b'%Y-%m-%dT%H:%M:%S %1%2'
             )
         self._gitcommand(cmd, env=env)
         # make sure commit works otherwise HEAD might not exist under certain
@@ -1696,16 +1704,16 @@
     def merge(self, state):
         source, revision, kind = state
         self._fetch(source, revision)
-        base = self._gitcommand(['merge-base', revision, self._state[1]])
+        base = self._gitcommand([b'merge-base', revision, self._state[1]])
         self._gitupdatestat()
-        out, code = self._gitdir(['diff-index', '--quiet', 'HEAD'])
+        out, code = self._gitdir([b'diff-index', b'--quiet', b'HEAD'])
 
         def mergefunc():
             if base == revision:
                 self.get(state)  # fast forward merge
             elif base != self._state[1]:
-                self._gitcommand(['merge', '--no-commit', revision])
-            _sanitize(self.ui, self.wvfs, '.git')
+                self._gitcommand([b'merge', b'--no-commit', revision])
+            _sanitize(self.ui, self.wvfs, b'.git')
 
         if self.dirty():
             if self._gitstate() != revision:
@@ -1719,26 +1727,26 @@
 
     @annotatesubrepoerror
     def push(self, opts):
-        force = opts.get('force')
+        force = opts.get(b'force')
 
         if not self._state[1]:
             return True
         if self._gitmissing():
-            raise error.Abort(_("subrepo %s is missing") % self._relpath)
+            raise error.Abort(_(b"subrepo %s is missing") % self._relpath)
         # if a branch in origin contains the revision, nothing to do
         branch2rev, rev2branch = self._gitbranchmap()
         if self._state[1] in rev2branch:
             for b in rev2branch[self._state[1]]:
-                if b.startswith('refs/remotes/origin/'):
+                if b.startswith(b'refs/remotes/origin/'):
                     return True
         for b, revision in branch2rev.iteritems():
-            if b.startswith('refs/remotes/origin/'):
+            if b.startswith(b'refs/remotes/origin/'):
                 if self._gitisancestor(self._state[1], revision):
                     return True
         # otherwise, try to push the currently checked out branch
-        cmd = ['push']
+        cmd = [b'push']
         if force:
-            cmd.append('--force')
+            cmd.append(b'--force')
 
         current = self._gitcurrentbranch()
         if current:
@@ -1746,23 +1754,23 @@
             if not self._gitisancestor(self._state[1], current):
                 self.ui.warn(
                     _(
-                        'unrelated git branch checked out '
-                        'in subrepository "%s"\n'
+                        b'unrelated git branch checked out '
+                        b'in subrepository "%s"\n'
                     )
                     % self._relpath
                 )
                 return False
             self.ui.status(
-                _('pushing branch %s of subrepository "%s"\n')
-                % (current.split('/', 2)[2], self._relpath)
+                _(b'pushing branch %s of subrepository "%s"\n')
+                % (current.split(b'/', 2)[2], self._relpath)
             )
-            ret = self._gitdir(cmd + ['origin', current])
+            ret = self._gitdir(cmd + [b'origin', current])
             return ret[1] == 0
         else:
             self.ui.warn(
                 _(
-                    'no branch checked out in subrepository "%s"\n'
-                    'cannot push revision %s\n'
+                    b'no branch checked out in subrepository "%s"\n'
+                    b'cannot push revision %s\n'
                 )
                 % (self._relpath, self._state[1])
             )
@@ -1789,11 +1797,11 @@
         files = [f for f in sorted(set(files)) if match(f)]
         for f in files:
             exact = match.exact(f)
-            command = ["add"]
+            command = [b"add"]
             if exact:
-                command.append("-f")  # should be added, even if ignored
+                command.append(b"-f")  # should be added, even if ignored
             if ui.verbose or not exact:
-                ui.status(_('adding %s\n') % uipathfn(f))
+                ui.status(_(b'adding %s\n') % uipathfn(f))
 
             if f in tracked:  # hg prints 'adding' even if already tracked
                 if exact:
@@ -1803,7 +1811,7 @@
                 self._gitcommand(command + [f])
 
         for f in rejected:
-            ui.warn(_("%s already tracked!\n") % uipathfn(f))
+            ui.warn(_(b"%s already tracked!\n") % uipathfn(f))
 
         return rejected
 
@@ -1813,16 +1821,16 @@
             return
         if self.dirty():
             self.ui.warn(
-                _('not removing repo %s because ' 'it has changes.\n')
+                _(b'not removing repo %s because ' b'it has changes.\n')
                 % self._relpath
             )
             return
         # we can't fully delete the repository as it may contain
         # local-only history
-        self.ui.note(_('removing subrepo %s\n') % self._relpath)
-        self._gitcommand(['config', 'core.bare', 'true'])
+        self.ui.note(_(b'removing subrepo %s\n') % self._relpath)
+        self._gitcommand([b'config', b'core.bare', b'true'])
         for f, kind in self.wvfs.readdir():
-            if f == '.git':
+            if f == b'.git':
                 continue
             if kind == stat.S_IFDIR:
                 self.wvfs.rmtree(f)
@@ -1839,11 +1847,11 @@
         # Parse git's native archive command.
         # This should be much faster than manually traversing the trees
         # and objects with many subprocess calls.
-        tarstream = self._gitcommand(['archive', revision], stream=True)
+        tarstream = self._gitcommand([b'archive', revision], stream=True)
         tar = tarfile.open(fileobj=tarstream, mode=r'r|')
         relpath = subrelpath(self)
         progress = self.ui.makeprogress(
-            _('archiving (%s)') % relpath, unit=_('files')
+            _(b'archiving (%s)') % relpath, unit=_(b'files')
         )
         progress.update(0)
         for info in tar:
@@ -1873,7 +1881,7 @@
 
         # TODO: add support for non-plain formatter (see cmdutil.cat())
         for f in match.files():
-            output = self._gitcommand(["show", "%s:%s" % (rev, f)])
+            output = self._gitcommand([b"show", b"%s:%s" % (rev, f)])
             fp = cmdutil.makefileobj(
                 self._ctx, fntemplate, pathname=self.wvfs.reljoin(prefix, f)
             )
@@ -1890,42 +1898,42 @@
         modified, added, removed = [], [], []
         self._gitupdatestat()
         if rev2:
-            command = ['diff-tree', '--no-renames', '-r', rev1, rev2]
+            command = [b'diff-tree', b'--no-renames', b'-r', rev1, rev2]
         else:
-            command = ['diff-index', '--no-renames', rev1]
+            command = [b'diff-index', b'--no-renames', rev1]
         out = self._gitcommand(command)
-        for line in out.split('\n'):
-            tab = line.find('\t')
+        for line in out.split(b'\n'):
+            tab = line.find(b'\t')
             if tab == -1:
                 continue
             status, f = line[tab - 1 : tab], line[tab + 1 :]
-            if status == 'M':
+            if status == b'M':
                 modified.append(f)
-            elif status == 'A':
+            elif status == b'A':
                 added.append(f)
-            elif status == 'D':
+            elif status == b'D':
                 removed.append(f)
 
         deleted, unknown, ignored, clean = [], [], [], []
 
-        command = ['status', '--porcelain', '-z']
+        command = [b'status', b'--porcelain', b'-z']
         if opts.get(r'unknown'):
-            command += ['--untracked-files=all']
+            command += [b'--untracked-files=all']
         if opts.get(r'ignored'):
-            command += ['--ignored']
+            command += [b'--ignored']
         out = self._gitcommand(command)
 
         changedfiles = set()
         changedfiles.update(modified)
         changedfiles.update(added)
         changedfiles.update(removed)
-        for line in out.split('\0'):
+        for line in out.split(b'\0'):
             if not line:
                 continue
             st = line[0:2]
             # moves and copies show 2 files on one line
-            if line.find('\0') >= 0:
-                filename1, filename2 = line[3:].split('\0')
+            if line.find(b'\0') >= 0:
+                filename1, filename2 = line[3:].split(b'\0')
             else:
                 filename1 = line[3:]
                 filename2 = None
@@ -1934,14 +1942,14 @@
             if filename2:
                 changedfiles.add(filename2)
 
-            if st == '??':
+            if st == b'??':
                 unknown.append(filename1)
-            elif st == '!!':
+            elif st == b'!!':
                 ignored.append(filename1)
 
         if opts.get(r'clean'):
-            out = self._gitcommand(['ls-files'])
-            for f in out.split('\n'):
+            out = self._gitcommand([b'ls-files'])
+            for f in out.split(b'\n'):
                 if not f in changedfiles:
                     clean.append(f)
 
@@ -1952,52 +1960,52 @@
     @annotatesubrepoerror
     def diff(self, ui, diffopts, node2, match, prefix, **opts):
         node1 = self._state[1]
-        cmd = ['diff', '--no-renames']
+        cmd = [b'diff', b'--no-renames']
         if opts[r'stat']:
-            cmd.append('--stat')
+            cmd.append(b'--stat')
         else:
             # for Git, this also implies '-p'
-            cmd.append('-U%d' % diffopts.context)
+            cmd.append(b'-U%d' % diffopts.context)
 
         if diffopts.noprefix:
             cmd.extend(
-                ['--src-prefix=%s/' % prefix, '--dst-prefix=%s/' % prefix]
+                [b'--src-prefix=%s/' % prefix, b'--dst-prefix=%s/' % prefix]
             )
         else:
             cmd.extend(
-                ['--src-prefix=a/%s/' % prefix, '--dst-prefix=b/%s/' % prefix]
+                [b'--src-prefix=a/%s/' % prefix, b'--dst-prefix=b/%s/' % prefix]
             )
 
         if diffopts.ignorews:
-            cmd.append('--ignore-all-space')
+            cmd.append(b'--ignore-all-space')
         if diffopts.ignorewsamount:
-            cmd.append('--ignore-space-change')
+            cmd.append(b'--ignore-space-change')
         if (
-            self._gitversion(self._gitcommand(['--version'])) >= (1, 8, 4)
+            self._gitversion(self._gitcommand([b'--version'])) >= (1, 8, 4)
             and diffopts.ignoreblanklines
         ):
-            cmd.append('--ignore-blank-lines')
+            cmd.append(b'--ignore-blank-lines')
 
         cmd.append(node1)
         if node2:
             cmd.append(node2)
 
-        output = ""
+        output = b""
         if match.always():
-            output += self._gitcommand(cmd) + '\n'
+            output += self._gitcommand(cmd) + b'\n'
         else:
             st = self.status(node2)[:3]
             files = [f for sublist in st for f in sublist]
             for f in files:
                 if match(f):
-                    output += self._gitcommand(cmd + ['--', f]) + '\n'
+                    output += self._gitcommand(cmd + [b'--', f]) + b'\n'
 
         if output.strip():
             ui.write(output)
 
     @annotatesubrepoerror
     def revert(self, substate, *pats, **opts):
-        self.ui.status(_('reverting subrepo %s\n') % substate[0])
+        self.ui.status(_(b'reverting subrepo %s\n') % substate[0])
         if not opts.get(r'no_backup'):
             status = self.status(None)
             names = status.modified
@@ -2009,7 +2017,7 @@
                     self.ui, self._subparent, parentname
                 )
                 self.ui.note(
-                    _('saving current version of %s as %s\n')
+                    _(b'saving current version of %s as %s\n')
                     % (name, os.path.relpath(bakname))
                 )
                 util.rename(self.wvfs.join(name), bakname)
@@ -2023,7 +2031,7 @@
 
 
 types = {
-    'hg': hgsubrepo,
-    'svn': svnsubrepo,
-    'git': gitsubrepo,
+    b'hg': hgsubrepo,
+    b'svn': svnsubrepo,
+    b'git': gitsubrepo,
 }
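
A minimal sketch, separate from the patch itself, of the behavior these subrepo hunks are defending against: from Python 3.5 (PEP 461), '%'-formatting works on bytes, but a str template combined with bytes operands does not fail, it silently embeds their reprs. The (url, rev) pair below is a hypothetical stand-in.

    # Sketch only: why the templates and their operands must both be bytes.
    state = (b'^/trunk', b'42')            # hypothetical (url, rev), as in svnsubrepo
    ok = b'%s@%s' % (state[0], state[1])   # bytes % bytes (PEP 461, Python >= 3.5)
    assert ok == b'^/trunk@42'
    bad = '%s@%s' % (state[0], state[1])   # str % bytes: no error, just a repr leak
    assert bad == "b'^/trunk'@b'42'"
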
--- a/mercurial/subrepoutil.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/subrepoutil.py	Sun Oct 06 09:48:39 2019 -0400
@@ -23,7 +23,7 @@
 )
 from .utils import stringutil
 
-nullstate = ('', '', 'empty')
+nullstate = (b'', b'', b'empty')
 
 
 def state(ctx, ui):
@@ -43,38 +43,38 @@
                     raise
                 # handle missing subrepo spec files as removed
                 ui.warn(
-                    _("warning: subrepo spec file \'%s\' not found\n")
+                    _(b"warning: subrepo spec file \'%s\' not found\n")
                     % repo.pathto(f)
                 )
                 return
             p.parse(f, data, sections, remap, read)
         else:
             raise error.Abort(
-                _("subrepo spec file \'%s\' not found") % repo.pathto(f)
+                _(b"subrepo spec file \'%s\' not found") % repo.pathto(f)
             )
 
-    if '.hgsub' in ctx:
-        read('.hgsub')
+    if b'.hgsub' in ctx:
+        read(b'.hgsub')
 
-    for path, src in ui.configitems('subpaths'):
-        p.set('subpaths', path, src, ui.configsource('subpaths', path))
+    for path, src in ui.configitems(b'subpaths'):
+        p.set(b'subpaths', path, src, ui.configsource(b'subpaths', path))
 
     rev = {}
-    if '.hgsubstate' in ctx:
+    if b'.hgsubstate' in ctx:
         try:
-            for i, l in enumerate(ctx['.hgsubstate'].data().splitlines()):
+            for i, l in enumerate(ctx[b'.hgsubstate'].data().splitlines()):
                 l = l.lstrip()
                 if not l:
                     continue
                 try:
-                    revision, path = l.split(" ", 1)
+                    revision, path = l.split(b" ", 1)
                 except ValueError:
                     raise error.Abort(
                         _(
-                            "invalid subrepository revision "
-                            "specifier in \'%s\' line %d"
+                            b"invalid subrepository revision "
+                            b"specifier in \'%s\' line %d"
                         )
-                        % (repo.pathto('.hgsubstate'), (i + 1))
+                        % (repo.pathto(b'.hgsubstate'), (i + 1))
                     )
                 rev[path] = revision
         except IOError as err:
@@ -82,7 +82,7 @@
                 raise
 
     def remap(src):
-        for pattern, repl in p.items('subpaths'):
+        for pattern, repl in p.items(b'subpaths'):
             # Turn r'C:\foo\bar' into r'C:\\foo\\bar' since re.sub
             # does a string decode.
             repl = stringutil.escapestr(repl)
@@ -94,21 +94,21 @@
                 src = re.sub(pattern, repl, src, 1)
             except re.error as e:
                 raise error.Abort(
-                    _("bad subrepository pattern in %s: %s")
+                    _(b"bad subrepository pattern in %s: %s")
                     % (
-                        p.source('subpaths', pattern),
+                        p.source(b'subpaths', pattern),
                         stringutil.forcebytestr(e),
                     )
                 )
         return src
 
     state = {}
-    for path, src in p[''].items():
-        kind = 'hg'
-        if src.startswith('['):
-            if ']' not in src:
-                raise error.Abort(_('missing ] in subrepository source'))
-            kind, src = src.split(']', 1)
+    for path, src in p[b''].items():
+        kind = b'hg'
+        if src.startswith(b'['):
+            if b']' not in src:
+                raise error.Abort(_(b'missing ] in subrepository source'))
+            kind, src = src.split(b']', 1)
             kind = kind[1:]
             src = src.lstrip()  # strip any extra whitespace after ']'
 
@@ -116,7 +116,7 @@
             parent = _abssource(repo, abort=False)
             if parent:
                 parent = util.url(parent)
-                parent.path = posixpath.join(parent.path or '', src)
+                parent.path = posixpath.join(parent.path or b'', src)
                 parent.path = posixpath.normpath(parent.path)
                 joined = bytes(parent)
                 # Remap the full joined path and use it if it changes,
@@ -128,7 +128,7 @@
                     src = remapped
 
         src = remap(src)
-        state[util.pconvert(path)] = (src.strip(), rev.get(path, ''), kind)
+        state[util.pconvert(path)] = (src.strip(), rev.get(path, b''), kind)
 
     return state
 
@@ -136,11 +136,11 @@
 def writestate(repo, state):
     """rewrite .hgsubstate in (outer) repo with these subrepo states"""
     lines = [
-        '%s %s\n' % (state[s][1], s)
+        b'%s %s\n' % (state[s][1], s)
         for s in sorted(state)
         if state[s][1] != nullstate[1]
     ]
-    repo.wwrite('.hgsubstate', ''.join(lines), '')
+    repo.wwrite(b'.hgsubstate', b''.join(lines), b'')
 
 
 def submerge(repo, wctx, mctx, actx, overwrite, labels=None):
@@ -153,67 +153,67 @@
     sa = actx.substate
     sm = {}
 
-    repo.ui.debug("subrepo merge %s %s %s\n" % (wctx, mctx, actx))
+    repo.ui.debug(b"subrepo merge %s %s %s\n" % (wctx, mctx, actx))
 
-    def debug(s, msg, r=""):
+    def debug(s, msg, r=b""):
         if r:
-            r = "%s:%s:%s" % r
-        repo.ui.debug("  subrepo %s: %s %s\n" % (s, msg, r))
+            r = b"%s:%s:%s" % r
+        repo.ui.debug(b"  subrepo %s: %s %s\n" % (s, msg, r))
 
     promptssrc = filemerge.partextras(labels)
     for s, l in sorted(s1.iteritems()):
         a = sa.get(s, nullstate)
         ld = l  # local state with possible dirty flag for compares
         if wctx.sub(s).dirty():
-            ld = (l[0], l[1] + "+")
+            ld = (l[0], l[1] + b"+")
         if wctx == actx:  # overwrite
             a = ld
 
         prompts = promptssrc.copy()
-        prompts['s'] = s
+        prompts[b's'] = s
         if s in s2:
             r = s2[s]
             if ld == r or r == a:  # no change or local is newer
                 sm[s] = l
                 continue
             elif ld == a:  # other side changed
-                debug(s, "other changed, get", r)
+                debug(s, b"other changed, get", r)
                 wctx.sub(s).get(r, overwrite)
                 sm[s] = r
             elif ld[0] != r[0]:  # sources differ
-                prompts['lo'] = l[0]
-                prompts['ro'] = r[0]
+                prompts[b'lo'] = l[0]
+                prompts[b'ro'] = r[0]
                 if repo.ui.promptchoice(
                     _(
-                        ' subrepository sources for %(s)s differ\n'
-                        'you can use (l)ocal%(l)s source (%(lo)s)'
-                        ' or (r)emote%(o)s source (%(ro)s).\n'
-                        'what do you want to do?'
-                        '$$ &Local $$ &Remote'
+                        b' subrepository sources for %(s)s differ\n'
+                        b'you can use (l)ocal%(l)s source (%(lo)s)'
+                        b' or (r)emote%(o)s source (%(ro)s).\n'
+                        b'what do you want to do?'
+                        b'$$ &Local $$ &Remote'
                     )
                     % prompts,
                     0,
                 ):
-                    debug(s, "prompt changed, get", r)
+                    debug(s, b"prompt changed, get", r)
                     wctx.sub(s).get(r, overwrite)
                     sm[s] = r
             elif ld[1] == a[1]:  # local side is unchanged
-                debug(s, "other side changed, get", r)
+                debug(s, b"other side changed, get", r)
                 wctx.sub(s).get(r, overwrite)
                 sm[s] = r
             else:
-                debug(s, "both sides changed")
+                debug(s, b"both sides changed")
                 srepo = wctx.sub(s)
-                prompts['sl'] = srepo.shortid(l[1])
-                prompts['sr'] = srepo.shortid(r[1])
+                prompts[b'sl'] = srepo.shortid(l[1])
+                prompts[b'sr'] = srepo.shortid(r[1])
                 option = repo.ui.promptchoice(
                     _(
-                        ' subrepository %(s)s diverged (local revision: %(sl)s, '
-                        'remote revision: %(sr)s)\n'
-                        'you can (m)erge, keep (l)ocal%(l)s or keep '
-                        '(r)emote%(o)s.\n'
-                        'what do you want to do?'
-                        '$$ &Merge $$ &Local $$ &Remote'
+                        b' subrepository %(s)s diverged (local revision: %(sl)s, '
+                        b'remote revision: %(sr)s)\n'
+                        b'you can (m)erge, keep (l)ocal%(l)s or keep '
+                        b'(r)emote%(o)s.\n'
+                        b'what do you want to do?'
+                        b'$$ &Merge $$ &Local $$ &Remote'
                     )
                     % prompts,
                     0,
@@ -221,59 +221,59 @@
                 if option == 0:
                     wctx.sub(s).merge(r)
                     sm[s] = l
-                    debug(s, "merge with", r)
+                    debug(s, b"merge with", r)
                 elif option == 1:
                     sm[s] = l
-                    debug(s, "keep local subrepo revision", l)
+                    debug(s, b"keep local subrepo revision", l)
                 else:
                     wctx.sub(s).get(r, overwrite)
                     sm[s] = r
-                    debug(s, "get remote subrepo revision", r)
+                    debug(s, b"get remote subrepo revision", r)
         elif ld == a:  # remote removed, local unchanged
-            debug(s, "remote removed, remove")
+            debug(s, b"remote removed, remove")
             wctx.sub(s).remove()
         elif a == nullstate:  # not present in remote or ancestor
-            debug(s, "local added, keep")
+            debug(s, b"local added, keep")
             sm[s] = l
             continue
         else:
             if repo.ui.promptchoice(
                 _(
-                    ' local%(l)s changed subrepository %(s)s'
-                    ' which remote%(o)s removed\n'
-                    'use (c)hanged version or (d)elete?'
-                    '$$ &Changed $$ &Delete'
+                    b' local%(l)s changed subrepository %(s)s'
+                    b' which remote%(o)s removed\n'
+                    b'use (c)hanged version or (d)elete?'
+                    b'$$ &Changed $$ &Delete'
                 )
                 % prompts,
                 0,
             ):
-                debug(s, "prompt remove")
+                debug(s, b"prompt remove")
                 wctx.sub(s).remove()
 
     for s, r in sorted(s2.items()):
         if s in s1:
             continue
         elif s not in sa:
-            debug(s, "remote added, get", r)
+            debug(s, b"remote added, get", r)
             mctx.sub(s).get(r)
             sm[s] = r
         elif r != sa[s]:
             prompts = promptssrc.copy()
-            prompts['s'] = s
+            prompts[b's'] = s
             if (
                 repo.ui.promptchoice(
                     _(
-                        ' remote%(o)s changed subrepository %(s)s'
-                        ' which local%(l)s removed\n'
-                        'use (c)hanged version or (d)elete?'
-                        '$$ &Changed $$ &Delete'
+                        b' remote%(o)s changed subrepository %(s)s'
+                        b' which local%(l)s removed\n'
+                        b'use (c)hanged version or (d)elete?'
+                        b'$$ &Changed $$ &Delete'
                     )
                     % prompts,
                     0,
                 )
                 == 0
             ):
-                debug(s, "prompt recreate", r)
+                debug(s, b"prompt recreate", r)
                 mctx.sub(s).get(r)
                 sm[s] = r
 
@@ -297,11 +297,11 @@
     newstate = wctx.substate.copy()
 
     # only manage subrepos and .hgsubstate if .hgsub is present
-    if '.hgsub' in wctx:
+    if b'.hgsub' in wctx:
         # we'll decide whether to track this ourselves, thanks
         for c in status.modified, status.added, status.removed:
-            if '.hgsubstate' in c:
-                c.remove('.hgsubstate')
+            if b'.hgsubstate' in c:
+                c.remove(b'.hgsubstate')
 
         # compare current state to last committed state
         # build new substate based on last committed state
@@ -314,14 +314,14 @@
                     continue
                 if not force:
                     raise error.Abort(
-                        _("commit with new subrepo %s excluded") % s
+                        _(b"commit with new subrepo %s excluded") % s
                     )
             dirtyreason = wctx.sub(s).dirtyreason(True)
             if dirtyreason:
-                if not ui.configbool('ui', 'commitsubrepos'):
+                if not ui.configbool(b'ui', b'commitsubrepos'):
                     raise error.Abort(
                         dirtyreason,
-                        hint=_("use --subrepos for recursive commit"),
+                        hint=_(b"use --subrepos for recursive commit"),
                     )
                 subs.append(s)
                 commitsubs.add(s)
@@ -336,18 +336,18 @@
             r = [s for s in p.substate if s not in newstate]
             subs += [s for s in r if match(s)]
         if subs:
-            if not match('.hgsub') and '.hgsub' in (
+            if not match(b'.hgsub') and b'.hgsub' in (
                 wctx.modified() + wctx.added()
             ):
-                raise error.Abort(_("can't commit subrepos without .hgsub"))
-            status.modified.insert(0, '.hgsubstate')
+                raise error.Abort(_(b"can't commit subrepos without .hgsub"))
+            status.modified.insert(0, b'.hgsubstate')
 
-    elif '.hgsub' in status.removed:
+    elif b'.hgsub' in status.removed:
         # clean up .hgsubstate when .hgsub is removed
-        if '.hgsubstate' in wctx and '.hgsubstate' not in (
+        if b'.hgsubstate' in wctx and b'.hgsubstate' not in (
             status.modified + status.added + status.removed
         ):
-            status.removed.insert(0, '.hgsubstate')
+            status.removed.insert(0, b'.hgsubstate')
 
     return subs, commitsubs, newstate
 
@@ -355,7 +355,7 @@
 def reporelpath(repo):
     """return path to this (sub)repo as seen from outermost repo"""
     parent = repo
-    while util.safehasattr(parent, '_subparent'):
+    while util.safehasattr(parent, b'_subparent'):
         parent = parent._subparent
     return repo.root[len(pathutil.normasprefix(parent.root)) :]
 
@@ -368,7 +368,7 @@
 def _abssource(repo, push=False, abort=True):
     """return pull/push path of repo - either based on parent repo .hgsub info
     or on the top repo config. Abort or return None if no source found."""
-    if util.safehasattr(repo, '_subparent'):
+    if util.safehasattr(repo, b'_subparent'):
         source = util.url(repo._subsource)
         if source.isabs():
             return bytes(source)
@@ -376,17 +376,17 @@
         parent = _abssource(repo._subparent, push, abort=False)
         if parent:
             parent = util.url(util.pconvert(parent))
-            parent.path = posixpath.join(parent.path or '', source.path)
+            parent.path = posixpath.join(parent.path or b'', source.path)
             parent.path = posixpath.normpath(parent.path)
             return bytes(parent)
     else:  # recursion reached top repo
         path = None
-        if util.safehasattr(repo, '_subtoppath'):
+        if util.safehasattr(repo, b'_subtoppath'):
             path = repo._subtoppath
-        elif push and repo.ui.config('paths', 'default-push'):
-            path = repo.ui.config('paths', 'default-push')
-        elif repo.ui.config('paths', 'default'):
-            path = repo.ui.config('paths', 'default')
+        elif push and repo.ui.config(b'paths', b'default-push'):
+            path = repo.ui.config(b'paths', b'default-push')
+        elif repo.ui.config(b'paths', b'default'):
+            path = repo.ui.config(b'paths', b'default')
         elif repo.shared():
             # chop off the .hg component to get the default path form.  This has
             # already run through vfsmod.vfs(..., realpath=True), so it doesn't
@@ -410,7 +410,7 @@
             return path
 
     if abort:
-        raise error.Abort(_("default path for subrepository not found"))
+        raise error.Abort(_(b"default path for subrepository not found"))
 
 
 def newcommitphase(ui, ctx):
@@ -418,12 +418,12 @@
     substate = getattr(ctx, "substate", None)
     if not substate:
         return commitphase
-    check = ui.config('phases', 'checksubrepos')
-    if check not in ('ignore', 'follow', 'abort'):
+    check = ui.config(b'phases', b'checksubrepos')
+    if check not in (b'ignore', b'follow', b'abort'):
         raise error.Abort(
-            _('invalid phases.checksubrepos configuration: %s') % check
+            _(b'invalid phases.checksubrepos configuration: %s') % check
         )
-    if check == 'ignore':
+    if check == b'ignore':
         return commitphase
     maxphase = phases.public
     maxsub = None
@@ -434,11 +434,11 @@
             maxphase = subphase
             maxsub = s
     if commitphase < maxphase:
-        if check == 'abort':
+        if check == b'abort':
             raise error.Abort(
                 _(
-                    "can't commit in %s phase"
-                    " conflicting %s from subrepository %s"
+                    b"can't commit in %s phase"
+                    b" conflicting %s from subrepository %s"
                 )
                 % (
                     phases.phasenames[commitphase],
@@ -448,8 +448,8 @@
             )
         ui.warn(
             _(
-                "warning: changes are committed in"
-                " %s phase from subrepository %s\n"
+                b"warning: changes are committed in"
+                b" %s phase from subrepository %s\n"
             )
             % (phases.phasenames[maxphase], maxsub)
         )
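
A minimal sketch, separate from the patch, of the dict-key half of this conversion: str and bytes never compare equal in Python 3, so once the prompt templates in submerge() become bytes, their mapping keys (prompts[b's'] and friends) have to follow. The values below are hypothetical.

    # Sketch only: bytes and str are distinct dict keys in Python 3.
    prompts = {b's': b'libfoo', b'l': b'', b'o': b''}  # hypothetical prompt table
    assert 's' not in prompts              # the old str key no longer matches
    # bytes %-formatting also supports %(name)s lookups with bytes keys:
    msg = b' subrepository sources for %(s)s differ\n' % prompts
    assert msg == b' subrepository sources for libfoo differ\n'
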
--- a/mercurial/tagmerge.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/tagmerge.py	Sun Oct 06 09:48:39 2019 -0400
@@ -86,7 +86,7 @@
 hexnullid = hex(nullid)
 
 
-def readtagsformerge(ui, repo, lines, fn='', keeplinenums=False):
+def readtagsformerge(ui, repo, lines, fn=b'', keeplinenums=False):
     '''read the .hgtags file into a structure that is suitable for merging
 
     Depending on the keeplinenums flag, clear the line numbers associated
@@ -154,7 +154,7 @@
     # convert the grouped merged tags dict into a format that resembles the
     # final .hgtags file (i.e. a list of blocks of 'node tag' pairs)
     def taglist2string(tlist, tname):
-        return '\n'.join(['%s %s' % (hexnode, tname) for hexnode in tlist])
+        return b'\n'.join([b'%s %s' % (hexnode, tname) for hexnode in tlist])
 
     finaltags = []
     for tname, tags in mergedtags.items():
@@ -170,8 +170,8 @@
 
     # finally we can join the sorted groups to get the final contents of the
     # merged .hgtags file, and then write it to disk
-    mergedtagstring = '\n'.join([tags for rank, tags in finaltags if tags])
-    fcd.write(mergedtagstring + '\n', fcd.flags())
+    mergedtagstring = b'\n'.join([tags for rank, tags in finaltags if tags])
+    fcd.write(mergedtagstring + b'\n', fcd.flags())
 
 
 def singletagmerge(p1nodes, p2nodes):
@@ -229,13 +229,13 @@
     # read the p1, p2 and base tags
     # only keep the line numbers for the p1 tags
     p1tags = readtagsformerge(
-        ui, repo, fcd.data().splitlines(), fn="p1 tags", keeplinenums=True
+        ui, repo, fcd.data().splitlines(), fn=b"p1 tags", keeplinenums=True
     )
     p2tags = readtagsformerge(
-        ui, repo, fco.data().splitlines(), fn="p2 tags", keeplinenums=False
+        ui, repo, fco.data().splitlines(), fn=b"p2 tags", keeplinenums=False
     )
     basetags = readtagsformerge(
-        ui, repo, fca.data().splitlines(), fn="base tags", keeplinenums=False
+        ui, repo, fca.data().splitlines(), fn=b"base tags", keeplinenums=False
     )
 
     # recover the list of "lost tags" (i.e. those that were found on the base
@@ -267,13 +267,13 @@
         numconflicts = len(conflictedtags)
         ui.warn(
             _(
-                'automatic .hgtags merge failed\n'
-                'the following %d tags are in conflict: %s\n'
+                b'automatic .hgtags merge failed\n'
+                b'the following %d tags are in conflict: %s\n'
             )
-            % (numconflicts, ', '.join(sorted(conflictedtags)))
+            % (numconflicts, b', '.join(sorted(conflictedtags)))
         )
         return True, 1
 
     writemergedtags(fcd, mergedtags)
-    ui.note(_('.hgtags merged successfully\n'))
+    ui.note(_(b'.hgtags merged successfully\n'))
     return False, 0
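
A minimal sketch, separate from the patch, of the join half: bytes.join() refuses str items outright, so helpers like taglist2string above must receive all-bytes input end to end. The hexnodes below are hypothetical.

    # Sketch only: b'\n'.join() needs every item to be bytes.
    hexnodes = [b'deadbeef', b'cafebabe']  # hypothetical short hexnodes
    block = b'\n'.join([b'%s %s' % (n, b'v1.0') for n in hexnodes])
    assert block == b'deadbeef v1.0\ncafebabe v1.0'
    try:
        b'\n'.join(['deadbeef v1.0'])      # one stray str item
    except TypeError:
        pass                               # fails loudly, unlike str % bytes
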
--- a/mercurial/tags.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/tags.py	Sun Oct 06 09:48:39 2019 -0400
@@ -155,10 +155,10 @@
 
     See documentation of difftags output for details about the input.
     """
-    add = '+A %s %s\n'
-    remove = '-R %s %s\n'
-    updateold = '-M %s %s\n'
-    updatenew = '+M %s %s\n'
+    add = b'+A %s %s\n'
+    remove = b'-R %s %s\n'
+    updateold = b'-M %s %s\n'
+    updatenew = b'+M %s %s\n'
     for tag, old, new in difflist:
         # translate to hex
         if old is not None:
@@ -195,7 +195,7 @@
     for head in reversed(heads):  # oldest to newest
         assert (
             head in repo.changelog.nodemap
-        ), "tag cache returned bogus head %s" % short(head)
+        ), b"tag cache returned bogus head %s" % short(head)
     fnodes = _filterfnodes(tagfnode, reversed(heads))
     alltags = _tagsfromfnodes(ui, repo, fnodes)
 
@@ -231,7 +231,7 @@
     fctx = None
     for fnode in fnodes:
         if fctx is None:
-            fctx = repo.filectx('.hgtags', fileid=fnode)
+            fctx = repo.filectx(b'.hgtags', fileid=fnode)
         else:
             fctx = fctx.filectx(fnode)
         filetags = _readtags(ui, repo, fctx.data().splitlines(), fctx)
@@ -242,7 +242,7 @@
 def readlocaltags(ui, repo, alltags, tagtypes):
     '''Read local tags in repo. Update alltags and tagtypes.'''
     try:
-        data = repo.vfs.read("localtags")
+        data = repo.vfs.read(b"localtags")
     except IOError as inst:
         if inst.errno != errno.ENOENT:
             raise
@@ -251,7 +251,7 @@
     # localtags is in the local encoding; re-encode to UTF-8 on
     # input for consistency with the rest of this module.
     filetags = _readtags(
-        ui, repo, data.splitlines(), "localtags", recode=encoding.fromlocal
+        ui, repo, data.splitlines(), b"localtags", recode=encoding.fromlocal
     )
 
     # remove tags pointing to invalid nodes
@@ -262,7 +262,7 @@
         except (LookupError, ValueError):
             del filetags[t]
 
-    _updatetags(filetags, alltags, 'local', tagtypes)
+    _updatetags(filetags, alltags, b'local', tagtypes)
 
 
 def _readtaghist(ui, repo, lines, fn, recode=None, calcnodelines=False):
@@ -289,16 +289,16 @@
     count = 0
 
     def dbg(msg):
-        ui.debug("%s, line %d: %s\n" % (fn, count, msg))
+        ui.debug(b"%s, line %d: %s\n" % (fn, count, msg))
 
     for nline, line in enumerate(lines):
         count += 1
         if not line:
             continue
         try:
-            (nodehex, name) = line.split(" ", 1)
+            (nodehex, name) = line.split(b" ", 1)
         except ValueError:
-            dbg("cannot parse entry")
+            dbg(b"cannot parse entry")
             continue
         name = name.strip()
         if recode:
@@ -306,7 +306,7 @@
         try:
             nodebin = bin(nodehex)
         except TypeError:
-            dbg("node '%s' is not well formed" % nodehex)
+            dbg(b"node '%s' is not well formed" % nodehex)
             continue
 
         # update filetags
@@ -382,9 +382,9 @@
 
 def _filename(repo):
     """name of a tagcache file for a given repo or repoview"""
-    filename = 'tags2'
+    filename = b'tags2'
     if repo.filtername:
-        filename = '%s-%s' % (filename, repo.filtername)
+        filename = b'%s-%s' % (filename, repo.filtername)
     return filename
 
 
@@ -407,7 +407,7 @@
     info from each returned head. (See findglobaltags().)
     '''
     try:
-        cachefile = repo.cachevfs(_filename(repo), 'r')
+        cachefile = repo.cachevfs(_filename(repo), b'r')
         # force reading the file for static-http
         cachelines = iter(cachefile)
     except IOError:
@@ -467,7 +467,7 @@
 
     # N.B. in case 4 (nodes destroyed), "new head" really means "newly
     # exposed".
-    if not len(repo.file('.hgtags')):
+    if not len(repo.file(b'.hgtags')):
         # No tags have ever been committed, so we can avoid a
         # potentially expensive search.
         return ([], {}, valid, None, True)
@@ -502,8 +502,8 @@
 
     duration = util.timer() - starttime
     ui.log(
-        'tagscache',
-        '%d/%d cache hits/lookups in %0.4f seconds\n',
+        b'tagscache',
+        b'%d/%d cache hits/lookups in %0.4f seconds\n',
         fnodescache.hitcount,
         fnodescache.lookupcount,
         duration,
@@ -514,21 +514,23 @@
 def _writetagcache(ui, repo, valid, cachetags):
     filename = _filename(repo)
     try:
-        cachefile = repo.cachevfs(filename, 'w', atomictemp=True)
+        cachefile = repo.cachevfs(filename, b'w', atomictemp=True)
     except (OSError, IOError):
         return
 
     ui.log(
-        'tagscache',
-        'writing .hg/cache/%s with %d tags\n',
+        b'tagscache',
+        b'writing .hg/cache/%s with %d tags\n',
         filename,
         len(cachetags),
     )
 
     if valid[2]:
-        cachefile.write('%d %s %s\n' % (valid[0], hex(valid[1]), hex(valid[2])))
+        cachefile.write(
+            b'%d %s %s\n' % (valid[0], hex(valid[1]), hex(valid[2]))
+        )
     else:
-        cachefile.write('%d %s\n' % (valid[0], hex(valid[1])))
+        cachefile.write(b'%d %s\n' % (valid[0], hex(valid[1])))
 
     # Tag names in the cache are in UTF-8 -- which is the whole reason
     # we keep them in UTF-8 throughout this module.  If we converted
@@ -536,8 +538,8 @@
     # the cache.
     for (name, (node, hist)) in sorted(cachetags.iteritems()):
         for n in hist:
-            cachefile.write("%s %s\n" % (hex(n), name))
-        cachefile.write("%s %s\n" % (hex(node), name))
+            cachefile.write(b"%s %s\n" % (hex(n), name))
+        cachefile.write(b"%s %s\n" % (hex(node), name))
 
     try:
         cachefile.close()
@@ -567,11 +569,11 @@
     date: date tuple to use if committing'''
 
     if not local:
-        m = matchmod.exact(['.hgtags'])
+        m = matchmod.exact([b'.hgtags'])
         if any(repo.status(match=m, unknown=True, ignored=True)):
             raise error.Abort(
-                _('working copy of .hgtags is changed'),
-                hint=_('please commit .hgtags manually'),
+                _(b'working copy of .hgtags is changed'),
+                hint=_(b'please commit .hgtags manually'),
             )
 
     with repo.wlock():
@@ -587,17 +589,17 @@
 
     branches = repo.branchmap()
     for name in names:
-        repo.hook('pretag', throw=True, node=hex(node), tag=name, local=local)
+        repo.hook(b'pretag', throw=True, node=hex(node), tag=name, local=local)
         if name in branches:
             repo.ui.warn(
-                _("warning: tag %s conflicts with existing" " branch name\n")
+                _(b"warning: tag %s conflicts with existing" b" branch name\n")
                 % name
             )
 
     def writetags(fp, names, munge, prevtags):
         fp.seek(0, io.SEEK_END)
-        if prevtags and not prevtags.endswith('\n'):
-            fp.write('\n')
+        if prevtags and not prevtags.endswith(b'\n'):
+            fp.write(b'\n')
         for name in names:
             if munge:
                 m = munge(name)
@@ -606,31 +608,31 @@
 
             if repo._tagscache.tagtypes and name in repo._tagscache.tagtypes:
                 old = repo.tags().get(name, nullid)
-                fp.write('%s %s\n' % (hex(old), m))
-            fp.write('%s %s\n' % (hex(node), m))
+                fp.write(b'%s %s\n' % (hex(old), m))
+            fp.write(b'%s %s\n' % (hex(node), m))
         fp.close()
 
-    prevtags = ''
+    prevtags = b''
     if local:
         try:
-            fp = repo.vfs('localtags', 'r+')
+            fp = repo.vfs(b'localtags', b'r+')
         except IOError:
-            fp = repo.vfs('localtags', 'a')
+            fp = repo.vfs(b'localtags', b'a')
         else:
             prevtags = fp.read()
 
         # local tags are stored in the current charset
         writetags(fp, names, None, prevtags)
         for name in names:
-            repo.hook('tag', node=hex(node), tag=name, local=local)
+            repo.hook(b'tag', node=hex(node), tag=name, local=local)
         return
 
     try:
-        fp = repo.wvfs('.hgtags', 'rb+')
+        fp = repo.wvfs(b'.hgtags', b'rb+')
     except IOError as e:
         if e.errno != errno.ENOENT:
             raise
-        fp = repo.wvfs('.hgtags', 'ab')
+        fp = repo.wvfs(b'.hgtags', b'ab')
     else:
         prevtags = fp.read()
 
@@ -641,23 +643,23 @@
 
     repo.invalidatecaches()
 
-    if '.hgtags' not in repo.dirstate:
-        repo[None].add(['.hgtags'])
+    if b'.hgtags' not in repo.dirstate:
+        repo[None].add([b'.hgtags'])
 
-    m = matchmod.exact(['.hgtags'])
+    m = matchmod.exact([b'.hgtags'])
     tagnode = repo.commit(
         message, user, date, extra=extra, match=m, editor=editor
     )
 
     for name in names:
-        repo.hook('tag', node=hex(node), tag=name, local=local)
+        repo.hook(b'tag', node=hex(node), tag=name, local=local)
 
     return tagnode
 
 
-_fnodescachefile = 'hgtagsfnodes1'
+_fnodescachefile = b'hgtagsfnodes1'
 _fnodesrecsize = 4 + 20  # changeset fragment + filenode
-_fnodesmissingrec = '\xff' * 24
+_fnodesmissingrec = b'\xff' * 24
 
 
 class hgtagsfnodescache(object):
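
For context on the constants just converted: the hgtagsfnodes1 cache stores
one fixed-width record per revision. Each record is _fnodesrecsize = 4 + 20
bytes, a 4-byte changeset-node fragment followed by the 20-byte .hgtags
filenode, and b'\xff' * 24 marks a missing entry. A simplified sketch of the
lookup this layout implies (paraphrasing the getfnode() code in the hunks
below, not a drop-in replacement for it):

    _fnodesrecsize = 4 + 20            # changeset fragment + filenode
    _fnodesmissingrec = b'\xff' * 24   # sentinel for "no entry yet"

    def lookup(raw, rev, node):
        # raw: the bytearray backing the cache file
        # node: the full 20-byte binary changeset node being queried
        offset = rev * _fnodesrecsize
        record = bytes(raw[offset:offset + _fnodesrecsize])
        if record == _fnodesmissingrec or record[0:4] != node[0:4]:
            return None                # missing or stale: recompute
        return record[4:]              # cached .hgtags filenode
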
@@ -692,7 +694,7 @@
         try:
             data = repo.cachevfs.read(_fnodescachefile)
         except (OSError, IOError):
-            data = ""
+            data = b""
         self._raw = bytearray(data)
 
         # The end state of self._raw is an array that is of the exact length
@@ -709,7 +711,7 @@
 
         if rawlen < wantedlen:
             self._dirtyoffset = rawlen
-            self._raw.extend('\xff' * (wantedlen - rawlen))
+            self._raw.extend(b'\xff' * (wantedlen - rawlen))
         elif rawlen > wantedlen:
             # There's no easy way to truncate array instances. This seems
             # slightly less evil than copying a potentially large array slice.
@@ -737,7 +739,7 @@
         self.lookupcount += 1
 
         offset = rev * _fnodesrecsize
-        record = '%s' % self._raw[offset : offset + _fnodesrecsize]
+        record = b'%s' % self._raw[offset : offset + _fnodesrecsize]
         properprefix = node[0:4]
 
         # Validate and return existing entry.
@@ -772,13 +774,13 @@
                 p1fnode = None
         if p1fnode is not None:
             mctx = ctx.manifestctx()
-            fnode = mctx.readfast().get('.hgtags')
+            fnode = mctx.readfast().get(b'.hgtags')
             if fnode is None:
                 fnode = p1fnode
         if fnode is None:
             # Populate missing entry.
             try:
-                fnode = ctx.filenode('.hgtags')
+                fnode = ctx.filenode(b'.hgtags')
             except error.LookupError:
                 # No .hgtags file on this revision.
                 fnode = nullid
@@ -823,14 +825,14 @@
             lock = repo.wlock(wait=False)
         except error.LockError:
             repo.ui.log(
-                'tagscache',
-                'not writing .hg/cache/%s because '
-                'lock cannot be acquired\n' % _fnodescachefile,
+                b'tagscache',
+                b'not writing .hg/cache/%s because '
+                b'lock cannot be acquired\n' % _fnodescachefile,
             )
             return
 
         try:
-            f = repo.cachevfs.open(_fnodescachefile, 'ab')
+            f = repo.cachevfs.open(_fnodescachefile, b'ab')
             try:
                 # if the file has been truncated
                 actualoffset = f.tell()
@@ -840,8 +842,8 @@
                 f.seek(self._dirtyoffset)
                 f.truncate()
                 repo.ui.log(
-                    'tagscache',
-                    'writing %d bytes to cache/%s\n'
+                    b'tagscache',
+                    b'writing %d bytes to cache/%s\n'
                     % (len(data), _fnodescachefile),
                 )
                 f.write(data)
@@ -850,8 +852,8 @@
                 f.close()
         except (IOError, OSError) as inst:
             repo.ui.log(
-                'tagscache',
-                "couldn't write cache/%s: %s\n"
+                b'tagscache',
+                b"couldn't write cache/%s: %s\n"
                 % (_fnodescachefile, stringutil.forcebytestr(inst)),
             )
         finally:
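
These tags.py hunks show the pattern this entire change repeats: every
literal that touches repository data becomes a bytes literal, because
Python 3 never mixes str and bytes in %-formatting (and bytes interpolation
itself only exists on Python 3.5+). A short illustration of the two failure
modes the conversion avoids:

    node_hex, name = b'abc123', b'v1.0'

    # All-bytes operands format correctly:
    assert b'%s %s\n' % (node_hex, name) == b'abc123 v1.0\n'

    # A str template silently embeds the repr of a bytes value:
    assert '%s %s\n' % (node_hex, name) == "b'abc123' b'v1.0'\n"

    # A bytes template rejects str arguments outright:
    try:
        b'%s\n' % 'v1.0'
    except TypeError:
        pass  # %b requires a bytes-like object, not 'str'
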
--- a/mercurial/templatefilters.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/templatefilters.py	Sun Oct 06 09:48:39 2019 -0400
@@ -43,26 +43,26 @@
 templatefilter = registrar.templatefilter(filters)
 
 
-@templatefilter('addbreaks', intype=bytes)
+@templatefilter(b'addbreaks', intype=bytes)
 def addbreaks(text):
     """Any text. Add an XHTML "<br />" tag before the end of
     every line except the last.
     """
-    return text.replace('\n', '<br/>\n')
+    return text.replace(b'\n', b'<br/>\n')
 
 
 agescales = [
-    ("year", 3600 * 24 * 365, 'Y'),
-    ("month", 3600 * 24 * 30, 'M'),
-    ("week", 3600 * 24 * 7, 'W'),
-    ("day", 3600 * 24, 'd'),
-    ("hour", 3600, 'h'),
-    ("minute", 60, 'm'),
-    ("second", 1, 's'),
+    (b"year", 3600 * 24 * 365, b'Y'),
+    (b"month", 3600 * 24 * 30, b'M'),
+    (b"week", 3600 * 24 * 7, b'W'),
+    (b"day", 3600 * 24, b'd'),
+    (b"hour", 3600, b'h'),
+    (b"minute", 60, b'm'),
+    (b"second", 1, b's'),
 ]
 
 
-@templatefilter('age', intype=templateutil.date)
+@templatefilter(b'age', intype=templateutil.date)
 def age(date, abbrev=False):
     """Date. Returns a human-readable date/time difference between the
     given date/time and the current date/time.
@@ -71,12 +71,12 @@
     def plural(t, c):
         if c == 1:
             return t
-        return t + "s"
+        return t + b"s"
 
     def fmt(t, c, a):
         if abbrev:
-            return "%d%s" % (c, a)
-        return "%d %s" % (c, plural(t, c))
+            return b"%d%s" % (c, a)
+        return b"%d %s" % (c, plural(t, c))
 
     now = time.time()
     then = date[0]
@@ -85,7 +85,7 @@
         future = True
         delta = max(1, int(then - now))
         if delta > agescales[0][1] * 30:
-            return 'in the distant future'
+            return b'in the distant future'
     else:
         delta = max(1, int(now - then))
         if delta > agescales[0][1] * 2:
@@ -95,11 +95,11 @@
         n = delta // s
         if n >= 2 or s == 1:
             if future:
-                return '%s from now' % fmt(t, n, a)
-            return '%s ago' % fmt(t, n, a)
+                return b'%s from now' % fmt(t, n, a)
+            return b'%s ago' % fmt(t, n, a)
 
 
-@templatefilter('basename', intype=bytes)
+@templatefilter(b'basename', intype=bytes)
 def basename(path):
     """Any text. Treats the text as a path, and returns the last
     component of the path after splitting by the path separator.
@@ -108,13 +108,13 @@
     return os.path.basename(path)
 
 
-@templatefilter('cbor')
+@templatefilter(b'cbor')
 def cbor(obj):
     """Any object. Serializes the object to CBOR bytes."""
     return b''.join(cborutil.streamencode(obj))
 
 
-@templatefilter('commondir')
+@templatefilter(b'commondir')
 def commondir(filelist):
     """List of text. Treats each list item as file name with /
     as path separator and returns the longest common directory
@@ -142,30 +142,30 @@
 
     try:
         if not filelist:
-            return ""
-        dirlist = [f.lstrip('/').split('/')[:-1] for f in filelist]
+            return b""
+        dirlist = [f.lstrip(b'/').split(b'/')[:-1] for f in filelist]
         if len(dirlist) == 1:
-            return '/'.join(dirlist[0])
+            return b'/'.join(dirlist[0])
         a = min(dirlist)
         b = max(dirlist)
         # The common prefix of a and b is shared with all
         # elements of the list since Python sorts lexicographical
         # and [1, x] after [1].
-        return '/'.join(common(a, b))
+        return b'/'.join(common(a, b))
     except TypeError:
-        raise error.ParseError(_('argument is not a list of text'))
+        raise error.ParseError(_(b'argument is not a list of text'))
 
 
-@templatefilter('count')
+@templatefilter(b'count')
 def count(i):
     """List or text. Returns the length as an integer."""
     try:
         return len(i)
     except TypeError:
-        raise error.ParseError(_('not countable'))
+        raise error.ParseError(_(b'not countable'))
 
 
-@templatefilter('dirname', intype=bytes)
+@templatefilter(b'dirname', intype=bytes)
 def dirname(path):
     """Any text. Treats the text as a path, and strips the last
     component of the path after splitting by the path separator.
@@ -173,23 +173,23 @@
     return os.path.dirname(path)
 
 
-@templatefilter('domain', intype=bytes)
+@templatefilter(b'domain', intype=bytes)
 def domain(author):
     """Any text. Finds the first string that looks like an email
     address, and extracts just the domain component. Example: ``User
     <user@example.com>`` becomes ``example.com``.
     """
-    f = author.find('@')
+    f = author.find(b'@')
     if f == -1:
-        return ''
+        return b''
     author = author[f + 1 :]
-    f = author.find('>')
+    f = author.find(b'>')
     if f >= 0:
         author = author[:f]
     return author
 
 
-@templatefilter('email', intype=bytes)
+@templatefilter(b'email', intype=bytes)
 def email(text):
     """Any text. Extracts the first string that looks like an email
     address. Example: ``User <user@example.com>`` becomes
@@ -198,23 +198,23 @@
     return stringutil.email(text)
 
 
-@templatefilter('escape', intype=bytes)
+@templatefilter(b'escape', intype=bytes)
 def escape(text):
     """Any text. Replaces the special XML/XHTML characters "&", "<"
     and ">" with XML entities, and filters out NUL characters.
     """
-    return url.escape(text.replace('\0', ''), True)
+    return url.escape(text.replace(b'\0', b''), True)
 
 
 para_re = None
 space_re = None
 
 
-def fill(text, width, initindent='', hangindent=''):
+def fill(text, width, initindent=b'', hangindent=b''):
     '''fill many paragraphs with optional indentation.'''
     global para_re, space_re
     if para_re is None:
-        para_re = re.compile('(\n\n|\n\\s*[-*]\\s*)', re.M)
+        para_re = re.compile(b'(\n\n|\n\\s*[-*]\\s*)', re.M)
         space_re = re.compile(br'  +')
 
     def findparas():
@@ -234,10 +234,10 @@
             yield text[start : m.start(0)], m.group(1)
             start = m.end(1)
 
-    return "".join(
+    return b"".join(
         [
             stringutil.wrap(
-                space_re.sub(' ', stringutil.wrap(para, width)),
+                space_re.sub(b' ', stringutil.wrap(para, width)),
                 width,
                 initindent,
                 hangindent,
@@ -248,28 +248,28 @@
     )
 
 
-@templatefilter('fill68', intype=bytes)
+@templatefilter(b'fill68', intype=bytes)
 def fill68(text):
     """Any text. Wraps the text to fit in 68 columns."""
     return fill(text, 68)
 
 
-@templatefilter('fill76', intype=bytes)
+@templatefilter(b'fill76', intype=bytes)
 def fill76(text):
     """Any text. Wraps the text to fit in 76 columns."""
     return fill(text, 76)
 
 
-@templatefilter('firstline', intype=bytes)
+@templatefilter(b'firstline', intype=bytes)
 def firstline(text):
     """Any text. Returns the first line of text."""
     try:
-        return text.splitlines(True)[0].rstrip('\r\n')
+        return text.splitlines(True)[0].rstrip(b'\r\n')
     except IndexError:
-        return ''
+        return b''
 
 
-@templatefilter('hex', intype=bytes)
+@templatefilter(b'hex', intype=bytes)
 def hexfilter(text):
     """Any text. Convert a binary Mercurial node identifier into
     its long hexadecimal representation.
@@ -277,36 +277,36 @@
     return node.hex(text)
 
 
-@templatefilter('hgdate', intype=templateutil.date)
+@templatefilter(b'hgdate', intype=templateutil.date)
 def hgdate(text):
     """Date. Returns the date as a pair of numbers: "1157407993
     25200" (Unix timestamp, timezone offset).
     """
-    return "%d %d" % text
+    return b"%d %d" % text
 
 
-@templatefilter('isodate', intype=templateutil.date)
+@templatefilter(b'isodate', intype=templateutil.date)
 def isodate(text):
     """Date. Returns the date in ISO 8601 format: "2009-08-18 13:00
     +0200".
     """
-    return dateutil.datestr(text, '%Y-%m-%d %H:%M %1%2')
+    return dateutil.datestr(text, b'%Y-%m-%d %H:%M %1%2')
 
 
-@templatefilter('isodatesec', intype=templateutil.date)
+@templatefilter(b'isodatesec', intype=templateutil.date)
 def isodatesec(text):
     """Date. Returns the date in ISO 8601 format, including
     seconds: "2009-08-18 13:00:13 +0200". See also the rfc3339date
     filter.
     """
-    return dateutil.datestr(text, '%Y-%m-%d %H:%M:%S %1%2')
+    return dateutil.datestr(text, b'%Y-%m-%d %H:%M:%S %1%2')
 
 
 def indent(text, prefix):
     '''indent each non-empty line of text after first with prefix.'''
     lines = text.splitlines()
     num_lines = len(lines)
-    endswithnewline = text[-1:] == '\n'
+    endswithnewline = text[-1:] == b'\n'
 
     def indenter():
         for i in pycompat.xrange(num_lines):
@@ -315,72 +315,72 @@
                 yield prefix
             yield l
             if i < num_lines - 1 or endswithnewline:
-                yield '\n'
+                yield b'\n'
 
-    return "".join(indenter())
+    return b"".join(indenter())
 
 
-@templatefilter('json')
+@templatefilter(b'json')
 def json(obj, paranoid=True):
     """Any object. Serializes the object to a JSON formatted text."""
     if obj is None:
-        return 'null'
+        return b'null'
     elif obj is False:
-        return 'false'
+        return b'false'
     elif obj is True:
-        return 'true'
+        return b'true'
     elif isinstance(obj, (int, long, float)):
         return pycompat.bytestr(obj)
     elif isinstance(obj, bytes):
-        return '"%s"' % encoding.jsonescape(obj, paranoid=paranoid)
+        return b'"%s"' % encoding.jsonescape(obj, paranoid=paranoid)
     elif isinstance(obj, type(u'')):
         raise error.ProgrammingError(
-            'Mercurial only does output with bytes: %r' % obj
+            b'Mercurial only does output with bytes: %r' % obj
         )
-    elif util.safehasattr(obj, 'keys'):
+    elif util.safehasattr(obj, b'keys'):
         out = [
-            '"%s": %s'
+            b'"%s": %s'
             % (encoding.jsonescape(k, paranoid=paranoid), json(v, paranoid))
             for k, v in sorted(obj.iteritems())
         ]
-        return '{' + ', '.join(out) + '}'
-    elif util.safehasattr(obj, '__iter__'):
+        return b'{' + b', '.join(out) + b'}'
+    elif util.safehasattr(obj, b'__iter__'):
         out = [json(i, paranoid) for i in obj]
-        return '[' + ', '.join(out) + ']'
-    raise error.ProgrammingError('cannot encode %r' % obj)
+        return b'[' + b', '.join(out) + b']'
+    raise error.ProgrammingError(b'cannot encode %r' % obj)
 
 
-@templatefilter('lower', intype=bytes)
+@templatefilter(b'lower', intype=bytes)
 def lower(text):
     """Any text. Converts the text to lowercase."""
     return encoding.lower(text)
 
 
-@templatefilter('nonempty', intype=bytes)
+@templatefilter(b'nonempty', intype=bytes)
 def nonempty(text):
     """Any text. Returns '(none)' if the string is empty."""
-    return text or "(none)"
+    return text or b"(none)"
 
 
-@templatefilter('obfuscate', intype=bytes)
+@templatefilter(b'obfuscate', intype=bytes)
 def obfuscate(text):
     """Any text. Returns the input text rendered as a sequence of
     XML entities.
     """
     text = unicode(text, pycompat.sysstr(encoding.encoding), r'replace')
-    return ''.join(['&#%d;' % ord(c) for c in text])
+    return b''.join([b'&#%d;' % ord(c) for c in text])
 
 
-@templatefilter('permissions', intype=bytes)
+@templatefilter(b'permissions', intype=bytes)
 def permissions(flags):
-    if "l" in flags:
-        return "lrwxrwxrwx"
-    if "x" in flags:
-        return "-rwxr-xr-x"
-    return "-rw-r--r--"
+    if b"l" in flags:
+        return b"lrwxrwxrwx"
+    if b"x" in flags:
+        return b"-rwxr-xr-x"
+    return b"-rw-r--r--"
 
 
-@templatefilter('person', intype=bytes)
+@templatefilter(b'person', intype=bytes)
 def person(author):
     """Any text. Returns the name before an email address,
     interpreting it as per RFC 5322.
@@ -388,32 +388,32 @@
     return stringutil.person(author)
 
 
-@templatefilter('revescape', intype=bytes)
+@templatefilter(b'revescape', intype=bytes)
 def revescape(text):
     """Any text. Escapes all "special" characters, except @.
     Forward slashes are escaped twice to prevent web servers from prematurely
     unescaping them. For example, "@foo bar/baz" becomes "@foo%20bar%252Fbaz".
     """
-    return urlreq.quote(text, safe='/@').replace('/', '%252F')
+    return urlreq.quote(text, safe=b'/@').replace(b'/', b'%252F')
 
 
-@templatefilter('rfc3339date', intype=templateutil.date)
+@templatefilter(b'rfc3339date', intype=templateutil.date)
 def rfc3339date(text):
     """Date. Returns a date using the Internet date format
     specified in RFC 3339: "2009-08-18T13:00:13+02:00".
     """
-    return dateutil.datestr(text, "%Y-%m-%dT%H:%M:%S%1:%2")
+    return dateutil.datestr(text, b"%Y-%m-%dT%H:%M:%S%1:%2")
 
 
-@templatefilter('rfc822date', intype=templateutil.date)
+@templatefilter(b'rfc822date', intype=templateutil.date)
 def rfc822date(text):
     """Date. Returns a date using the same format used in email
     headers: "Tue, 18 Aug 2009 13:00:13 +0200".
     """
-    return dateutil.datestr(text, "%a, %d %b %Y %H:%M:%S %1%2")
+    return dateutil.datestr(text, b"%a, %d %b %Y %H:%M:%S %1%2")
 
 
-@templatefilter('short', intype=bytes)
+@templatefilter(b'short', intype=bytes)
 def short(text):
     """Changeset hash. Returns the short form of a changeset hash,
     i.e. a 12 hexadecimal digit string.
@@ -421,7 +421,7 @@
     return text[:12]
 
 
-@templatefilter('shortbisect', intype=bytes)
+@templatefilter(b'shortbisect', intype=bytes)
 def shortbisect(label):
     """Any text. Treats `label` as a bisection status, and
     returns a single-character representing the status (G: good, B: bad,
@@ -430,33 +430,33 @@
     """
     if label:
         return label[0:1].upper()
-    return ' '
+    return b' '
 
 
-@templatefilter('shortdate', intype=templateutil.date)
+@templatefilter(b'shortdate', intype=templateutil.date)
 def shortdate(text):
     """Date. Returns a date like "2006-09-18"."""
     return dateutil.shortdate(text)
 
 
-@templatefilter('slashpath', intype=bytes)
+@templatefilter(b'slashpath', intype=bytes)
 def slashpath(path):
     """Any text. Replaces the native path separator with slash."""
     return util.pconvert(path)
 
 
-@templatefilter('splitlines', intype=bytes)
+@templatefilter(b'splitlines', intype=bytes)
 def splitlines(text):
     """Any text. Split text into a list of lines."""
-    return templateutil.hybridlist(text.splitlines(), name='line')
+    return templateutil.hybridlist(text.splitlines(), name=b'line')
 
 
-@templatefilter('stringescape', intype=bytes)
+@templatefilter(b'stringescape', intype=bytes)
 def stringescape(text):
     return stringutil.escapestr(text)
 
 
-@templatefilter('stringify', intype=bytes)
+@templatefilter(b'stringify', intype=bytes)
 def stringify(thing):
     """Any type. Turns the value into text by converting values into
     text and concatenating them.
@@ -464,33 +464,33 @@
     return thing  # coerced by the intype
 
 
-@templatefilter('stripdir', intype=bytes)
+@templatefilter(b'stripdir', intype=bytes)
 def stripdir(text):
     """Treat the text as path and strip a directory level, if
     possible. For example, "foo" and "foo/bar" becomes "foo".
     """
     dir = os.path.dirname(text)
-    if dir == "":
+    if dir == b"":
         return os.path.basename(text)
     else:
         return dir
 
 
-@templatefilter('tabindent', intype=bytes)
+@templatefilter(b'tabindent', intype=bytes)
 def tabindent(text):
     """Any text. Returns the text, with every non-empty line
     except the first starting with a tab character.
     """
-    return indent(text, '\t')
+    return indent(text, b'\t')
 
 
-@templatefilter('upper', intype=bytes)
+@templatefilter(b'upper', intype=bytes)
 def upper(text):
     """Any text. Converts the text to uppercase."""
     return encoding.upper(text)
 
 
-@templatefilter('urlescape', intype=bytes)
+@templatefilter(b'urlescape', intype=bytes)
 def urlescape(text):
     """Any text. Escapes all "special" characters. For example,
     "foo bar" becomes "foo%20bar".
@@ -498,35 +498,35 @@
     return urlreq.quote(text)
 
 
-@templatefilter('user', intype=bytes)
+@templatefilter(b'user', intype=bytes)
 def userfilter(text):
     """Any text. Returns a short representation of a user name or email
     address."""
     return stringutil.shortuser(text)
 
 
-@templatefilter('emailuser', intype=bytes)
+@templatefilter(b'emailuser', intype=bytes)
 def emailuser(text):
     """Any text. Returns the user portion of an email address."""
     return stringutil.emailuser(text)
 
 
-@templatefilter('utf8', intype=bytes)
+@templatefilter(b'utf8', intype=bytes)
 def utf8(text):
     """Any text. Converts from the local character encoding to UTF-8."""
     return encoding.fromlocal(text)
 
 
-@templatefilter('xmlescape', intype=bytes)
+@templatefilter(b'xmlescape', intype=bytes)
 def xmlescape(text):
     text = (
-        text.replace('&', '&amp;')
-        .replace('<', '&lt;')
-        .replace('>', '&gt;')
-        .replace('"', '&quot;')
-        .replace("'", '&#39;')
+        text.replace(b'&', b'&amp;')
+        .replace(b'<', b'&lt;')
+        .replace(b'>', b'&gt;')
+        .replace(b'"', b'&quot;')
+        .replace(b"'", b'&#39;')
     )  # &apos; invalid in HTML
-    return re.sub('[\x00-\x08\x0B\x0C\x0E-\x1F]', ' ', text)
+    return re.sub(b'[\x00-\x08\x0B\x0C\x0E-\x1F]', b' ', text)
 
 
 def websub(text, websubtable):
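
Every templatefilters.py hunk above has the same shape: the registration
name passed to @templatefilter is byteified together with the literals in
the filter body, so the table keys the template parser matches against stay
bytes. A minimal sketch of the registrar pattern, assuming only what the
decorators above show (the real registrar also collects the docstrings used
for :hg:`help templates`):

    filters = {}

    def templatefilter(name, intype=None):
        # Hypothetical minimal registrar: store the function under its
        # bytes name and tag the input type the engine should coerce to.
        def register(func):
            func._intype = intype
            filters[name] = func
            return func
        return register

    @templatefilter(b'upper', intype=bytes)
    def upper(text):
        return text.upper()

    assert filters[b'upper'](b'hg') == b'HG'
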
--- a/mercurial/templatefuncs.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/templatefuncs.py	Sun Oct 06 09:48:39 2019 -0400
@@ -50,21 +50,21 @@
 templatefunc = registrar.templatefunc(funcs)
 
 
-@templatefunc('date(date[, fmt])')
+@templatefunc(b'date(date[, fmt])')
 def date(context, mapping, args):
     """Format a date. See :hg:`help dates` for formatting
     strings. The default is a Unix date format, including the timezone:
     "Mon Sep 04 15:13:13 2006 0700"."""
     if not (1 <= len(args) <= 2):
         # i18n: "date" is a keyword
-        raise error.ParseError(_("date expects one or two arguments"))
+        raise error.ParseError(_(b"date expects one or two arguments"))
 
     date = evaldate(
         context,
         mapping,
         args[0],
         # i18n: "date" is a keyword
-        _("date expects a date information"),
+        _(b"date expects a date information"),
     )
     fmt = None
     if len(args) == 2:
@@ -75,36 +75,36 @@
         return dateutil.datestr(date, fmt)
 
 
-@templatefunc('dict([[key=]value...])', argspec='*args **kwargs')
+@templatefunc(b'dict([[key=]value...])', argspec=b'*args **kwargs')
 def dict_(context, mapping, args):
     """Construct a dict from key-value pairs. A key may be omitted if
     a value expression can provide an unambiguous name."""
     data = util.sortdict()
 
-    for v in args['args']:
+    for v in args[b'args']:
         k = templateutil.findsymbolicname(v)
         if not k:
-            raise error.ParseError(_('dict key cannot be inferred'))
-        if k in data or k in args['kwargs']:
-            raise error.ParseError(_("duplicated dict key '%s' inferred") % k)
+            raise error.ParseError(_(b'dict key cannot be inferred'))
+        if k in data or k in args[b'kwargs']:
+            raise error.ParseError(_(b"duplicated dict key '%s' inferred") % k)
         data[k] = evalfuncarg(context, mapping, v)
 
     data.update(
         (k, evalfuncarg(context, mapping, v))
-        for k, v in args['kwargs'].iteritems()
+        for k, v in args[b'kwargs'].iteritems()
     )
     return templateutil.hybriddict(data)
 
 
 @templatefunc(
-    'diff([includepattern [, excludepattern]])', requires={'ctx', 'ui'}
+    b'diff([includepattern [, excludepattern]])', requires={b'ctx', b'ui'}
 )
 def diff(context, mapping, args):
     """Show a diff, optionally
     specifying files to include or exclude."""
     if len(args) > 2:
         # i18n: "diff" is a keyword
-        raise error.ParseError(_("diff expects zero, one, or two arguments"))
+        raise error.ParseError(_(b"diff expects zero, one, or two arguments"))
 
     def getpatterns(i):
         if i < len(args):
@@ -113,76 +113,78 @@
                 return [s]
         return []
 
-    ctx = context.resource(mapping, 'ctx')
-    ui = context.resource(mapping, 'ui')
+    ctx = context.resource(mapping, b'ctx')
+    ui = context.resource(mapping, b'ui')
     diffopts = diffutil.diffallopts(ui)
     chunks = ctx.diff(
         match=ctx.match([], getpatterns(0), getpatterns(1)), opts=diffopts
     )
 
-    return ''.join(chunks)
+    return b''.join(chunks)
 
 
-@templatefunc('extdata(source)', argspec='source', requires={'ctx', 'cache'})
+@templatefunc(
+    b'extdata(source)', argspec=b'source', requires={b'ctx', b'cache'}
+)
 def extdata(context, mapping, args):
     """Show a text read from the specified extdata source. (EXPERIMENTAL)"""
-    if 'source' not in args:
+    if b'source' not in args:
         # i18n: "extdata" is a keyword
-        raise error.ParseError(_('extdata expects one argument'))
+        raise error.ParseError(_(b'extdata expects one argument'))
 
-    source = evalstring(context, mapping, args['source'])
+    source = evalstring(context, mapping, args[b'source'])
     if not source:
-        sym = templateutil.findsymbolicname(args['source'])
+        sym = templateutil.findsymbolicname(args[b'source'])
         if sym:
             raise error.ParseError(
-                _('empty data source specified'),
-                hint=_("did you mean extdata('%s')?") % sym,
+                _(b'empty data source specified'),
+                hint=_(b"did you mean extdata('%s')?") % sym,
             )
         else:
-            raise error.ParseError(_('empty data source specified'))
-    cache = context.resource(mapping, 'cache').setdefault('extdata', {})
-    ctx = context.resource(mapping, 'ctx')
+            raise error.ParseError(_(b'empty data source specified'))
+    cache = context.resource(mapping, b'cache').setdefault(b'extdata', {})
+    ctx = context.resource(mapping, b'ctx')
     if source in cache:
         data = cache[source]
     else:
         data = cache[source] = scmutil.extdatasource(ctx.repo(), source)
-    return data.get(ctx.rev(), '')
+    return data.get(ctx.rev(), b'')
 
 
-@templatefunc('files(pattern)', requires={'ctx'})
+@templatefunc(b'files(pattern)', requires={b'ctx'})
 def files(context, mapping, args):
     """All files of the current changeset matching the pattern. See
     :hg:`help patterns`."""
     if not len(args) == 1:
         # i18n: "files" is a keyword
-        raise error.ParseError(_("files expects one argument"))
+        raise error.ParseError(_(b"files expects one argument"))
 
     raw = evalstring(context, mapping, args[0])
-    ctx = context.resource(mapping, 'ctx')
+    ctx = context.resource(mapping, b'ctx')
     m = ctx.match([raw])
     files = list(ctx.matches(m))
-    return templateutil.compatfileslist(context, mapping, "file", files)
+    return templateutil.compatfileslist(context, mapping, b"file", files)
 
 
-@templatefunc('fill(text[, width[, initialident[, hangindent]]])')
+@templatefunc(b'fill(text[, width[, initialident[, hangindent]]])')
 def fill(context, mapping, args):
     """Fill many
     paragraphs with optional indentation. See the "fill" filter."""
     if not (1 <= len(args) <= 4):
         # i18n: "fill" is a keyword
-        raise error.ParseError(_("fill expects one to four arguments"))
+        raise error.ParseError(_(b"fill expects one to four arguments"))
 
     text = evalstring(context, mapping, args[0])
     width = 76
-    initindent = ''
-    hangindent = ''
+    initindent = b''
+    hangindent = b''
     if 2 <= len(args) <= 4:
         width = evalinteger(
             context,
             mapping,
             args[1],
             # i18n: "fill" is a keyword
-            _("fill expects an integer width"),
+            _(b"fill expects an integer width"),
         )
         try:
             initindent = evalstring(context, mapping, args[2])
@@ -193,13 +195,13 @@
     return templatefilters.fill(text, width, initindent, hangindent)
 
 
-@templatefunc('filter(iterable[, expr])')
+@templatefunc(b'filter(iterable[, expr])')
 def filter_(context, mapping, args):
     """Remove empty elements from a list or a dict. If expr specified, it's
     applied to each element to test emptiness."""
     if not (1 <= len(args) <= 2):
         # i18n: "filter" is a keyword
-        raise error.ParseError(_("filter expects one or two arguments"))
+        raise error.ParseError(_(b"filter expects one or two arguments"))
     iterable = evalwrapped(context, mapping, args[0])
     if len(args) == 1:
 
@@ -210,79 +212,79 @@
 
         def select(w):
             if not isinstance(w, templateutil.mappable):
-                raise error.ParseError(_("not filterable by expression"))
+                raise error.ParseError(_(b"not filterable by expression"))
             lm = context.overlaymap(mapping, w.tomap(context))
             return evalboolean(context, lm, args[1])
 
     return iterable.filter(context, mapping, select)
 
 
-@templatefunc('formatnode(node)', requires={'ui'})
+@templatefunc(b'formatnode(node)', requires={b'ui'})
 def formatnode(context, mapping, args):
     """Obtain the preferred form of a changeset hash. (DEPRECATED)"""
     if len(args) != 1:
         # i18n: "formatnode" is a keyword
-        raise error.ParseError(_("formatnode expects one argument"))
+        raise error.ParseError(_(b"formatnode expects one argument"))
 
-    ui = context.resource(mapping, 'ui')
+    ui = context.resource(mapping, b'ui')
     node = evalstring(context, mapping, args[0])
     if ui.debugflag:
         return node
     return templatefilters.short(node)
 
 
-@templatefunc('mailmap(author)', requires={'repo', 'cache'})
+@templatefunc(b'mailmap(author)', requires={b'repo', b'cache'})
 def mailmap(context, mapping, args):
     """Return the author, updated according to the value
     set in the .mailmap file"""
     if len(args) != 1:
-        raise error.ParseError(_("mailmap expects one argument"))
+        raise error.ParseError(_(b"mailmap expects one argument"))
 
     author = evalstring(context, mapping, args[0])
 
-    cache = context.resource(mapping, 'cache')
-    repo = context.resource(mapping, 'repo')
+    cache = context.resource(mapping, b'cache')
+    repo = context.resource(mapping, b'repo')
 
-    if 'mailmap' not in cache:
-        data = repo.wvfs.tryread('.mailmap')
-        cache['mailmap'] = stringutil.parsemailmap(data)
+    if b'mailmap' not in cache:
+        data = repo.wvfs.tryread(b'.mailmap')
+        cache[b'mailmap'] = stringutil.parsemailmap(data)
 
-    return stringutil.mapname(cache['mailmap'], author)
+    return stringutil.mapname(cache[b'mailmap'], author)
 
 
 @templatefunc(
-    'pad(text, width[, fillchar=\' \'[, left=False[, truncate=False]]])',
-    argspec='text width fillchar left truncate',
+    b'pad(text, width[, fillchar=\' \'[, left=False[, truncate=False]]])',
+    argspec=b'text width fillchar left truncate',
 )
 def pad(context, mapping, args):
     """Pad text with a
     fill character."""
-    if 'text' not in args or 'width' not in args:
+    if b'text' not in args or b'width' not in args:
         # i18n: "pad" is a keyword
-        raise error.ParseError(_("pad() expects two to four arguments"))
+        raise error.ParseError(_(b"pad() expects two to four arguments"))
 
     width = evalinteger(
         context,
         mapping,
-        args['width'],
+        args[b'width'],
         # i18n: "pad" is a keyword
-        _("pad() expects an integer width"),
+        _(b"pad() expects an integer width"),
     )
 
-    text = evalstring(context, mapping, args['text'])
+    text = evalstring(context, mapping, args[b'text'])
 
     truncate = False
     left = False
-    fillchar = ' '
-    if 'fillchar' in args:
-        fillchar = evalstring(context, mapping, args['fillchar'])
+    fillchar = b' '
+    if b'fillchar' in args:
+        fillchar = evalstring(context, mapping, args[b'fillchar'])
         if len(color.stripeffects(fillchar)) != 1:
             # i18n: "pad" is a keyword
-            raise error.ParseError(_("pad() expects a single fill character"))
-    if 'left' in args:
-        left = evalboolean(context, mapping, args['left'])
-    if 'truncate' in args:
-        truncate = evalboolean(context, mapping, args['truncate'])
+            raise error.ParseError(_(b"pad() expects a single fill character"))
+    if b'left' in args:
+        left = evalboolean(context, mapping, args[b'left'])
+    if b'truncate' in args:
+        truncate = evalboolean(context, mapping, args[b'truncate'])
 
     fillwidth = width - encoding.colwidth(color.stripeffects(text))
     if fillwidth < 0 and truncate:
@@ -295,7 +297,7 @@
         return text + fillchar * fillwidth
 
 
-@templatefunc('indent(text, indentchars[, firstline])')
+@templatefunc(b'indent(text, indentchars[, firstline])')
 def indent(context, mapping, args):
     """Indents all non-empty lines
     with the characters given in the indentchars string. An optional
@@ -303,7 +305,7 @@
     if present."""
     if not (2 <= len(args) <= 3):
         # i18n: "indent" is a keyword
-        raise error.ParseError(_("indent() expects two or three arguments"))
+        raise error.ParseError(_(b"indent() expects two or three arguments"))
 
     text = evalstring(context, mapping, args[0])
     indent = evalstring(context, mapping, args[1])
@@ -317,14 +319,14 @@
     return templatefilters.indent(firstline + text, indent)
 
 
-@templatefunc('get(dict, key)')
+@templatefunc(b'get(dict, key)')
 def get(context, mapping, args):
     """Get an attribute/key from an object. Some keywords
     are complex types. This function allows you to obtain the value of an
     attribute on these types."""
     if len(args) != 2:
         # i18n: "get" is a keyword
-        raise error.ParseError(_("get() expects two arguments"))
+        raise error.ParseError(_(b"get() expects two arguments"))
 
     dictarg = evalwrapped(context, mapping, args[0])
     key = evalrawexp(context, mapping, args[1])
@@ -332,34 +334,34 @@
         return dictarg.getmember(context, mapping, key)
     except error.ParseError as err:
         # i18n: "get" is a keyword
-        hint = _("get() expects a dict as first argument")
+        hint = _(b"get() expects a dict as first argument")
         raise error.ParseError(bytes(err), hint=hint)
 
 
-@templatefunc('config(section, name[, default])', requires={'ui'})
+@templatefunc(b'config(section, name[, default])', requires={b'ui'})
 def config(context, mapping, args):
     """Returns the requested hgrc config option as a string."""
-    fn = context.resource(mapping, 'ui').config
+    fn = context.resource(mapping, b'ui').config
     return _config(context, mapping, args, fn, evalstring)
 
 
-@templatefunc('configbool(section, name[, default])', requires={'ui'})
+@templatefunc(b'configbool(section, name[, default])', requires={b'ui'})
 def configbool(context, mapping, args):
     """Returns the requested hgrc config option as a boolean."""
-    fn = context.resource(mapping, 'ui').configbool
+    fn = context.resource(mapping, b'ui').configbool
     return _config(context, mapping, args, fn, evalboolean)
 
 
-@templatefunc('configint(section, name[, default])', requires={'ui'})
+@templatefunc(b'configint(section, name[, default])', requires={b'ui'})
 def configint(context, mapping, args):
     """Returns the requested hgrc config option as an integer."""
-    fn = context.resource(mapping, 'ui').configint
+    fn = context.resource(mapping, b'ui').configint
     return _config(context, mapping, args, fn, evalinteger)
 
 
 def _config(context, mapping, args, configfn, defaultfn):
     if not (2 <= len(args) <= 3):
-        raise error.ParseError(_("config expects two or three arguments"))
+        raise error.ParseError(_(b"config expects two or three arguments"))
 
     # The config option can come from any section, though we specifically
     # reserve the [templateconfig] section for dynamically defining options
@@ -373,13 +375,13 @@
         return configfn(section, name)
 
 
-@templatefunc('if(expr, then[, else])')
+@templatefunc(b'if(expr, then[, else])')
 def if_(context, mapping, args):
     """Conditionally execute based on the result of
     an expression."""
     if not (2 <= len(args) <= 3):
         # i18n: "if" is a keyword
-        raise error.ParseError(_("if expects two or three arguments"))
+        raise error.ParseError(_(b"if expects two or three arguments"))
 
     test = evalboolean(context, mapping, args[0])
     if test:
@@ -388,13 +390,13 @@
         return evalrawexp(context, mapping, args[2])
 
 
-@templatefunc('ifcontains(needle, haystack, then[, else])')
+@templatefunc(b'ifcontains(needle, haystack, then[, else])')
 def ifcontains(context, mapping, args):
     """Conditionally execute based
     on whether the item "needle" is in "haystack"."""
     if not (3 <= len(args) <= 4):
         # i18n: "ifcontains" is a keyword
-        raise error.ParseError(_("ifcontains expects three or four arguments"))
+        raise error.ParseError(_(b"ifcontains expects three or four arguments"))
 
     haystack = evalwrapped(context, mapping, args[1])
     try:
@@ -409,13 +411,13 @@
         return evalrawexp(context, mapping, args[3])
 
 
-@templatefunc('ifeq(expr1, expr2, then[, else])')
+@templatefunc(b'ifeq(expr1, expr2, then[, else])')
 def ifeq(context, mapping, args):
     """Conditionally execute based on
     whether 2 items are equivalent."""
     if not (3 <= len(args) <= 4):
         # i18n: "ifeq" is a keyword
-        raise error.ParseError(_("ifeq expects three or four arguments"))
+        raise error.ParseError(_(b"ifeq expects three or four arguments"))
 
     test = evalstring(context, mapping, args[0])
     match = evalstring(context, mapping, args[1])
@@ -425,30 +427,30 @@
         return evalrawexp(context, mapping, args[3])
 
 
-@templatefunc('join(list, sep)')
+@templatefunc(b'join(list, sep)')
 def join(context, mapping, args):
     """Join items in a list with a delimiter."""
     if not (1 <= len(args) <= 2):
         # i18n: "join" is a keyword
-        raise error.ParseError(_("join expects one or two arguments"))
+        raise error.ParseError(_(b"join expects one or two arguments"))
 
     joinset = evalwrapped(context, mapping, args[0])
-    joiner = " "
+    joiner = b" "
     if len(args) > 1:
         joiner = evalstring(context, mapping, args[1])
     return joinset.join(context, mapping, joiner)
 
 
-@templatefunc('label(label, expr)', requires={'ui'})
+@templatefunc(b'label(label, expr)', requires={b'ui'})
 def label(context, mapping, args):
     """Apply a label to generated content. Content with
     a label applied can result in additional post-processing, such as
     automatic colorization."""
     if len(args) != 2:
         # i18n: "label" is a keyword
-        raise error.ParseError(_("label expects two arguments"))
+        raise error.ParseError(_(b"label expects two arguments"))
 
-    ui = context.resource(mapping, 'ui')
+    ui = context.resource(mapping, b'ui')
     thing = evalstring(context, mapping, args[1])
     # preserve unknown symbol as literal so effects like 'red', 'bold',
     # etc. don't need to be quoted
@@ -457,7 +459,7 @@
     return ui.label(thing, label)
 
 
-@templatefunc('latesttag([pattern])')
+@templatefunc(b'latesttag([pattern])')
 def latesttag(context, mapping, args):
     """The global tags matching the given pattern on the
     most recent globally tagged ancestor of this changeset.
@@ -467,7 +469,7 @@
     """
     if len(args) > 1:
         # i18n: "latesttag" is a keyword
-        raise error.ParseError(_("latesttag expects at most one argument"))
+        raise error.ParseError(_(b"latesttag expects at most one argument"))
 
     pattern = None
     if len(args) == 1:
@@ -475,20 +477,20 @@
     return templatekw.showlatesttags(context, mapping, pattern)
 
 
-@templatefunc('localdate(date[, tz])')
+@templatefunc(b'localdate(date[, tz])')
 def localdate(context, mapping, args):
     """Converts a date to the specified timezone.
     The default is local date."""
     if not (1 <= len(args) <= 2):
         # i18n: "localdate" is a keyword
-        raise error.ParseError(_("localdate expects one or two arguments"))
+        raise error.ParseError(_(b"localdate expects one or two arguments"))
 
     date = evaldate(
         context,
         mapping,
         args[0],
         # i18n: "localdate" is a keyword
-        _("localdate expects a date information"),
+        _(b"localdate expects a date information"),
     )
     if len(args) >= 2:
         tzoffset = None
@@ -502,50 +504,50 @@
                 tzoffset = int(tz)
             except (TypeError, ValueError):
                 # i18n: "localdate" is a keyword
-                raise error.ParseError(_("localdate expects a timezone"))
+                raise error.ParseError(_(b"localdate expects a timezone"))
     else:
         tzoffset = dateutil.makedate()[1]
     return templateutil.date((date[0], tzoffset))
 
 
-@templatefunc('max(iterable)')
+@templatefunc(b'max(iterable)')
 def max_(context, mapping, args, **kwargs):
     """Return the max of an iterable"""
     if len(args) != 1:
         # i18n: "max" is a keyword
-        raise error.ParseError(_("max expects one argument"))
+        raise error.ParseError(_(b"max expects one argument"))
 
     iterable = evalwrapped(context, mapping, args[0])
     try:
         return iterable.getmax(context, mapping)
     except error.ParseError as err:
         # i18n: "max" is a keyword
-        hint = _("max first argument should be an iterable")
+        hint = _(b"max first argument should be an iterable")
         raise error.ParseError(bytes(err), hint=hint)
 
 
-@templatefunc('min(iterable)')
+@templatefunc(b'min(iterable)')
 def min_(context, mapping, args, **kwargs):
     """Return the min of an iterable"""
     if len(args) != 1:
         # i18n: "min" is a keyword
-        raise error.ParseError(_("min expects one argument"))
+        raise error.ParseError(_(b"min expects one argument"))
 
     iterable = evalwrapped(context, mapping, args[0])
     try:
         return iterable.getmin(context, mapping)
     except error.ParseError as err:
         # i18n: "min" is a keyword
-        hint = _("min first argument should be an iterable")
+        hint = _(b"min first argument should be an iterable")
         raise error.ParseError(bytes(err), hint=hint)
 
 
-@templatefunc('mod(a, b)')
+@templatefunc(b'mod(a, b)')
 def mod(context, mapping, args):
     """Calculate a mod b such that a / b + a mod b == a"""
     if not len(args) == 2:
         # i18n: "mod" is a keyword
-        raise error.ParseError(_("mod expects two arguments"))
+        raise error.ParseError(_(b"mod expects two arguments"))
 
     func = lambda a, b: a % b
     return templateutil.runarithmetic(
@@ -553,69 +555,70 @@
     )
 
 
-@templatefunc('obsfateoperations(markers)')
+@templatefunc(b'obsfateoperations(markers)')
 def obsfateoperations(context, mapping, args):
     """Compute obsfate related information based on markers (EXPERIMENTAL)"""
     if len(args) != 1:
         # i18n: "obsfateoperations" is a keyword
-        raise error.ParseError(_("obsfateoperations expects one argument"))
+        raise error.ParseError(_(b"obsfateoperations expects one argument"))
 
     markers = evalfuncarg(context, mapping, args[0])
 
     try:
         data = obsutil.markersoperations(markers)
-        return templateutil.hybridlist(data, name='operation')
+        return templateutil.hybridlist(data, name=b'operation')
     except (TypeError, KeyError):
         # i18n: "obsfateoperations" is a keyword
-        errmsg = _("obsfateoperations first argument should be an iterable")
+        errmsg = _(b"obsfateoperations first argument should be an iterable")
         raise error.ParseError(errmsg)
 
 
-@templatefunc('obsfatedate(markers)')
+@templatefunc(b'obsfatedate(markers)')
 def obsfatedate(context, mapping, args):
     """Compute obsfate related information based on markers (EXPERIMENTAL)"""
     if len(args) != 1:
         # i18n: "obsfatedate" is a keyword
-        raise error.ParseError(_("obsfatedate expects one argument"))
+        raise error.ParseError(_(b"obsfatedate expects one argument"))
 
     markers = evalfuncarg(context, mapping, args[0])
 
     try:
         # TODO: maybe this has to be a wrapped list of date wrappers?
         data = obsutil.markersdates(markers)
-        return templateutil.hybridlist(data, name='date', fmt='%d %d')
+        return templateutil.hybridlist(data, name=b'date', fmt=b'%d %d')
     except (TypeError, KeyError):
         # i18n: "obsfatedate" is a keyword
-        errmsg = _("obsfatedate first argument should be an iterable")
+        errmsg = _(b"obsfatedate first argument should be an iterable")
         raise error.ParseError(errmsg)
 
 
-@templatefunc('obsfateusers(markers)')
+@templatefunc(b'obsfateusers(markers)')
 def obsfateusers(context, mapping, args):
     """Compute obsfate related information based on markers (EXPERIMENTAL)"""
     if len(args) != 1:
         # i18n: "obsfateusers" is a keyword
-        raise error.ParseError(_("obsfateusers expects one argument"))
+        raise error.ParseError(_(b"obsfateusers expects one argument"))
 
     markers = evalfuncarg(context, mapping, args[0])
 
     try:
         data = obsutil.markersusers(markers)
-        return templateutil.hybridlist(data, name='user')
+        return templateutil.hybridlist(data, name=b'user')
     except (TypeError, KeyError, ValueError):
         # i18n: "obsfateusers" is a keyword
         msg = _(
-            "obsfateusers first argument should be an iterable of " "obsmakers"
+            b"obsfateusers first argument should be an iterable of "
+            b"obsmakers"
         )
         raise error.ParseError(msg)
 
 
-@templatefunc('obsfateverb(successors, markers)')
+@templatefunc(b'obsfateverb(successors, markers)')
 def obsfateverb(context, mapping, args):
     """Compute obsfate related information based on successors (EXPERIMENTAL)"""
     if len(args) != 2:
         # i18n: "obsfateverb" is a keyword
-        raise error.ParseError(_("obsfateverb expects two arguments"))
+        raise error.ParseError(_(b"obsfateverb expects two arguments"))
 
     successors = evalfuncarg(context, mapping, args[0])
     markers = evalfuncarg(context, mapping, args[1])
@@ -624,33 +627,33 @@
         return obsutil.obsfateverb(successors, markers)
     except TypeError:
         # i18n: "obsfateverb" is a keyword
-        errmsg = _("obsfateverb first argument should be countable")
+        errmsg = _(b"obsfateverb first argument should be countable")
         raise error.ParseError(errmsg)
 
 
-@templatefunc('relpath(path)', requires={'repo'})
+@templatefunc(b'relpath(path)', requires={b'repo'})
 def relpath(context, mapping, args):
     """Convert a repository-absolute path into a filesystem path relative to
     the current working directory."""
     if len(args) != 1:
         # i18n: "relpath" is a keyword
-        raise error.ParseError(_("relpath expects one argument"))
+        raise error.ParseError(_(b"relpath expects one argument"))
 
-    repo = context.resource(mapping, 'repo')
+    repo = context.resource(mapping, b'repo')
     path = evalstring(context, mapping, args[0])
     return repo.pathto(path)
 
 
-@templatefunc('revset(query[, formatargs...])', requires={'repo', 'cache'})
+@templatefunc(b'revset(query[, formatargs...])', requires={b'repo', b'cache'})
 def revset(context, mapping, args):
     """Execute a revision set query. See
     :hg:`help revset`."""
     if not len(args) > 0:
         # i18n: "revset" is a keyword
-        raise error.ParseError(_("revset expects one or more arguments"))
+        raise error.ParseError(_(b"revset expects one or more arguments"))
 
     raw = evalstring(context, mapping, args[0])
-    repo = context.resource(mapping, 'repo')
+    repo = context.resource(mapping, b'repo')
 
     def query(expr):
         m = revsetmod.match(repo.ui, expr, lookup=revsetmod.lookupfn(repo))
@@ -660,30 +663,30 @@
         formatargs = [evalfuncarg(context, mapping, a) for a in args[1:]]
         revs = query(revsetlang.formatspec(raw, *formatargs))
     else:
-        cache = context.resource(mapping, 'cache')
-        revsetcache = cache.setdefault("revsetcache", {})
+        cache = context.resource(mapping, b'cache')
+        revsetcache = cache.setdefault(b"revsetcache", {})
         if raw in revsetcache:
             revs = revsetcache[raw]
         else:
             revs = query(raw)
             revsetcache[raw] = revs
-    return templatekw.showrevslist(context, mapping, "revision", revs)
+    return templatekw.showrevslist(context, mapping, b"revision", revs)
 
 
-@templatefunc('rstdoc(text, style)')
+@templatefunc(b'rstdoc(text, style)')
 def rstdoc(context, mapping, args):
     """Format reStructuredText."""
     if len(args) != 2:
         # i18n: "rstdoc" is a keyword
-        raise error.ParseError(_("rstdoc expects two arguments"))
+        raise error.ParseError(_(b"rstdoc expects two arguments"))
 
     text = evalstring(context, mapping, args[0])
     style = evalstring(context, mapping, args[1])
 
-    return minirst.format(text, style=style, keep=['verbose'])
+    return minirst.format(text, style=style, keep=[b'verbose'])
 
 
-@templatefunc('search(pattern, text)')
+@templatefunc(b'search(pattern, text)')
 def search(context, mapping, args):
     """Look for the first text matching the regular expression pattern.
     Groups are accessible as ``{1}``, ``{2}``, ... in %-mapped template."""
@@ -707,7 +710,7 @@
             # i18n: "search" is a keyword
             _(b'invalid group %(group)s in search pattern: %(pat)s')
             % {
-                b'group': b', '.join("'%s'" % g for g in sorted(badgroups)),
+                b'group': b', '.join(b"'%s'" % g for g in sorted(badgroups)),
                 b'pat': pat,
             }
         )
@@ -722,16 +725,16 @@
     return templateutil.mappingdict(lm, tmpl=b'{0}')
 
 
-@templatefunc('separate(sep, args...)', argspec='sep *args')
+@templatefunc(b'separate(sep, args...)', argspec=b'sep *args')
 def separate(context, mapping, args):
     """Add a separator between non-empty arguments."""
-    if 'sep' not in args:
+    if b'sep' not in args:
         # i18n: "separate" is a keyword
-        raise error.ParseError(_("separate expects at least one argument"))
+        raise error.ParseError(_(b"separate expects at least one argument"))
 
-    sep = evalstring(context, mapping, args['sep'])
+    sep = evalstring(context, mapping, args[b'sep'])
     first = True
-    for arg in args['args']:
+    for arg in args[b'args']:
         argstr = evalstring(context, mapping, arg)
         if not argstr:
             continue
@@ -742,13 +745,13 @@
         yield argstr
 
 
-@templatefunc('shortest(node, minlength=4)', requires={'repo', 'cache'})
+@templatefunc(b'shortest(node, minlength=4)', requires={b'repo', b'cache'})
 def shortest(context, mapping, args):
     """Obtain the shortest representation of
     a node."""
     if not (1 <= len(args) <= 2):
         # i18n: "shortest" is a keyword
-        raise error.ParseError(_("shortest() expects one or two arguments"))
+        raise error.ParseError(_(b"shortest() expects one or two arguments"))
 
     hexnode = evalstring(context, mapping, args[0])
 
@@ -759,10 +762,10 @@
             mapping,
             args[1],
             # i18n: "shortest" is a keyword
-            _("shortest() expects an integer minlength"),
+            _(b"shortest() expects an integer minlength"),
         )
 
-    repo = context.resource(mapping, 'repo')
+    repo = context.resource(mapping, b'repo')
     if len(hexnode) > 40:
         return hexnode
     elif len(hexnode) == 40:
@@ -779,20 +782,20 @@
             return hexnode
         if not node:
             return hexnode
-    cache = context.resource(mapping, 'cache')
+    cache = context.resource(mapping, b'cache')
     try:
         return scmutil.shortesthexnodeidprefix(repo, node, minlength, cache)
     except error.RepoLookupError:
         return hexnode
 
 
-@templatefunc('strip(text[, chars])')
+@templatefunc(b'strip(text[, chars])')
 def strip(context, mapping, args):
     """Strip characters from a string. By default,
     strips all leading and trailing whitespace."""
     if not (1 <= len(args) <= 2):
         # i18n: "strip" is a keyword
-        raise error.ParseError(_("strip expects one or two arguments"))
+        raise error.ParseError(_(b"strip expects one or two arguments"))
 
     text = evalstring(context, mapping, args[0])
     if len(args) == 2:
@@ -801,13 +804,13 @@
     return text.strip()
 
 
-@templatefunc('sub(pattern, replacement, expression)')
+@templatefunc(b'sub(pattern, replacement, expression)')
 def sub(context, mapping, args):
     """Perform text substitution
     using regular expressions."""
     if len(args) != 3:
         # i18n: "sub" is a keyword
-        raise error.ParseError(_("sub expects three arguments"))
+        raise error.ParseError(_(b"sub expects three arguments"))
 
     pat = evalstring(context, mapping, args[0])
     rpl = evalstring(context, mapping, args[1])
@@ -816,36 +819,36 @@
         patre = re.compile(pat)
     except re.error:
         # i18n: "sub" is a keyword
-        raise error.ParseError(_("sub got an invalid pattern: %s") % pat)
+        raise error.ParseError(_(b"sub got an invalid pattern: %s") % pat)
     try:
         yield patre.sub(rpl, src)
     except re.error:
         # i18n: "sub" is a keyword
-        raise error.ParseError(_("sub got an invalid replacement: %s") % rpl)
+        raise error.ParseError(_(b"sub got an invalid replacement: %s") % rpl)
 
 
-@templatefunc('startswith(pattern, text)')
+@templatefunc(b'startswith(pattern, text)')
 def startswith(context, mapping, args):
     """Returns the value from the "text" argument
     if it begins with the content from the "pattern" argument."""
     if len(args) != 2:
         # i18n: "startswith" is a keyword
-        raise error.ParseError(_("startswith expects two arguments"))
+        raise error.ParseError(_(b"startswith expects two arguments"))
 
     patn = evalstring(context, mapping, args[0])
     text = evalstring(context, mapping, args[1])
     if text.startswith(patn):
         return text
-    return ''
+    return b''
 
 
-@templatefunc('word(number, text[, separator])')
+@templatefunc(b'word(number, text[, separator])')
 def word(context, mapping, args):
     """Return the nth word from a string."""
     if not (2 <= len(args) <= 3):
         # i18n: "word" is a keyword
         raise error.ParseError(
-            _("word expects two or three arguments, got %d") % len(args)
+            _(b"word expects two or three arguments, got %d") % len(args)
         )
 
     num = evalinteger(
@@ -853,7 +856,7 @@
         mapping,
         args[0],
         # i18n: "word" is a keyword
-        _("word expects an integer index"),
+        _(b"word expects an integer index"),
     )
     text = evalstring(context, mapping, args[1])
     if len(args) == 3:
@@ -863,7 +866,7 @@
 
     tokens = text.split(splitter)
     if num >= len(tokens) or num < -len(tokens):
-        return ''
+        return b''
     else:
         return tokens[num]
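
For reference, the names registered in the two files above are what users
type in -T templates, so the byteified names must keep matching the bytes
the template parser hands back. A couple of usage examples exercising
functions and filters from these hunks:

    $ hg log -r tip -T '{pad(rev, 8)}{date|shortdate} {author|person}\n'
    $ hg log -r tip -T '{word(0, desc)}\n'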
 
--- a/mercurial/templatekw.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/templatekw.py	Sun Oct 06 09:48:39 2019 -0400
@@ -41,13 +41,13 @@
 
 def getlatesttags(context, mapping, pattern=None):
     '''return date, distance and name for the latest tag of rev'''
-    repo = context.resource(mapping, 'repo')
-    ctx = context.resource(mapping, 'ctx')
-    cache = context.resource(mapping, 'cache')
+    repo = context.resource(mapping, b'repo')
+    ctx = context.resource(mapping, b'ctx')
+    cache = context.resource(mapping, b'cache')
 
-    cachename = 'latesttags'
+    cachename = b'latesttags'
     if pattern is not None:
-        cachename += '-' + pattern
+        cachename += b'-' + pattern
         match = stringutil.stringmatcher(pattern)[2]
     else:
         match = util.always
@@ -55,7 +55,7 @@
     if cachename not in cache:
         # Cache mapping from rev to a tuple with tag date, tag
         # distance and tag name
-        cache[cachename] = {-1: (0, 0, ['null'])}
+        cache[cachename] = {-1: (0, 0, [b'null'])}
     latesttags = cache[cachename]
 
     rev = ctx.rev()
@@ -68,7 +68,7 @@
         tags = [
             t
             for t in ctx.tags()
-            if (repo.tagtype(t) and repo.tagtype(t) != 'local' and match(t))
+            if (repo.tagtype(t) and repo.tagtype(t) != b'local' and match(t))
         ]
         if tags:
             latesttags[rev] = ctx.date()[0], 0, [t for t in sorted(tags)]
@@ -87,10 +87,10 @@
                         if ctx.rev() is None:
                             # only() doesn't support wdir
                             prevs = [c.rev() for c in ctx.parents()]
-                            changes = repo.revs('only(%ld, %s)', prevs, tag)
+                            changes = repo.revs(b'only(%ld, %s)', prevs, tag)
                             changessincetag = len(changes) + 1
                         else:
-                            changes = repo.revs('only(%d, %s)', ctx.rev(), tag)
+                            changes = repo.revs(b'only(%d, %s)', ctx.rev(), tag)
                             changessincetag = len(changes)
                         # Smallest number of changes since tag wins. Date is
                         # used as tiebreaker.
@@ -113,143 +113,145 @@
     _ = pycompat.identity  # temporarily disable gettext
     # i18n: column positioning for "hg log"
     columns = _(
-        'bookmark:    %s\n'
-        'branch:      %s\n'
-        'changeset:   %s\n'
-        'copies:      %s\n'
-        'date:        %s\n'
-        'extra:       %s=%s\n'
-        'files+:      %s\n'
-        'files-:      %s\n'
-        'files:       %s\n'
-        'instability: %s\n'
-        'manifest:    %s\n'
-        'obsolete:    %s\n'
-        'parent:      %s\n'
-        'phase:       %s\n'
-        'summary:     %s\n'
-        'tag:         %s\n'
-        'user:        %s\n'
+        b'bookmark:    %s\n'
+        b'branch:      %s\n'
+        b'changeset:   %s\n'
+        b'copies:      %s\n'
+        b'date:        %s\n'
+        b'extra:       %s=%s\n'
+        b'files+:      %s\n'
+        b'files-:      %s\n'
+        b'files:       %s\n'
+        b'instability: %s\n'
+        b'manifest:    %s\n'
+        b'obsolete:    %s\n'
+        b'parent:      %s\n'
+        b'phase:       %s\n'
+        b'summary:     %s\n'
+        b'tag:         %s\n'
+        b'user:        %s\n'
     )
     return dict(
         zip(
-            [s.split(':', 1)[0] for s in columns.splitlines()],
+            [s.split(b':', 1)[0] for s in columns.splitlines()],
             i18n._(columns).splitlines(True),
         )
     )
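
To make the zip above concrete: it pairs each column's name (the text
before the first ':') with its full, translated format line. A standalone
sketch with just two of the columns:

    columns = b'bookmark:    %s\n' b'branch:      %s\n'  # implicit concat
    keys = [s.split(b':', 1)[0] for s in columns.splitlines()]
    assert keys == [b'bookmark', b'branch']
    mapping = dict(zip(keys, columns.splitlines(True)))  # keepends=True
    assert mapping[b'branch'] == b'branch:      %s\n'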
 
 
 # basic internal templates
-_changeidtmpl = '{rev}:{node|formatnode}'
+_changeidtmpl = b'{rev}:{node|formatnode}'
 
 # default templates internally used for rendering of lists
 defaulttempl = {
-    'parent': _changeidtmpl + ' ',
-    'manifest': _changeidtmpl,
-    'file_copy': '{name} ({source})',
-    'envvar': '{key}={value}',
-    'extra': '{key}={value|stringescape}',
+    b'parent': _changeidtmpl + b' ',
+    b'manifest': _changeidtmpl,
+    b'file_copy': b'{name} ({source})',
+    b'envvar': b'{key}={value}',
+    b'extra': b'{key}={value|stringescape}',
 }
 # filecopy is preserved for compatibility reasons
-defaulttempl['filecopy'] = defaulttempl['file_copy']
+defaulttempl[b'filecopy'] = defaulttempl[b'file_copy']
 
 # keywords are callables (see registrar.templatekeyword for details)
 keywords = {}
 templatekeyword = registrar.templatekeyword(keywords)
 
 
-@templatekeyword('author', requires={'ctx'})
+@templatekeyword(b'author', requires={b'ctx'})
 def showauthor(context, mapping):
     """Alias for ``{user}``"""
     return showuser(context, mapping)
 
 
-@templatekeyword('bisect', requires={'repo', 'ctx'})
+@templatekeyword(b'bisect', requires={b'repo', b'ctx'})
 def showbisect(context, mapping):
     """String. The changeset bisection status."""
-    repo = context.resource(mapping, 'repo')
-    ctx = context.resource(mapping, 'ctx')
+    repo = context.resource(mapping, b'repo')
+    ctx = context.resource(mapping, b'ctx')
     return hbisect.label(repo, ctx.node())
 
 
-@templatekeyword('branch', requires={'ctx'})
+@templatekeyword(b'branch', requires={b'ctx'})
 def showbranch(context, mapping):
     """String. The name of the branch on which the changeset was
     committed.
     """
-    ctx = context.resource(mapping, 'ctx')
+    ctx = context.resource(mapping, b'ctx')
     return ctx.branch()
 
 
-@templatekeyword('branches', requires={'ctx'})
+@templatekeyword(b'branches', requires={b'ctx'})
 def showbranches(context, mapping):
     """List of strings. The name of the branch on which the
     changeset was committed. Will be empty if the branch name was
     default. (DEPRECATED)
     """
-    ctx = context.resource(mapping, 'ctx')
+    ctx = context.resource(mapping, b'ctx')
     branch = ctx.branch()
-    if branch != 'default':
+    if branch != b'default':
         return compatlist(
-            context, mapping, 'branch', [branch], plural='branches'
+            context, mapping, b'branch', [branch], plural=b'branches'
         )
-    return compatlist(context, mapping, 'branch', [], plural='branches')
+    return compatlist(context, mapping, b'branch', [], plural=b'branches')
 
 
-@templatekeyword('bookmarks', requires={'repo', 'ctx'})
+@templatekeyword(b'bookmarks', requires={b'repo', b'ctx'})
 def showbookmarks(context, mapping):
     """List of strings. Any bookmarks associated with the
     changeset. Also sets 'active', the name of the active bookmark.
     """
-    repo = context.resource(mapping, 'repo')
-    ctx = context.resource(mapping, 'ctx')
+    repo = context.resource(mapping, b'repo')
+    ctx = context.resource(mapping, b'ctx')
     bookmarks = ctx.bookmarks()
     active = repo._activebookmark
-    makemap = lambda v: {'bookmark': v, 'active': active, 'current': active}
-    f = _showcompatlist(context, mapping, 'bookmark', bookmarks)
+    makemap = lambda v: {b'bookmark': v, b'active': active, b'current': active}
+    f = _showcompatlist(context, mapping, b'bookmark', bookmarks)
     return _hybrid(f, bookmarks, makemap, pycompat.identity)
 
 
-@templatekeyword('children', requires={'ctx'})
+@templatekeyword(b'children', requires={b'ctx'})
 def showchildren(context, mapping):
     """List of strings. The children of the changeset."""
-    ctx = context.resource(mapping, 'ctx')
-    childrevs = ['%d:%s' % (cctx.rev(), cctx) for cctx in ctx.children()]
-    return compatlist(context, mapping, 'children', childrevs, element='child')
+    ctx = context.resource(mapping, b'ctx')
+    childrevs = [b'%d:%s' % (cctx.rev(), cctx) for cctx in ctx.children()]
+    return compatlist(
+        context, mapping, b'children', childrevs, element=b'child'
+    )
 
 
 # Deprecated, but kept alive for help generation purposes.
-@templatekeyword('currentbookmark', requires={'repo', 'ctx'})
+@templatekeyword(b'currentbookmark', requires={b'repo', b'ctx'})
 def showcurrentbookmark(context, mapping):
     """String. The active bookmark, if it is associated with the changeset.
     (DEPRECATED)"""
     return showactivebookmark(context, mapping)
 
 
-@templatekeyword('activebookmark', requires={'repo', 'ctx'})
+@templatekeyword(b'activebookmark', requires={b'repo', b'ctx'})
 def showactivebookmark(context, mapping):
     """String. The active bookmark, if it is associated with the changeset."""
-    repo = context.resource(mapping, 'repo')
-    ctx = context.resource(mapping, 'ctx')
+    repo = context.resource(mapping, b'repo')
+    ctx = context.resource(mapping, b'ctx')
     active = repo._activebookmark
     if active and active in ctx.bookmarks():
         return active
-    return ''
+    return b''
 
 
-@templatekeyword('date', requires={'ctx'})
+@templatekeyword(b'date', requires={b'ctx'})
 def showdate(context, mapping):
     """Date information. The date when the changeset was committed."""
-    ctx = context.resource(mapping, 'ctx')
+    ctx = context.resource(mapping, b'ctx')
     # the default string format is '<float(unixtime)><tzoffset>' because
     # python-hglib splits the date at the decimal separator.
-    return templateutil.date(ctx.date(), showfmt='%d.0%d')
+    return templateutil.date(ctx.date(), showfmt=b'%d.0%d')
 
 
-@templatekeyword('desc', requires={'ctx'})
+@templatekeyword(b'desc', requires={b'ctx'})
 def showdescription(context, mapping):
     """String. The text of the changeset description."""
-    ctx = context.resource(mapping, 'ctx')
+    ctx = context.resource(mapping, b'ctx')
     s = ctx.description()
     if isinstance(s, encoding.localstr):
         # try hard to preserve utf-8 bytes
@@ -260,144 +262,146 @@
         return s.strip()
 
 
-@templatekeyword('diffstat', requires={'ui', 'ctx'})
+@templatekeyword(b'diffstat', requires={b'ui', b'ctx'})
 def showdiffstat(context, mapping):
     """String. Statistics of changes with the following format:
     "modified files: +added/-removed lines"
     """
-    ui = context.resource(mapping, 'ui')
-    ctx = context.resource(mapping, 'ctx')
-    diffopts = diffutil.diffallopts(ui, {'noprefix': False})
+    ui = context.resource(mapping, b'ui')
+    ctx = context.resource(mapping, b'ctx')
+    diffopts = diffutil.diffallopts(ui, {b'noprefix': False})
     diff = ctx.diff(opts=diffopts)
     stats = patch.diffstatdata(util.iterlines(diff))
     maxname, maxtotal, adds, removes, binary = patch.diffstatsum(stats)
-    return '%d: +%d/-%d' % (len(stats), adds, removes)
+    return b'%d: +%d/-%d' % (len(stats), adds, removes)
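
The {diffstat} keyword thus renders as "<changed files>: +<added>/-<removed>";
for example, a commit touching 3 files with 42 lines added and 7 removed:

    assert b'%d: +%d/-%d' % (3, 42, 7) == b'3: +42/-7'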
 
 
-@templatekeyword('envvars', requires={'ui'})
+@templatekeyword(b'envvars', requires={b'ui'})
 def showenvvars(context, mapping):
     """A dictionary of environment variables. (EXPERIMENTAL)"""
-    ui = context.resource(mapping, 'ui')
+    ui = context.resource(mapping, b'ui')
     env = ui.exportableenviron()
     env = util.sortdict((k, env[k]) for k in sorted(env))
-    return compatdict(context, mapping, 'envvar', env, plural='envvars')
+    return compatdict(context, mapping, b'envvar', env, plural=b'envvars')
 
 
-@templatekeyword('extras', requires={'ctx'})
+@templatekeyword(b'extras', requires={b'ctx'})
 def showextras(context, mapping):
     """List of dicts with key, value entries of the 'extras'
     field of this changeset."""
-    ctx = context.resource(mapping, 'ctx')
+    ctx = context.resource(mapping, b'ctx')
     extras = ctx.extra()
     extras = util.sortdict((k, extras[k]) for k in sorted(extras))
-    makemap = lambda k: {'key': k, 'value': extras[k]}
+    makemap = lambda k: {b'key': k, b'value': extras[k]}
     c = [makemap(k) for k in extras]
-    f = _showcompatlist(context, mapping, 'extra', c, plural='extras')
+    f = _showcompatlist(context, mapping, b'extra', c, plural=b'extras')
     return _hybrid(
         f,
         extras,
         makemap,
-        lambda k: '%s=%s' % (k, stringutil.escapestr(extras[k])),
+        lambda k: b'%s=%s' % (k, stringutil.escapestr(extras[k])),
     )
 
 
 def _getfilestatus(context, mapping, listall=False):
-    ctx = context.resource(mapping, 'ctx')
-    revcache = context.resource(mapping, 'revcache')
-    if 'filestatus' not in revcache or revcache['filestatusall'] < listall:
+    ctx = context.resource(mapping, b'ctx')
+    revcache = context.resource(mapping, b'revcache')
+    if b'filestatus' not in revcache or revcache[b'filestatusall'] < listall:
         stat = ctx.p1().status(
             ctx, listignored=listall, listclean=listall, listunknown=listall
         )
-        revcache['filestatus'] = stat
-        revcache['filestatusall'] = listall
-    return revcache['filestatus']
+        revcache[b'filestatus'] = stat
+        revcache[b'filestatusall'] = listall
+    return revcache[b'filestatus']
 
 
 def _getfilestatusmap(context, mapping, listall=False):
-    revcache = context.resource(mapping, 'revcache')
-    if 'filestatusmap' not in revcache or revcache['filestatusall'] < listall:
+    revcache = context.resource(mapping, b'revcache')
+    if b'filestatusmap' not in revcache or revcache[b'filestatusall'] < listall:
         stat = _getfilestatus(context, mapping, listall=listall)
-        revcache['filestatusmap'] = statmap = {}
-        for char, files in zip(pycompat.iterbytestr('MAR!?IC'), stat):
+        revcache[b'filestatusmap'] = statmap = {}
+        for char, files in zip(pycompat.iterbytestr(b'MAR!?IC'), stat):
             statmap.update((f, char) for f in files)
-    return revcache['filestatusmap']  # {path: statchar}
+    return revcache[b'filestatusmap']  # {path: statchar}
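
A standalone sketch of the statmap construction above: stat is a status
tuple of file lists (modified, added, removed, deleted, unknown, ignored,
clean), zipped with one status letter each. The slice trick stands in for
pycompat.iterbytestr, which yields 1-byte bytes on Python 3, where plain
iteration over bytes would yield ints:

    stat = ([b'mod.txt'], [b'new.txt'], [], [], [], [], [])
    letters = [b'MAR!?IC'[i:i + 1] for i in range(7)]
    statmap = {}
    for char, files in zip(letters, stat):
        statmap.update((f, char) for f in files)
    assert statmap == {b'mod.txt': b'M', b'new.txt': b'A'}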
 
 
-@templatekeyword('file_copies', requires={'repo', 'ctx', 'cache', 'revcache'})
+@templatekeyword(
+    b'file_copies', requires={b'repo', b'ctx', b'cache', b'revcache'}
+)
 def showfilecopies(context, mapping):
     """List of strings. Files copied in this changeset with
     their sources.
     """
-    repo = context.resource(mapping, 'repo')
-    ctx = context.resource(mapping, 'ctx')
-    cache = context.resource(mapping, 'cache')
-    copies = context.resource(mapping, 'revcache').get('copies')
+    repo = context.resource(mapping, b'repo')
+    ctx = context.resource(mapping, b'ctx')
+    cache = context.resource(mapping, b'cache')
+    copies = context.resource(mapping, b'revcache').get(b'copies')
     if copies is None:
-        if 'getcopies' not in cache:
-            cache['getcopies'] = scmutil.getcopiesfn(repo)
-        getcopies = cache['getcopies']
+        if b'getcopies' not in cache:
+            cache[b'getcopies'] = scmutil.getcopiesfn(repo)
+        getcopies = cache[b'getcopies']
         copies = getcopies(ctx)
     return templateutil.compatfilecopiesdict(
-        context, mapping, 'file_copy', copies
+        context, mapping, b'file_copy', copies
     )
 
 
 # showfilecopiesswitch() displays file copies only if copy records are
 # provided before calling the templater, usually with a --copies
 # command line switch.
-@templatekeyword('file_copies_switch', requires={'revcache'})
+@templatekeyword(b'file_copies_switch', requires={b'revcache'})
 def showfilecopiesswitch(context, mapping):
     """List of strings. Like "file_copies" but displayed
     only if the --copies switch is set.
     """
-    copies = context.resource(mapping, 'revcache').get('copies') or []
+    copies = context.resource(mapping, b'revcache').get(b'copies') or []
     return templateutil.compatfilecopiesdict(
-        context, mapping, 'file_copy', copies
+        context, mapping, b'file_copy', copies
     )
 
 
-@templatekeyword('file_adds', requires={'ctx', 'revcache'})
+@templatekeyword(b'file_adds', requires={b'ctx', b'revcache'})
 def showfileadds(context, mapping):
     """List of strings. Files added by this changeset."""
-    ctx = context.resource(mapping, 'ctx')
+    ctx = context.resource(mapping, b'ctx')
     return templateutil.compatfileslist(
-        context, mapping, 'file_add', ctx.filesadded()
+        context, mapping, b'file_add', ctx.filesadded()
     )
 
 
-@templatekeyword('file_dels', requires={'ctx', 'revcache'})
+@templatekeyword(b'file_dels', requires={b'ctx', b'revcache'})
 def showfiledels(context, mapping):
     """List of strings. Files removed by this changeset."""
-    ctx = context.resource(mapping, 'ctx')
+    ctx = context.resource(mapping, b'ctx')
     return templateutil.compatfileslist(
-        context, mapping, 'file_del', ctx.filesremoved()
+        context, mapping, b'file_del', ctx.filesremoved()
     )
 
 
-@templatekeyword('file_mods', requires={'ctx', 'revcache'})
+@templatekeyword(b'file_mods', requires={b'ctx', b'revcache'})
 def showfilemods(context, mapping):
     """List of strings. Files modified by this changeset."""
-    ctx = context.resource(mapping, 'ctx')
+    ctx = context.resource(mapping, b'ctx')
     return templateutil.compatfileslist(
-        context, mapping, 'file_mod', ctx.filesmodified()
+        context, mapping, b'file_mod', ctx.filesmodified()
     )
 
 
-@templatekeyword('files', requires={'ctx'})
+@templatekeyword(b'files', requires={b'ctx'})
 def showfiles(context, mapping):
     """List of strings. All files modified, added, or removed by this
     changeset.
     """
-    ctx = context.resource(mapping, 'ctx')
-    return templateutil.compatfileslist(context, mapping, 'file', ctx.files())
+    ctx = context.resource(mapping, b'ctx')
+    return templateutil.compatfileslist(context, mapping, b'file', ctx.files())
 
 
-@templatekeyword('graphnode', requires={'repo', 'ctx'})
+@templatekeyword(b'graphnode', requires={b'repo', b'ctx'})
 def showgraphnode(context, mapping):
     """String. The character representing the changeset node in an ASCII
     revision graph."""
-    repo = context.resource(mapping, 'repo')
-    ctx = context.resource(mapping, 'ctx')
+    repo = context.resource(mapping, b'repo')
+    ctx = context.resource(mapping, b'ctx')
     return getgraphnode(repo, ctx)
 
 
@@ -410,37 +414,37 @@
     if wpnodes[1] == nullid:
         wpnodes = wpnodes[:1]
     if ctx.node() in wpnodes:
-        return '@'
+        return b'@'
     else:
-        return ''
+        return b''
 
 
 def getgraphnodesymbol(ctx):
     if ctx.obsolete():
-        return 'x'
+        return b'x'
     elif ctx.isunstable():
-        return '*'
+        return b'*'
     elif ctx.closesbranch():
-        return '_'
+        return b'_'
     else:
-        return 'o'
+        return b'o'
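
Taken together, the two helpers pick one character per changeset for the
ASCII graph: '@' marks a working-directory parent, and otherwise the symbol
encodes the changeset's state, with obsolescence checked first. A standalone
decision-table sketch (the boolean predicates are hypothetical stand-ins for
the ctx methods):

    def symbol(obsolete, unstable, closesbranch):
        if obsolete:
            return b'x'
        elif unstable:
            return b'*'
        elif closesbranch:
            return b'_'
        return b'o'

    assert symbol(False, False, False) == b'o'
    assert symbol(True, True, False) == b'x'  # obsolete takes precedence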
 
 
-@templatekeyword('graphwidth', requires=())
+@templatekeyword(b'graphwidth', requires=())
 def showgraphwidth(context, mapping):
     """Integer. The width of the graph drawn by 'log --graph' or zero."""
     # just hosts documentation; should be overridden by template mapping
     return 0
 
 
-@templatekeyword('index', requires=())
+@templatekeyword(b'index', requires=())
 def showindex(context, mapping):
     """Integer. The current iteration of the loop. (0 indexed)"""
     # just hosts documentation; should be overridden by template mapping
-    raise error.Abort(_("can't use index in this context"))
+    raise error.Abort(_(b"can't use index in this context"))
 
 
-@templatekeyword('latesttag', requires={'repo', 'ctx', 'cache'})
+@templatekeyword(b'latesttag', requires={b'repo', b'ctx', b'cache'})
 def showlatesttag(context, mapping):
     """List of strings. The global tags on the most recent globally
     tagged ancestor of this changeset.  If no such tags exist, the list
@@ -457,54 +461,54 @@
     # branches in a stable manner- it is the date the tagged cset was created,
     # not the date the tag was created.  Therefore it isn't made visible here.
     makemap = lambda v: {
-        'changes': _showchangessincetag,
-        'distance': latesttags[1],
-        'latesttag': v,  # BC with {latesttag % '{latesttag}'}
-        'tag': v,
+        b'changes': _showchangessincetag,
+        b'distance': latesttags[1],
+        b'latesttag': v,  # BC with {latesttag % '{latesttag}'}
+        b'tag': v,
     }
 
     tags = latesttags[2]
-    f = _showcompatlist(context, mapping, 'latesttag', tags, separator=':')
+    f = _showcompatlist(context, mapping, b'latesttag', tags, separator=b':')
     return _hybrid(f, tags, makemap, pycompat.identity)
 
 
-@templatekeyword('latesttagdistance', requires={'repo', 'ctx', 'cache'})
+@templatekeyword(b'latesttagdistance', requires={b'repo', b'ctx', b'cache'})
 def showlatesttagdistance(context, mapping):
     """Integer. Longest path to the latest tag."""
     return getlatesttags(context, mapping)[1]
 
 
-@templatekeyword('changessincelatesttag', requires={'repo', 'ctx', 'cache'})
+@templatekeyword(b'changessincelatesttag', requires={b'repo', b'ctx', b'cache'})
 def showchangessincelatesttag(context, mapping):
     """Integer. All ancestors not in the latest tag."""
     tag = getlatesttags(context, mapping)[2][0]
-    mapping = context.overlaymap(mapping, {'tag': tag})
+    mapping = context.overlaymap(mapping, {b'tag': tag})
     return _showchangessincetag(context, mapping)
 
 
 def _showchangessincetag(context, mapping):
-    repo = context.resource(mapping, 'repo')
-    ctx = context.resource(mapping, 'ctx')
+    repo = context.resource(mapping, b'repo')
+    ctx = context.resource(mapping, b'ctx')
     offset = 0
     revs = [ctx.rev()]
-    tag = context.symbol(mapping, 'tag')
+    tag = context.symbol(mapping, b'tag')
 
     # The only() revset doesn't currently support wdir()
     if ctx.rev() is None:
         offset = 1
         revs = [p.rev() for p in ctx.parents()]
 
-    return len(repo.revs('only(%ld, %s)', revs, tag)) + offset
+    return len(repo.revs(b'only(%ld, %s)', revs, tag)) + offset
 
 
 # teach the templater that latesttags.changes is switched to the (context, mapping) API
-_showchangessincetag._requires = {'repo', 'ctx'}
+_showchangessincetag._requires = {b'repo', b'ctx'}
 
 
-@templatekeyword('manifest', requires={'repo', 'ctx'})
+@templatekeyword(b'manifest', requires={b'repo', b'ctx'})
 def showmanifest(context, mapping):
-    repo = context.resource(mapping, 'repo')
-    ctx = context.resource(mapping, 'ctx')
+    repo = context.resource(mapping, b'repo')
+    ctx = context.resource(mapping, b'ctx')
     mnode = ctx.manifestnode()
     if mnode is None:
         mnode = wdirid
@@ -512,14 +516,14 @@
     else:
         mrev = repo.manifestlog.rev(mnode)
     mhex = hex(mnode)
-    mapping = context.overlaymap(mapping, {'rev': mrev, 'node': mhex})
-    f = context.process('manifest', mapping)
+    mapping = context.overlaymap(mapping, {b'rev': mrev, b'node': mhex})
+    f = context.process(b'manifest', mapping)
     return templateutil.hybriditem(
-        f, None, f, lambda x: {'rev': mrev, 'node': mhex}
+        f, None, f, lambda x: {b'rev': mrev, b'node': mhex}
     )
 
 
-@templatekeyword('obsfate', requires={'ui', 'repo', 'ctx'})
+@templatekeyword(b'obsfate', requires={b'ui', b'repo', b'ctx'})
 def showobsfate(context, mapping):
     # this function returns a list containing pre-formatted obsfate strings.
     #
@@ -527,23 +531,23 @@
     # the verbosity templatekw available.
     succsandmarkers = showsuccsandmarkers(context, mapping)
 
-    ui = context.resource(mapping, 'ui')
-    repo = context.resource(mapping, 'repo')
+    ui = context.resource(mapping, b'ui')
+    repo = context.resource(mapping, b'repo')
     values = []
 
     for x in succsandmarkers.tovalue(context, mapping):
         v = obsutil.obsfateprinter(
-            ui, repo, x['successors'], x['markers'], scmutil.formatchangeid
+            ui, repo, x[b'successors'], x[b'markers'], scmutil.formatchangeid
         )
         values.append(v)
 
-    return compatlist(context, mapping, "fate", values)
+    return compatlist(context, mapping, b"fate", values)
 
 
 def shownames(context, mapping, namespace):
     """helper method to generate a template keyword for a namespace"""
-    repo = context.resource(mapping, 'repo')
-    ctx = context.resource(mapping, 'ctx')
+    repo = context.resource(mapping, b'repo')
+    ctx = context.resource(mapping, b'ctx')
     ns = repo.names[namespace]
     names = ns.names(repo, ctx.node())
     return compatlist(
@@ -551,127 +555,127 @@
     )
 
 
-@templatekeyword('namespaces', requires={'repo', 'ctx'})
+@templatekeyword(b'namespaces', requires={b'repo', b'ctx'})
 def shownamespaces(context, mapping):
     """Dict of lists. Names attached to this changeset per
     namespace."""
-    repo = context.resource(mapping, 'repo')
-    ctx = context.resource(mapping, 'ctx')
+    repo = context.resource(mapping, b'repo')
+    ctx = context.resource(mapping, b'ctx')
 
     namespaces = util.sortdict()
 
     def makensmapfn(ns):
         # 'name' for iterating over namespaces, templatename for local reference
-        return lambda v: {'name': v, ns.templatename: v}
+        return lambda v: {b'name': v, ns.templatename: v}
 
     for k, ns in repo.names.iteritems():
         names = ns.names(repo, ctx.node())
-        f = _showcompatlist(context, mapping, 'name', names)
+        f = _showcompatlist(context, mapping, b'name', names)
         namespaces[k] = _hybrid(f, names, makensmapfn(ns), pycompat.identity)
 
-    f = _showcompatlist(context, mapping, 'namespace', list(namespaces))
+    f = _showcompatlist(context, mapping, b'namespace', list(namespaces))
 
     def makemap(ns):
         return {
-            'namespace': ns,
-            'names': namespaces[ns],
-            'builtin': repo.names[ns].builtin,
-            'colorname': repo.names[ns].colorname,
+            b'namespace': ns,
+            b'names': namespaces[ns],
+            b'builtin': repo.names[ns].builtin,
+            b'colorname': repo.names[ns].colorname,
         }
 
     return _hybrid(f, namespaces, makemap, pycompat.identity)
 
 
-@templatekeyword('negrev', requires={'repo', 'ctx'})
+@templatekeyword(b'negrev', requires={b'repo', b'ctx'})
 def shownegrev(context, mapping):
     """Integer. The repository-local changeset negative revision number,
     which counts in the opposite direction."""
-    ctx = context.resource(mapping, 'ctx')
+    ctx = context.resource(mapping, b'ctx')
     rev = ctx.rev()
     if rev is None or rev < 0:  # wdir() or nullrev?
         return None
-    repo = context.resource(mapping, 'repo')
+    repo = context.resource(mapping, b'repo')
     return rev - len(repo)
 
 
-@templatekeyword('node', requires={'ctx'})
+@templatekeyword(b'node', requires={b'ctx'})
 def shownode(context, mapping):
     """String. The changeset identification hash, as a 40 hexadecimal
     digit string.
     """
-    ctx = context.resource(mapping, 'ctx')
+    ctx = context.resource(mapping, b'ctx')
     return ctx.hex()
 
 
-@templatekeyword('obsolete', requires={'ctx'})
+@templatekeyword(b'obsolete', requires={b'ctx'})
 def showobsolete(context, mapping):
     """String. Whether the changeset is obsolete. (EXPERIMENTAL)"""
-    ctx = context.resource(mapping, 'ctx')
+    ctx = context.resource(mapping, b'ctx')
     if ctx.obsolete():
-        return 'obsolete'
-    return ''
+        return b'obsolete'
+    return b''
 
 
-@templatekeyword('path', requires={'fctx'})
+@templatekeyword(b'path', requires={b'fctx'})
 def showpath(context, mapping):
     """String. Repository-absolute path of the current file. (EXPERIMENTAL)"""
-    fctx = context.resource(mapping, 'fctx')
+    fctx = context.resource(mapping, b'fctx')
     return fctx.path()
 
 
-@templatekeyword('peerurls', requires={'repo'})
+@templatekeyword(b'peerurls', requires={b'repo'})
 def showpeerurls(context, mapping):
     """A dictionary of repository locations defined in the [paths] section
     of your configuration file."""
-    repo = context.resource(mapping, 'repo')
+    repo = context.resource(mapping, b'repo')
     # see commands.paths() for naming of dictionary keys
     paths = repo.ui.paths
     urls = util.sortdict((k, p.rawloc) for k, p in sorted(paths.iteritems()))
 
     def makemap(k):
         p = paths[k]
-        d = {'name': k, 'url': p.rawloc}
+        d = {b'name': k, b'url': p.rawloc}
         d.update((o, v) for o, v in sorted(p.suboptions.iteritems()))
         return d
 
-    return _hybrid(None, urls, makemap, lambda k: '%s=%s' % (k, urls[k]))
+    return _hybrid(None, urls, makemap, lambda k: b'%s=%s' % (k, urls[k]))
 
 
-@templatekeyword("predecessors", requires={'repo', 'ctx'})
+@templatekeyword(b"predecessors", requires={b'repo', b'ctx'})
 def showpredecessors(context, mapping):
     """Returns the list of the closest visible predecessors. (EXPERIMENTAL)"""
-    repo = context.resource(mapping, 'repo')
-    ctx = context.resource(mapping, 'ctx')
+    repo = context.resource(mapping, b'repo')
+    ctx = context.resource(mapping, b'ctx')
     predecessors = sorted(obsutil.closestpredecessors(repo, ctx.node()))
     predecessors = pycompat.maplist(hex, predecessors)
 
     return _hybrid(
         None,
         predecessors,
-        lambda x: {'ctx': repo[x]},
+        lambda x: {b'ctx': repo[x]},
         lambda x: scmutil.formatchangeid(repo[x]),
     )
 
 
-@templatekeyword('reporoot', requires={'repo'})
+@templatekeyword(b'reporoot', requires={b'repo'})
 def showreporoot(context, mapping):
     """String. The root directory of the current repository."""
-    repo = context.resource(mapping, 'repo')
+    repo = context.resource(mapping, b'repo')
     return repo.root
 
 
-@templatekeyword('size', requires={'fctx'})
+@templatekeyword(b'size', requires={b'fctx'})
 def showsize(context, mapping):
     """Integer. Size of the current file in bytes. (EXPERIMENTAL)"""
-    fctx = context.resource(mapping, 'fctx')
+    fctx = context.resource(mapping, b'fctx')
     return fctx.size()
 
 
 # requires 'fctx' to denote that {status} depends on the (ctx, path) pair
-@templatekeyword('status', requires={'ctx', 'fctx', 'revcache'})
+@templatekeyword(b'status', requires={b'ctx', b'fctx', b'revcache'})
 def showstatus(context, mapping):
     """String. Status code of the current file. (EXPERIMENTAL)"""
-    path = templateutil.runsymbol(context, mapping, 'path')
+    path = templateutil.runsymbol(context, mapping, b'path')
     path = templateutil.stringify(context, mapping, path)
     if not path:
         return
@@ -681,15 +685,15 @@
     return statmap.get(path)
 
 
-@templatekeyword("successorssets", requires={'repo', 'ctx'})
+@templatekeyword(b"successorssets", requires={b'repo', b'ctx'})
 def showsuccessorssets(context, mapping):
     """Returns a string of sets of successors for a changectx. Format used
     is: [ctx1, ctx2], [ctx3] if ctx has been split into ctx1 and ctx2
     while also diverged into ctx3. (EXPERIMENTAL)"""
-    repo = context.resource(mapping, 'repo')
-    ctx = context.resource(mapping, 'ctx')
+    repo = context.resource(mapping, b'repo')
+    ctx = context.resource(mapping, b'ctx')
     if not ctx.obsolete():
-        return ''
+        return b''
 
     ssets = obsutil.successorssets(repo, ctx.node(), closest=True)
     ssets = [[hex(n) for n in ss] for ss in ssets]
@@ -699,7 +703,7 @@
         h = _hybrid(
             None,
             ss,
-            lambda x: {'ctx': repo[x]},
+            lambda x: {b'ctx': repo[x]},
             lambda x: scmutil.formatchangeid(repo[x]),
         )
         data.append(h)
@@ -709,22 +713,22 @@
         return templateutil.stringify(context, mapping, d)
 
     def gen(data):
-        yield "; ".join(render(d) for d in data)
+        yield b"; ".join(render(d) for d in data)
 
     return _hybrid(
-        gen(data), data, lambda x: {'successorset': x}, pycompat.identity
+        gen(data), data, lambda x: {b'successorset': x}, pycompat.identity
     )
 
 
-@templatekeyword("succsandmarkers", requires={'repo', 'ctx'})
+@templatekeyword(b"succsandmarkers", requires={b'repo', b'ctx'})
 def showsuccsandmarkers(context, mapping):
     """Returns a list of dict for each final successor of ctx. The dict
     contains successors node id in "successors" keys and the list of
     obs-markers from ctx to the set of successors in "markers".
     (EXPERIMENTAL)
     """
-    repo = context.resource(mapping, 'repo')
-    ctx = context.resource(mapping, 'ctx')
+    repo = context.resource(mapping, b'repo')
+    ctx = context.resource(mapping, b'ctx')
 
     values = obsutil.successorsandmarkers(repo, ctx)
 
@@ -735,19 +739,19 @@
     data = []
     for i in values:
         # Format successors
-        successors = i['successors']
+        successors = i[b'successors']
 
         successors = [hex(n) for n in successors]
         successors = _hybrid(
             None,
             successors,
-            lambda x: {'ctx': repo[x]},
+            lambda x: {b'ctx': repo[x]},
             lambda x: scmutil.formatchangeid(repo[x]),
         )
 
         # Format markers
         finalmarkers = []
-        for m in i['markers']:
+        for m in i[b'markers']:
             hexprec = hex(m[0])
             hexsucs = tuple(hex(n) for n in m[1])
             hexparents = None
@@ -756,130 +760,130 @@
             newmarker = (hexprec, hexsucs) + m[2:5] + (hexparents,) + m[6:]
             finalmarkers.append(newmarker)
 
-        data.append({'successors': successors, 'markers': finalmarkers})
+        data.append({b'successors': successors, b'markers': finalmarkers})
 
     return templateutil.mappinglist(data)
 
 
-@templatekeyword('p1', requires={'ctx'})
+@templatekeyword(b'p1', requires={b'ctx'})
 def showp1(context, mapping):
     """Changeset. The changeset's first parent. ``{p1.rev}`` for the revision
     number, and ``{p1.node}`` for the identification hash."""
-    ctx = context.resource(mapping, 'ctx')
-    return templateutil.mappingdict({'ctx': ctx.p1()}, tmpl=_changeidtmpl)
+    ctx = context.resource(mapping, b'ctx')
+    return templateutil.mappingdict({b'ctx': ctx.p1()}, tmpl=_changeidtmpl)
 
 
-@templatekeyword('p2', requires={'ctx'})
+@templatekeyword(b'p2', requires={b'ctx'})
 def showp2(context, mapping):
     """Changeset. The changeset's second parent. ``{p2.rev}`` for the revision
     number, and ``{p2.node}`` for the identification hash."""
-    ctx = context.resource(mapping, 'ctx')
-    return templateutil.mappingdict({'ctx': ctx.p2()}, tmpl=_changeidtmpl)
+    ctx = context.resource(mapping, b'ctx')
+    return templateutil.mappingdict({b'ctx': ctx.p2()}, tmpl=_changeidtmpl)
 
 
-@templatekeyword('p1rev', requires={'ctx'})
+@templatekeyword(b'p1rev', requires={b'ctx'})
 def showp1rev(context, mapping):
     """Integer. The repository-local revision number of the changeset's
     first parent, or -1 if the changeset has no parents. (DEPRECATED)"""
-    ctx = context.resource(mapping, 'ctx')
+    ctx = context.resource(mapping, b'ctx')
     return ctx.p1().rev()
 
 
-@templatekeyword('p2rev', requires={'ctx'})
+@templatekeyword(b'p2rev', requires={b'ctx'})
 def showp2rev(context, mapping):
     """Integer. The repository-local revision number of the changeset's
     second parent, or -1 if the changeset has no second parent. (DEPRECATED)"""
-    ctx = context.resource(mapping, 'ctx')
+    ctx = context.resource(mapping, b'ctx')
     return ctx.p2().rev()
 
 
-@templatekeyword('p1node', requires={'ctx'})
+@templatekeyword(b'p1node', requires={b'ctx'})
 def showp1node(context, mapping):
     """String. The identification hash of the changeset's first parent,
     as a 40 digit hexadecimal string. If the changeset has no parents, all
     digits are 0. (DEPRECATED)"""
-    ctx = context.resource(mapping, 'ctx')
+    ctx = context.resource(mapping, b'ctx')
     return ctx.p1().hex()
 
 
-@templatekeyword('p2node', requires={'ctx'})
+@templatekeyword(b'p2node', requires={b'ctx'})
 def showp2node(context, mapping):
     """String. The identification hash of the changeset's second
     parent, as a 40 digit hexadecimal string. If the changeset has no second
     parent, all digits are 0. (DEPRECATED)"""
-    ctx = context.resource(mapping, 'ctx')
+    ctx = context.resource(mapping, b'ctx')
     return ctx.p2().hex()
 
 
-@templatekeyword('parents', requires={'repo', 'ctx'})
+@templatekeyword(b'parents', requires={b'repo', b'ctx'})
 def showparents(context, mapping):
     """List of strings. The parents of the changeset in "rev:node"
     format. If the changeset has only one "natural" parent (the predecessor
     revision) nothing is shown."""
-    repo = context.resource(mapping, 'repo')
-    ctx = context.resource(mapping, 'ctx')
+    repo = context.resource(mapping, b'repo')
+    ctx = context.resource(mapping, b'ctx')
     pctxs = scmutil.meaningfulparents(repo, ctx)
     prevs = [p.rev() for p in pctxs]
     parents = [
-        [('rev', p.rev()), ('node', p.hex()), ('phase', p.phasestr())]
+        [(b'rev', p.rev()), (b'node', p.hex()), (b'phase', p.phasestr())]
         for p in pctxs
     ]
-    f = _showcompatlist(context, mapping, 'parent', parents)
+    f = _showcompatlist(context, mapping, b'parent', parents)
     return _hybrid(
         f,
         prevs,
-        lambda x: {'ctx': repo[x]},
+        lambda x: {b'ctx': repo[x]},
         lambda x: scmutil.formatchangeid(repo[x]),
         keytype=int,
     )
 
 
-@templatekeyword('phase', requires={'ctx'})
+@templatekeyword(b'phase', requires={b'ctx'})
 def showphase(context, mapping):
     """String. The changeset phase name."""
-    ctx = context.resource(mapping, 'ctx')
+    ctx = context.resource(mapping, b'ctx')
     return ctx.phasestr()
 
 
-@templatekeyword('phaseidx', requires={'ctx'})
+@templatekeyword(b'phaseidx', requires={b'ctx'})
 def showphaseidx(context, mapping):
     """Integer. The changeset phase index. (ADVANCED)"""
-    ctx = context.resource(mapping, 'ctx')
+    ctx = context.resource(mapping, b'ctx')
     return ctx.phase()
 
 
-@templatekeyword('rev', requires={'ctx'})
+@templatekeyword(b'rev', requires={b'ctx'})
 def showrev(context, mapping):
     """Integer. The repository-local changeset revision number."""
-    ctx = context.resource(mapping, 'ctx')
+    ctx = context.resource(mapping, b'ctx')
     return scmutil.intrev(ctx)
 
 
 def showrevslist(context, mapping, name, revs):
     """helper to generate a list of revisions in which a mapped template will
     be evaluated"""
-    repo = context.resource(mapping, 'repo')
+    repo = context.resource(mapping, b'repo')
     # revs may be a smartset; don't compute it until f() has to be evaluated
     def f():
-        srevs = ['%d' % r for r in revs]
+        srevs = [b'%d' % r for r in revs]
         return _showcompatlist(context, mapping, name, srevs)
 
     return _hybrid(
         f,
         revs,
-        lambda x: {name: x, 'ctx': repo[x]},
+        lambda x: {name: x, b'ctx': repo[x]},
         pycompat.identity,
         keytype=int,
     )
 
 
-@templatekeyword('subrepos', requires={'ctx'})
+@templatekeyword(b'subrepos', requires={b'ctx'})
 def showsubrepos(context, mapping):
     """List of strings. Updated subrepositories in the changeset."""
-    ctx = context.resource(mapping, 'ctx')
+    ctx = context.resource(mapping, b'ctx')
     substate = ctx.substate
     if not substate:
-        return compatlist(context, mapping, 'subrepo', [])
+        return compatlist(context, mapping, b'subrepo', [])
     psubstate = ctx.p1().substate or {}
     subrepos = []
     for sub in substate:
@@ -888,91 +892,91 @@
     for sub in psubstate:
         if sub not in substate:
             subrepos.append(sub)  # removed in ctx
-    return compatlist(context, mapping, 'subrepo', sorted(subrepos))
+    return compatlist(context, mapping, b'subrepo', sorted(subrepos))
 
 
 # don't remove "showtags" definition, even though namespaces will put
 # a helper function for "tags" keyword into "keywords" map automatically,
 # because online help text is built without namespaces initialization
-@templatekeyword('tags', requires={'repo', 'ctx'})
+@templatekeyword(b'tags', requires={b'repo', b'ctx'})
 def showtags(context, mapping):
     """List of strings. Any tags associated with the changeset."""
-    return shownames(context, mapping, 'tags')
+    return shownames(context, mapping, b'tags')
 
 
-@templatekeyword('termwidth', requires={'ui'})
+@templatekeyword(b'termwidth', requires={b'ui'})
 def showtermwidth(context, mapping):
     """Integer. The width of the current terminal."""
-    ui = context.resource(mapping, 'ui')
+    ui = context.resource(mapping, b'ui')
     return ui.termwidth()
 
 
-@templatekeyword('user', requires={'ctx'})
+@templatekeyword(b'user', requires={b'ctx'})
 def showuser(context, mapping):
     """String. The unmodified author of the changeset."""
-    ctx = context.resource(mapping, 'ctx')
+    ctx = context.resource(mapping, b'ctx')
     return ctx.user()
 
 
-@templatekeyword('instabilities', requires={'ctx'})
+@templatekeyword(b'instabilities', requires={b'ctx'})
 def showinstabilities(context, mapping):
     """List of strings. Evolution instabilities affecting the changeset.
     (EXPERIMENTAL)
     """
-    ctx = context.resource(mapping, 'ctx')
+    ctx = context.resource(mapping, b'ctx')
     return compatlist(
         context,
         mapping,
-        'instability',
+        b'instability',
         ctx.instabilities(),
-        plural='instabilities',
+        plural=b'instabilities',
     )
 
 
-@templatekeyword('verbosity', requires={'ui'})
+@templatekeyword(b'verbosity', requires={b'ui'})
 def showverbosity(context, mapping):
     """String. The current output verbosity in 'debug', 'quiet', 'verbose',
     or ''."""
-    ui = context.resource(mapping, 'ui')
+    ui = context.resource(mapping, b'ui')
     # see logcmdutil.changesettemplater for priority of these flags
     if ui.debugflag:
-        return 'debug'
+        return b'debug'
     elif ui.quiet:
-        return 'quiet'
+        return b'quiet'
     elif ui.verbose:
-        return 'verbose'
-    return ''
+        return b'verbose'
+    return b''
 
 
-@templatekeyword('whyunstable', requires={'repo', 'ctx'})
+@templatekeyword(b'whyunstable', requires={b'repo', b'ctx'})
 def showwhyunstable(context, mapping):
     """List of dicts explaining all instabilities of a changeset.
     (EXPERIMENTAL)
     """
-    repo = context.resource(mapping, 'repo')
-    ctx = context.resource(mapping, 'ctx')
+    repo = context.resource(mapping, b'repo')
+    ctx = context.resource(mapping, b'ctx')
 
     def formatnode(ctx):
-        return '%s (%s)' % (scmutil.formatchangeid(ctx), ctx.phasestr())
+        return b'%s (%s)' % (scmutil.formatchangeid(ctx), ctx.phasestr())
 
     entries = obsutil.whyunstable(repo, ctx)
 
     for entry in entries:
-        if entry.get('divergentnodes'):
-            dnodes = entry['divergentnodes']
+        if entry.get(b'divergentnodes'):
+            dnodes = entry[b'divergentnodes']
             dnhybrid = _hybrid(
                 None,
                 [dnode.hex() for dnode in dnodes],
-                lambda x: {'ctx': repo[x]},
+                lambda x: {b'ctx': repo[x]},
                 lambda x: formatnode(repo[x]),
             )
-            entry['divergentnodes'] = dnhybrid
+            entry[b'divergentnodes'] = dnhybrid
 
     tmpl = (
-        '{instability}:{if(divergentnodes, " ")}{divergentnodes} '
-        '{reason} {node|short}'
+        b'{instability}:{if(divergentnodes, " ")}{divergentnodes} '
+        b'{reason} {node|short}'
     )
-    return templateutil.mappinglist(entries, tmpl=tmpl, sep='\n')
+    return templateutil.mappinglist(entries, tmpl=tmpl, sep=b'\n')
 
 
 def loadkeyword(ui, extname, registrarobj):
--- a/mercurial/templater.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/templater.py	Sun Oct 06 09:48:39 2019 -0400
@@ -85,22 +85,22 @@
 
 elements = {
     # token-type: binding-strength, primary, prefix, infix, suffix
-    "(": (20, None, ("group", 1, ")"), ("func", 1, ")"), None),
-    ".": (18, None, None, (".", 18), None),
-    "%": (15, None, None, ("%", 15), None),
-    "|": (15, None, None, ("|", 15), None),
-    "*": (5, None, None, ("*", 5), None),
-    "/": (5, None, None, ("/", 5), None),
-    "+": (4, None, None, ("+", 4), None),
-    "-": (4, None, ("negate", 19), ("-", 4), None),
-    "=": (3, None, None, ("keyvalue", 3), None),
-    ",": (2, None, None, ("list", 2), None),
-    ")": (0, None, None, None, None),
-    "integer": (0, "integer", None, None, None),
-    "symbol": (0, "symbol", None, None, None),
-    "string": (0, "string", None, None, None),
-    "template": (0, "template", None, None, None),
-    "end": (0, None, None, None, None),
+    b"(": (20, None, (b"group", 1, b")"), (b"func", 1, b")"), None),
+    b".": (18, None, None, (b".", 18), None),
+    b"%": (15, None, None, (b"%", 15), None),
+    b"|": (15, None, None, (b"|", 15), None),
+    b"*": (5, None, None, (b"*", 5), None),
+    b"/": (5, None, None, (b"/", 5), None),
+    b"+": (4, None, None, (b"+", 4), None),
+    b"-": (4, None, (b"negate", 19), (b"-", 4), None),
+    b"=": (3, None, None, (b"keyvalue", 3), None),
+    b",": (2, None, None, (b"list", 2), None),
+    b")": (0, None, None, None, None),
+    b"integer": (0, b"integer", None, None, None),
+    b"symbol": (0, b"symbol", None, None, None),
+    b"string": (0, b"string", None, None, None),
+    b"template": (0, b"template", None, None, None),
+    b"end": (0, None, None, None, None),
 }
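
As the comment says, each value unpacks as (binding strength, primary,
prefix, infix, suffix); higher strength binds tighter, so '.' (18) groups
before '|' and '%' (15), which group before '+' and '-' (4). A standalone
sketch using the '-' entry, which is both a prefix operator (negation,
strength 19) and an infix operator (subtraction, strength 4):

    strength, primary, prefix, infix, suffix = (
        4,
        None,
        (b"negate", 19),
        (b"-", 4),
        None,
    )
    assert prefix == (b"negate", 19) and infix == (b"-", 4)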
 
 
@@ -113,28 +113,28 @@
         c = program[pos]
         if c.isspace():  # skip inter-token whitespace
             pass
-        elif c in "(=,).%|+-*/":  # handle simple operators
+        elif c in b"(=,).%|+-*/":  # handle simple operators
             yield (c, None, pos)
-        elif c in '"\'':  # handle quoted templates
+        elif c in b'"\'':  # handle quoted templates
             s = pos + 1
             data, pos = _parsetemplate(program, s, end, c)
-            yield ('template', data, s)
+            yield (b'template', data, s)
             pos -= 1
-        elif c == 'r' and program[pos : pos + 2] in ("r'", 'r"'):
+        elif c == b'r' and program[pos : pos + 2] in (b"r'", b'r"'):
             # handle quoted strings
             c = program[pos + 1]
             s = pos = pos + 2
             while pos < end:  # find closing quote
                 d = program[pos]
-                if d == '\\':  # skip over escaped characters
+                if d == b'\\':  # skip over escaped characters
                     pos += 2
                     continue
                 if d == c:
-                    yield ('string', program[s:pos], s)
+                    yield (b'string', program[s:pos], s)
                     break
                 pos += 1
             else:
-                raise error.ParseError(_("unterminated string"), s)
+                raise error.ParseError(_(b"unterminated string"), s)
         elif c.isdigit():
             s = pos
             while pos < end:
@@ -142,12 +142,12 @@
                 if not d.isdigit():
                     break
                 pos += 1
-            yield ('integer', program[s:pos], s)
+            yield (b'integer', program[s:pos], s)
             pos -= 1
         elif (
-            c == '\\'
+            c == b'\\'
             and program[pos : pos + 2] in (br"\'", br'\"')
-            or c == 'r'
+            or c == b'r'
             and program[pos : pos + 3] in (br"r\'", br'r\"')
         ):
             # handle escaped quoted strings for compatibility with 2.9.2-3.4,
@@ -160,51 +160,51 @@
             # {f("\\\\ {g(\"\\\"\")}"}    \\ {g("\"")}    [r'\\', {g("\"")}]
             #             ~~~~~~~~
             #             escaped quoted string
-            if c == 'r':
+            if c == b'r':
                 pos += 1
-                token = 'string'
+                token = b'string'
             else:
-                token = 'template'
+                token = b'template'
             quote = program[pos : pos + 2]
             s = pos = pos + 2
             while pos < end:  # find closing escaped quote
-                if program.startswith('\\\\\\', pos, end):
+                if program.startswith(b'\\\\\\', pos, end):
                     pos += 4  # skip over double escaped characters
                     continue
                 if program.startswith(quote, pos, end):
                     # interpret as if it were a part of an outer string
                     data = parser.unescapestr(program[s:pos])
-                    if token == 'template':
+                    if token == b'template':
                         data = _parsetemplate(data, 0, len(data))[0]
                     yield (token, data, s)
                     pos += 1
                     break
                 pos += 1
             else:
-                raise error.ParseError(_("unterminated string"), s)
-        elif c.isalnum() or c in '_':
+                raise error.ParseError(_(b"unterminated string"), s)
+        elif c.isalnum() or c in b'_':
             s = pos
             pos += 1
             while pos < end:  # find end of symbol
                 d = program[pos]
-                if not (d.isalnum() or d == "_"):
+                if not (d.isalnum() or d == b"_"):
                     break
                 pos += 1
             sym = program[s:pos]
-            yield ('symbol', sym, s)
+            yield (b'symbol', sym, s)
             pos -= 1
         elif c == term:
-            yield ('end', None, pos)
+            yield (b'end', None, pos)
             return
         else:
-            raise error.ParseError(_("syntax error"), pos)
+            raise error.ParseError(_(b"syntax error"), pos)
         pos += 1
     if term:
-        raise error.ParseError(_("unterminated template expansion"), start)
-    yield ('end', None, pos)
+        raise error.ParseError(_(b"unterminated template expansion"), start)
+    yield (b'end', None, pos)
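
One Python 3 subtlety behind this scanner: indexing plain bytes yields an
int, so single-character tests like c.isspace() or c == b'r' only work
because program indexes to 1-byte bytes objects (pycompat.bytestr, a bytes
subclass, provides that behavior). A standalone illustration:

    program = b'a + 1'
    assert isinstance(program[0], int)  # plain bytes indexing gives ints
    c = program[0:1]                    # slicing stays bytes
    assert c == b'a' and c.isalnum()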
 
 
-def _parsetemplate(tmpl, start, stop, quote=''):
+def _parsetemplate(tmpl, start, stop, quote=b''):
     r"""
     >>> _parsetemplate(b'foo{bar}"baz', 0, 12)
     ([('string', 'foo'), ('symbol', 'bar'), ('string', '"baz')], 12)
@@ -219,15 +219,15 @@
     """
     parsed = []
     for typ, val, pos in _scantemplate(tmpl, start, stop, quote):
-        if typ == 'string':
+        if typ == b'string':
             parsed.append((typ, val))
-        elif typ == 'template':
+        elif typ == b'template':
             parsed.append(val)
-        elif typ == 'end':
+        elif typ == b'end':
             return parsed, pos
         else:
-            raise error.ProgrammingError('unexpected type: %s' % typ)
-    raise error.ProgrammingError('unterminated scanning of template')
+            raise error.ProgrammingError(b'unexpected type: %s' % typ)
+    raise error.ProgrammingError(b'unterminated scanning of template')
 
 
 def scantemplate(tmpl, raw=False):
@@ -252,16 +252,16 @@
     for typ, val, pos in _scantemplate(tmpl, 0, len(tmpl), raw=raw):
         if last:
             yield last + (pos,)
-        if typ == 'end':
+        if typ == b'end':
             return
         else:
             last = (typ, pos)
-    raise error.ProgrammingError('unterminated scanning of template')
+    raise error.ProgrammingError(b'unterminated scanning of template')
 
 
-def _scantemplate(tmpl, start, stop, quote='', raw=False):
+def _scantemplate(tmpl, start, stop, quote=b'', raw=False):
     """Parse template string into chunks of strings and template expressions"""
-    sepchars = '{' + quote
+    sepchars = b'{' + quote
     unescape = [parser.unescapestr, pycompat.identity][raw]
     pos = start
     p = parser.parser(elements)
@@ -272,49 +272,49 @@
                 key=lambda n: (n < 0, n),
             )
             if n < 0:
-                yield ('string', unescape(tmpl[pos:stop]), pos)
+                yield (b'string', unescape(tmpl[pos:stop]), pos)
                 pos = stop
                 break
             c = tmpl[n : n + 1]
             bs = 0  # count leading backslashes
             if not raw:
-                bs = (n - pos) - len(tmpl[pos:n].rstrip('\\'))
+                bs = (n - pos) - len(tmpl[pos:n].rstrip(b'\\'))
             if bs % 2 == 1:
                 # escaped (e.g. '\{', '\\\{', but not '\\{')
-                yield ('string', unescape(tmpl[pos : n - 1]) + c, pos)
+                yield (b'string', unescape(tmpl[pos : n - 1]) + c, pos)
                 pos = n + 1
                 continue
             if n > pos:
-                yield ('string', unescape(tmpl[pos:n]), pos)
+                yield (b'string', unescape(tmpl[pos:n]), pos)
             if c == quote:
-                yield ('end', None, n + 1)
+                yield (b'end', None, n + 1)
                 return
 
-            parseres, pos = p.parse(tokenize(tmpl, n + 1, stop, '}'))
-            if not tmpl.startswith('}', pos):
-                raise error.ParseError(_("invalid token"), pos)
-            yield ('template', parseres, n)
+            parseres, pos = p.parse(tokenize(tmpl, n + 1, stop, b'}'))
+            if not tmpl.startswith(b'}', pos):
+                raise error.ParseError(_(b"invalid token"), pos)
+            yield (b'template', parseres, n)
             pos += 1
 
         if quote:
-            raise error.ParseError(_("unterminated string"), start)
+            raise error.ParseError(_(b"unterminated string"), start)
     except error.ParseError as inst:
         if len(inst.args) > 1:  # has location
             loc = inst.args[1]
             # Offset the caret location by the number of newlines before the
             # location of the error, since we will replace one-char newlines
             # with the two-char literal r'\n'.
-            offset = tmpl[:loc].count('\n')
-            tmpl = tmpl.replace('\n', br'\n')
+            offset = tmpl[:loc].count(b'\n')
+            tmpl = tmpl.replace(b'\n', br'\n')
             # We want the caret to point to the place in the template that
             # failed to parse, but in a hint we get an open paren at the
             # start. Therefore, we print "loc + 1" spaces (instead of "loc")
             # to line up the caret with the location of the error.
             inst.hint = (
-                tmpl + '\n' + ' ' * (loc + 1 + offset) + '^ ' + _('here')
+                tmpl + b'\n' + b' ' * (loc + 1 + offset) + b'^ ' + _(b'here')
             )
         raise
-    yield ('end', None, pos)
+    yield (b'end', None, pos)
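
The caret arithmetic in the except branch can be checked in isolation: each
newline before the error location becomes the two-character literal r'\n',
so the caret shifts right by one per preceding newline (the extra +1
compensates for the open paren a rendered hint starts with):

    tmpl, loc = b'a\nb{', 3               # error at the '{'
    offset = tmpl[:loc].count(b'\n')      # one newline precedes it
    shown = tmpl.replace(b'\n', br'\n')
    assert shown == br'a\nb{' and offset == 1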
 
 
 def _unnesttemplatelist(tree):
@@ -339,14 +339,14 @@
     if not isinstance(tree, tuple):
         return tree
     op = tree[0]
-    if op != 'template':
+    if op != b'template':
         return (op,) + tuple(_unnesttemplatelist(x) for x in tree[1:])
 
     assert len(tree) == 2
     xs = tuple(_unnesttemplatelist(x) for x in tree[1])
     if not xs:
-        return ('string', '')  # empty template ""
-    elif len(xs) == 1 and xs[0][0] == 'string':
+        return (b'string', b'')  # empty template ""
+    elif len(xs) == 1 and xs[0][0] == b'string':
         return xs[0]  # fast path for string with no template fragment "x"
     else:
         return (op,) + xs
@@ -355,8 +355,8 @@
 def parse(tmpl):
     """Parse template string into tree"""
     parsed, pos = _parsetemplate(tmpl, 0, len(tmpl))
-    assert pos == len(tmpl), 'unquoted template should be consumed'
-    return _unnesttemplatelist(('template', parsed))
+    assert pos == len(tmpl), b'unquoted template should be consumed'
+    return _unnesttemplatelist((b'template', parsed))
 
 
 def _parseexpr(expr):
@@ -378,18 +378,18 @@
     p = parser.parser(elements)
     tree, pos = p.parse(tokenize(expr, 0, len(expr)))
     if pos != len(expr):
-        raise error.ParseError(_('invalid token'), pos)
+        raise error.ParseError(_(b'invalid token'), pos)
     return _unnesttemplatelist(tree)
 
 
 def prettyformat(tree):
-    return parser.prettyformat(tree, ('integer', 'string', 'symbol'))
+    return parser.prettyformat(tree, (b'integer', b'string', b'symbol'))
 
 
 def compileexp(exp, context, curmethods):
     """Compile parsed template tree to (func, data) pair"""
     if not exp:
-        raise error.ParseError(_("missing argument"))
+        raise error.ParseError(_(b"missing argument"))
     t = exp[0]
     return curmethods[t](exp, context)
 
@@ -398,15 +398,15 @@
 
 
 def getsymbol(exp):
-    if exp[0] == 'symbol':
+    if exp[0] == b'symbol':
         return exp[1]
-    raise error.ParseError(_("expected a symbol, got '%s'") % exp[0])
+    raise error.ParseError(_(b"expected a symbol, got '%s'") % exp[0])
 
 
 def getlist(x):
     if not x:
         return []
-    if x[0] == 'list':
+    if x[0] == b'list':
         return getlist(x[1]) + [x[2]]
     return [x]
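
The recursion works because the parser produces left-nested 'list' nodes:
"a, b, c" becomes ('list', ('list', a, b), c), and getlist() flattens that
back into [a, b, c]. A standalone sketch of the same shape:

    def flatten(x):  # same logic as getlist() above
        if not x:
            return []
        if x[0] == b'list':
            return flatten(x[1]) + [x[2]]
        return [x]

    a, b, c = (b'symbol', b'a'), (b'symbol', b'b'), (b'symbol', b'c')
    assert flatten((b'list', (b'list', a, b), c)) == [a, b, c]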
 
@@ -414,18 +414,18 @@
 def gettemplate(exp, context):
     """Compile given template tree or load named template from map file;
     returns (func, data) pair"""
-    if exp[0] in ('template', 'string'):
+    if exp[0] in (b'template', b'string'):
         return compileexp(exp, context, methods)
-    if exp[0] == 'symbol':
+    if exp[0] == b'symbol':
         # unlike runsymbol(), here 'symbol' is always taken as template name
         # even if it exists in mapping. this allows us to override mapping
         # by web templates, e.g. 'changelogtag' is redefined in map file.
         return context._load(exp[1])
-    raise error.ParseError(_("expected template specifier"))
+    raise error.ParseError(_(b"expected template specifier"))
 
 
 def _runrecursivesymbol(context, mapping, key):
-    raise error.Abort(_("recursive reference '%s' in template") % key)
+    raise error.Abort(_(b"recursive reference '%s' in template") % key)
 
 
 def buildtemplate(exp, context):
@@ -443,7 +443,7 @@
         f = context._funcs[n]
         args = _buildfuncargs(exp[1], context, methods, n, f._argspec)
         return (f, args)
-    raise error.ParseError(_("unknown function '%s'") % n)
+    raise error.ParseError(_(b"unknown function '%s'") % n)
 
 
 def buildmap(exp, context):
@@ -478,10 +478,10 @@
     if n in context._filters:
         args = _buildfuncargs(exp[2], context, exprmethods, n, argspec=None)
         if len(args) != 1:
-            raise error.ParseError(_("filter %s expects one argument") % n)
+            raise error.ParseError(_(b"filter %s expects one argument") % n)
         f = context._filters[n]
         return (templateutil.runfilter, (args[0], f))
-    raise error.ParseError(_("unknown function '%s'") % n)
+    raise error.ParseError(_(b"unknown function '%s'") % n)
 
 
 def _buildfuncargs(exp, context, curmethods, funcname, argspec):
@@ -518,8 +518,8 @@
         getlist(exp),
         funcname,
         argspec,
-        keyvaluenode='keyvalue',
-        keynode='symbol',
+        keyvaluenode=b'keyvalue',
+        keynode=b'symbol',
     )
     compargs = util.sortdict()
     if varkey:
@@ -531,54 +531,54 @@
 
 
 def buildkeyvaluepair(exp, content):
-    raise error.ParseError(_("can't use a key-value pair in this context"))
+    raise error.ParseError(_(b"can't use a key-value pair in this context"))
 
 
 def buildlist(exp, context):
     raise error.ParseError(
-        _("can't use a list in this context"),
-        hint=_('check place of comma and parens'),
+        _(b"can't use a list in this context"),
+        hint=_(b'check place of comma and parens'),
     )
 
 
 # methods to interpret function arguments or inner expressions (e.g. {_(x)})
 exprmethods = {
-    "integer": lambda e, c: (templateutil.runinteger, e[1]),
-    "string": lambda e, c: (templateutil.runstring, e[1]),
-    "symbol": lambda e, c: (templateutil.runsymbol, e[1]),
-    "template": buildtemplate,
-    "group": lambda e, c: compileexp(e[1], c, exprmethods),
-    ".": buildmember,
-    "|": buildfilter,
-    "%": buildmap,
-    "func": buildfunc,
-    "keyvalue": buildkeyvaluepair,
-    "list": buildlist,
-    "+": lambda e, c: buildarithmetic(e, c, lambda a, b: a + b),
-    "-": lambda e, c: buildarithmetic(e, c, lambda a, b: a - b),
-    "negate": buildnegate,
-    "*": lambda e, c: buildarithmetic(e, c, lambda a, b: a * b),
-    "/": lambda e, c: buildarithmetic(e, c, lambda a, b: a // b),
+    b"integer": lambda e, c: (templateutil.runinteger, e[1]),
+    b"string": lambda e, c: (templateutil.runstring, e[1]),
+    b"symbol": lambda e, c: (templateutil.runsymbol, e[1]),
+    b"template": buildtemplate,
+    b"group": lambda e, c: compileexp(e[1], c, exprmethods),
+    b".": buildmember,
+    b"|": buildfilter,
+    b"%": buildmap,
+    b"func": buildfunc,
+    b"keyvalue": buildkeyvaluepair,
+    b"list": buildlist,
+    b"+": lambda e, c: buildarithmetic(e, c, lambda a, b: a + b),
+    b"-": lambda e, c: buildarithmetic(e, c, lambda a, b: a - b),
+    b"negate": buildnegate,
+    b"*": lambda e, c: buildarithmetic(e, c, lambda a, b: a * b),
+    b"/": lambda e, c: buildarithmetic(e, c, lambda a, b: a // b),
 }
 
 # methods to interpret top-level template (e.g. {x}, {x|_}, {x % "y"})
 methods = exprmethods.copy()
-methods["integer"] = exprmethods["symbol"]  # '{1}' as variable
+methods[b"integer"] = exprmethods[b"symbol"]  # '{1}' as variable
 
 
 class _aliasrules(parser.basealiasrules):
     """Parsing and expansion rule set of template aliases"""
 
-    _section = _('template alias')
+    _section = _(b'template alias')
     _parse = staticmethod(_parseexpr)
 
     @staticmethod
     def _trygetfunc(tree):
         """Return (name, args) if tree is func(...) or ...|filter; otherwise
         None"""
-        if tree[0] == 'func' and tree[1][0] == 'symbol':
+        if tree[0] == b'func' and tree[1][0] == b'symbol':
             return tree[1][1], getlist(tree[2])
-        if tree[0] == '|' and tree[2][0] == 'symbol':
+        if tree[0] == b'|' and tree[2][0] == b'symbol':
             return tree[2][1], [tree[1]]
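_trygetfunc() above normalizes the two spellings of a call: '{x|f}' and '{f(x)}' both reduce to a function name and an argument list. A standalone sketch, with getlist() simplified to the single-argument case:

    def trygetfunc(tree):
        if tree[0] == b'func' and tree[1][0] == b'symbol':
            return tree[1][1], [tree[2]]
        if tree[0] == b'|' and tree[2][0] == b'symbol':
            return tree[2][1], [tree[1]]

    print(trygetfunc((b'func', (b'symbol', b'f'), (b'symbol', b'x'))))
    print(trygetfunc((b'|', (b'symbol', b'x'), (b'symbol', b'f'))))
    # both print (b'f', [(b'symbol', b'x')])
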
 
 
@@ -593,7 +593,7 @@
 
 def unquotestring(s):
     '''unwrap quotes if any; otherwise return the string unmodified'''
-    if len(s) < 2 or s[0] not in "'\"" or s[0] != s[-1]:
+    if len(s) < 2 or s[0] not in b"'\"" or s[0] != s[-1]:
         return s
     return s[1:-1]
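unquotestring() is one of the places where byteification needs a second look: indexing bytes on Python 3 yields an int, not a one-character string. The code stays correct because membership of an int in a bytes object tests byte values, and both sides of the equality are ints. A standalone check:

    s = b"'quoted'"
    print(s[0])            # 39, an int on Python 3
    print(s[0] in b"'\"")  # True: int-in-bytes compares byte values
    print(s[0] == s[-1])   # True: int == int
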
 
@@ -721,7 +721,7 @@
         v = self._resources.lookup(mapping, key)
         if v is None:
             raise templateutil.ResourceUnavailable(
-                _('template resource not available: %s') % key
+                _(b'template resource not available: %s') % key
             )
         return v
 
@@ -783,36 +783,36 @@
 def stylelist():
     paths = templatepaths()
     if not paths:
-        return _('no templates found, try `hg debuginstall` for more info')
+        return _(b'no templates found, try `hg debuginstall` for more info')
     dirlist = os.listdir(paths[0])
     stylelist = []
     for file in dirlist:
-        split = file.split(".")
-        if split[-1] in ('orig', 'rej'):
+        split = file.split(b".")
+        if split[-1] in (b'orig', b'rej'):
             continue
-        if split[0] == "map-cmdline":
+        if split[0] == b"map-cmdline":
             stylelist.append(split[1])
-    return ", ".join(sorted(stylelist))
+    return b", ".join(sorted(stylelist))
 
 
 def _readmapfile(mapfile):
     """Load template elements from the given map file"""
     if not os.path.exists(mapfile):
         raise error.Abort(
-            _("style '%s' not found") % mapfile,
-            hint=_("available styles: %s") % stylelist(),
+            _(b"style '%s' not found") % mapfile,
+            hint=_(b"available styles: %s") % stylelist(),
         )
 
     base = os.path.dirname(mapfile)
     conf = config.config(includepaths=templatepaths())
-    conf.read(mapfile, remap={'': 'templates'})
+    conf.read(mapfile, remap={b'': b'templates'})
 
     cache = {}
     tmap = {}
     aliases = []
 
-    val = conf.get('templates', '__base__')
-    if val and val[0] not in "'\"":
+    val = conf.get(b'templates', b'__base__')
+    if val and val[0] not in b"'\"":
         # treat as a pointer to a base class for this style
         path = util.normpath(os.path.join(base, val))
 
@@ -823,27 +823,27 @@
                 if os.path.isfile(p2):
                     path = p2
                     break
-                p3 = util.normpath(os.path.join(p2, "map"))
+                p3 = util.normpath(os.path.join(p2, b"map"))
                 if os.path.isfile(p3):
                     path = p3
                     break
 
         cache, tmap, aliases = _readmapfile(path)
 
-    for key, val in conf['templates'].items():
+    for key, val in conf[b'templates'].items():
         if not val:
             raise error.ParseError(
-                _('missing value'), conf.source('templates', key)
+                _(b'missing value'), conf.source(b'templates', key)
             )
-        if val[0] in "'\"":
+        if val[0] in b"'\"":
             if val[0] != val[-1]:
                 raise error.ParseError(
-                    _('unmatched quotes'), conf.source('templates', key)
+                    _(b'unmatched quotes'), conf.source(b'templates', key)
                 )
             cache[key] = unquotestring(val)
-        elif key != '__base__':
+        elif key != b'__base__':
             tmap[key] = os.path.join(base, val)
-    aliases.extend(conf['templatealias'].items())
+    aliases.extend(conf[b'templatealias'].items())
     return cache, tmap, aliases
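_readmapfile() parses a sectionless config whose keys are remapped into [templates]: a quoted value is an inline template (unwrapped by unquotestring()), an unquoted value names a template file relative to the map file, and a bare __base__ points at a style to inherit from. A hypothetical map file illustrating the three cases:

    # inherit everything not overridden here from a base style
    __base__ = map-cmdline.default
    # unquoted value: read the template from this file
    changeset = changeset.tmpl
    # quoted value: used inline after unquoting
    header = 'header text\n'
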
 
 
@@ -867,10 +867,10 @@
                 self.cache[t] = util.readfile(self._map[t])
             except KeyError as inst:
                 raise templateutil.TemplateNotFound(
-                    _('"%s" not in template map') % inst.args[0]
+                    _(b'"%s" not in template map') % inst.args[0]
                 )
             except IOError as inst:
-                reason = _('template file %s: %s') % (
+                reason = _(b'template file %s: %s') % (
                     self._map[t],
                     stringutil.forcebytestr(inst.args[1]),
                 )
@@ -887,7 +887,7 @@
         if not tree:
             return
         op = tree[0]
-        if op == 'symbol':
+        if op == b'symbol':
             s = tree[1]
             if s in syms[0]:
                 return  # avoid recursion: s -> cache[s] -> s
@@ -896,14 +896,14 @@
                 # s may be a reference for named template
                 self._findsymbolsused(self.load(s), syms)
             return
-        if op in {'integer', 'string'}:
+        if op in {b'integer', b'string'}:
             return
         # '{arg|func}' == '{func(arg)}'
-        if op == '|':
+        if op == b'|':
             syms[1].add(getsymbol(tree[2]))
             self._findsymbolsused(tree[1], syms)
             return
-        if op == 'func':
+        if op == b'func':
             syms[1].add(getsymbol(tree[1]))
             self._findsymbolsused(tree[2], syms)
             return
@@ -997,7 +997,7 @@
 
         This may load additional templates from the map file.
         """
-        return self.symbolsused('')
+        return self.symbolsused(b'')
 
     def symbolsused(self, t):
         """Look up (keywords, filters/functions) referenced from the name
@@ -1009,7 +1009,7 @@
 
     def renderdefault(self, mapping):
         """Render the default unnamed template and return result as string"""
-        return self.render('', mapping)
+        return self.render(b'', mapping)
 
     def render(self, t, mapping):
         """Render the specified named template and return result as string"""
@@ -1028,7 +1028,7 @@
 
 def templatepaths():
     '''return locations used for template files.'''
-    pathsrel = ['templates']
+    pathsrel = [b'templates']
     paths = [os.path.normpath(os.path.join(util.datapath, f)) for f in pathsrel]
     return [p for p in paths if os.path.isdir(p)]
 
@@ -1069,8 +1069,8 @@
             and pycompat.osaltsep in style
         ):
             continue
-        locations = [os.path.join(style, 'map'), 'map-' + style]
-        locations.append('map')
+        locations = [os.path.join(style, b'map'), b'map-' + style]
+        locations.append(b'map')
 
         for path in paths:
             for location in locations:
@@ -1078,4 +1078,4 @@
                 if os.path.isfile(mapfile):
                     return style, mapfile
 
-    raise RuntimeError("No hgweb templates found in %r" % paths)
+    raise RuntimeError(b"No hgweb templates found in %r" % paths)
--- a/mercurial/templateutil.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/templateutil.py	Sun Oct 06 09:48:39 2019 -0400
@@ -130,7 +130,7 @@
 
     def getmember(self, context, mapping, key):
         raise error.ParseError(
-            _('%r is not a dictionary') % pycompat.bytestr(self._value)
+            _(b'%r is not a dictionary') % pycompat.bytestr(self._value)
         )
 
     def getmin(self, context, mapping):
@@ -141,17 +141,17 @@
 
     def _getby(self, context, mapping, func):
         if not self._value:
-            raise error.ParseError(_('empty string'))
+            raise error.ParseError(_(b'empty string'))
         return func(pycompat.iterbytestr(self._value))
 
     def filter(self, context, mapping, select):
         raise error.ParseError(
-            _('%r is not filterable') % pycompat.bytestr(self._value)
+            _(b'%r is not filterable') % pycompat.bytestr(self._value)
         )
 
     def itermaps(self, context):
         raise error.ParseError(
-            _('%r is not iterable of mappings') % pycompat.bytestr(self._value)
+            _(b'%r is not iterable of mappings') % pycompat.bytestr(self._value)
         )
 
     def join(self, context, mapping, sep):
@@ -174,27 +174,27 @@
         self._value = value
 
     def contains(self, context, mapping, item):
-        raise error.ParseError(_("%r is not iterable") % self._value)
+        raise error.ParseError(_(b"%r is not iterable") % self._value)
 
     def getmember(self, context, mapping, key):
-        raise error.ParseError(_('%r is not a dictionary') % self._value)
+        raise error.ParseError(_(b'%r is not a dictionary') % self._value)
 
     def getmin(self, context, mapping):
-        raise error.ParseError(_("%r is not iterable") % self._value)
+        raise error.ParseError(_(b"%r is not iterable") % self._value)
 
     def getmax(self, context, mapping):
-        raise error.ParseError(_("%r is not iterable") % self._value)
+        raise error.ParseError(_(b"%r is not iterable") % self._value)
 
     def filter(self, context, mapping, select):
-        raise error.ParseError(_("%r is not iterable") % self._value)
+        raise error.ParseError(_(b"%r is not iterable") % self._value)
 
     def itermaps(self, context):
         raise error.ParseError(
-            _('%r is not iterable of mappings') % self._value
+            _(b'%r is not iterable of mappings') % self._value
         )
 
     def join(self, context, mapping, sep):
-        raise error.ParseError(_('%r is not iterable') % self._value)
+        raise error.ParseError(_(b'%r is not iterable') % self._value)
 
     def show(self, context, mapping):
         if self._value is None:
@@ -216,35 +216,35 @@
 class date(mappable, wrapped):
     """Wrapper for date tuple"""
 
-    def __init__(self, value, showfmt='%d %d'):
+    def __init__(self, value, showfmt=b'%d %d'):
         # value may be (float, int), but public interface shouldn't support
         # floating-point timestamp
         self._unixtime, self._tzoffset = map(int, value)
         self._showfmt = showfmt
 
     def contains(self, context, mapping, item):
-        raise error.ParseError(_('date is not iterable'))
+        raise error.ParseError(_(b'date is not iterable'))
 
     def getmember(self, context, mapping, key):
-        raise error.ParseError(_('date is not a dictionary'))
+        raise error.ParseError(_(b'date is not a dictionary'))
 
     def getmin(self, context, mapping):
-        raise error.ParseError(_('date is not iterable'))
+        raise error.ParseError(_(b'date is not iterable'))
 
     def getmax(self, context, mapping):
-        raise error.ParseError(_('date is not iterable'))
+        raise error.ParseError(_(b'date is not iterable'))
 
     def filter(self, context, mapping, select):
-        raise error.ParseError(_('date is not iterable'))
+        raise error.ParseError(_(b'date is not iterable'))
 
     def join(self, context, mapping, sep):
-        raise error.ParseError(_("date is not iterable"))
+        raise error.ParseError(_(b"date is not iterable"))
 
     def show(self, context, mapping):
         return self._showfmt % (self._unixtime, self._tzoffset)
 
     def tomap(self, context):
-        return {'unixtime': self._unixtime, 'tzoffset': self._tzoffset}
+        return {b'unixtime': self._unixtime, b'tzoffset': self._tzoffset}
 
     def tobool(self, context, mapping):
         return True
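For reference, the default showfmt renders the same pair that tomap() exposes; with hypothetical values:

    unixtime, tzoffset = 1570365119, 14400
    print(b'%d %d' % (unixtime, tzoffset))  # b'1570365119 14400'
    # the tomap() view of the same value:
    # {b'unixtime': 1570365119, b'tzoffset': 14400}
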
@@ -278,8 +278,8 @@
 
     def getmember(self, context, mapping, key):
         # TODO: maybe split hybrid list/dict types?
-        if not util.safehasattr(self._values, 'get'):
-            raise error.ParseError(_('not a dictionary'))
+        if not util.safehasattr(self._values, b'get'):
+            raise error.ParseError(_(b'not a dictionary'))
         key = unwrapastype(context, mapping, key, self._keytype)
         return self._wrapvalue(key, self._values.get(key))
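Byteifying attribute names such as b'get' keeps working here only because util.safehasattr() goes through Mercurial's wrapped getattr(), which coerces bytes names to str on Python 3. A simplified standalone equivalent, with sysstr() inlined as an assumption about pycompat's latin-1 decoding:

    _notset = object()

    def _sysstr(s):
        return s.decode('latin-1') if isinstance(s, bytes) else s

    def safehasattr(thing, attr):
        return getattr(thing, _sysstr(attr), _notset) is not _notset

    print(safehasattr({}, b'get'))  # True: dicts have a get() method
    print(safehasattr([], b'get'))  # False: lists do not
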
 
@@ -291,20 +291,20 @@
 
     def _getby(self, context, mapping, func):
         if not self._values:
-            raise error.ParseError(_('empty sequence'))
+            raise error.ParseError(_(b'empty sequence'))
         val = func(self._values)
         return self._wrapvalue(val, val)
 
     def _wrapvalue(self, key, val):
         if val is None:
             return
-        if util.safehasattr(val, '_makemap'):
+        if util.safehasattr(val, b'_makemap'):
             # a nested hybrid list/dict, which has its own way of map operation
             return val
         return hybriditem(None, key, val, self._makemap)
 
     def filter(self, context, mapping, select):
-        if util.safehasattr(self._values, 'get'):
+        if util.safehasattr(self._values, b'get'):
             values = {
                 k: v
                 for k, v in self._values.iteritems()
@@ -327,7 +327,7 @@
         # TODO: switch gen to (context, mapping) API?
         gen = self._gen
         if gen is None:
-            return self.join(context, mapping, ' ')
+            return self.join(context, mapping, b' ')
         if callable(gen):
             return gen()
         return gen
@@ -338,7 +338,7 @@
     def tovalue(self, context, mapping):
         # TODO: make it non-recursive for trivial lists/dicts
         xs = self._values
-        if util.safehasattr(xs, 'get'):
+        if util.safehasattr(xs, b'get'):
             return {
                 k: unwrapvalue(context, mapping, v) for k, v in xs.iteritems()
             }
@@ -413,28 +413,30 @@
     Template mappings may be nested.
     """
 
-    def __init__(self, name=None, tmpl=None, sep=''):
+    def __init__(self, name=None, tmpl=None, sep=b''):
         if name is not None and tmpl is not None:
-            raise error.ProgrammingError('name and tmpl are mutually exclusive')
+            raise error.ProgrammingError(
+                b'name and tmpl are mutually exclusive'
+            )
         self._name = name
         self._tmpl = tmpl
         self._defaultsep = sep
 
     def contains(self, context, mapping, item):
-        raise error.ParseError(_('not comparable'))
+        raise error.ParseError(_(b'not comparable'))
 
     def getmember(self, context, mapping, key):
-        raise error.ParseError(_('not a dictionary'))
+        raise error.ParseError(_(b'not a dictionary'))
 
     def getmin(self, context, mapping):
-        raise error.ParseError(_('not comparable'))
+        raise error.ParseError(_(b'not comparable'))
 
     def getmax(self, context, mapping):
-        raise error.ParseError(_('not comparable'))
+        raise error.ParseError(_(b'not comparable'))
 
     def filter(self, context, mapping, select):
         # implement if necessary; we'll need a wrapped type for a mapping dict
-        raise error.ParseError(_('not filterable without template'))
+        raise error.ParseError(_(b'not filterable without template'))
 
     def join(self, context, mapping, sep):
         mapsiter = _iteroverlaymaps(context, mapping, self.itermaps(context))
@@ -443,7 +445,7 @@
         elif self._tmpl:
             itemiter = (context.expand(self._tmpl, m) for m in mapsiter)
         else:
-            raise error.ParseError(_('not displayable without template'))
+            raise error.ParseError(_(b'not displayable without template'))
         return joinitems(itemiter, sep)
 
     def show(self, context, mapping):
@@ -472,7 +474,7 @@
     mapping dicts.
     """
 
-    def __init__(self, make, args=(), name=None, tmpl=None, sep=''):
+    def __init__(self, make, args=(), name=None, tmpl=None, sep=b''):
         super(mappinggenerator, self).__init__(name, tmpl, sep)
         self._make = make
         self._args = args
@@ -487,7 +489,7 @@
 class mappinglist(_mappingsequence):
     """Wrapper for list of template mappings"""
 
-    def __init__(self, mappings, name=None, tmpl=None, sep=''):
+    def __init__(self, mappings, name=None, tmpl=None, sep=b''):
         super(mappinglist, self).__init__(name, tmpl, sep)
         self._mappings = mappings
 
@@ -556,7 +558,7 @@
         return self._make(context, *self._args)
 
     def getmember(self, context, mapping, key):
-        raise error.ParseError(_('not a dictionary'))
+        raise error.ParseError(_(b'not a dictionary'))
 
     def getmin(self, context, mapping):
         return self._getby(context, mapping, min)
@@ -567,7 +569,7 @@
     def _getby(self, context, mapping, func):
         xs = self.tovalue(context, mapping)
         if not xs:
-            raise error.ParseError(_('empty sequence'))
+            raise error.ParseError(_(b'empty sequence'))
         return func(xs)
 
     @staticmethod
@@ -582,13 +584,13 @@
         return mappedgenerator(self._filteredgen, args)
 
     def itermaps(self, context):
-        raise error.ParseError(_('list of strings is not mappable'))
+        raise error.ParseError(_(b'list of strings is not mappable'))
 
     def join(self, context, mapping, sep):
         return joinitems(self._gen(context), sep)
 
     def show(self, context, mapping):
-        return self.join(context, mapping, '')
+        return self.join(context, mapping, b'')
 
     def tobool(self, context, mapping):
         return _nonempty(self._gen(context))
@@ -597,11 +599,11 @@
         return [stringify(context, mapping, x) for x in self._gen(context)]
 
 
-def hybriddict(data, key='key', value='value', fmt=None, gen=None):
+def hybriddict(data, key=b'key', value=b'value', fmt=None, gen=None):
     """Wrap data to support both dict-like and string-like operations"""
     prefmt = pycompat.identity
     if fmt is None:
-        fmt = '%s=%s'
+        fmt = b'%s=%s'
         prefmt = pycompat.bytestr
     return hybrid(
         gen,
@@ -615,7 +617,7 @@
     """Wrap data to support both list-like and string-like operations"""
     prefmt = pycompat.identity
     if fmt is None:
-        fmt = '%s'
+        fmt = b'%s'
         prefmt = pycompat.bytestr
     return hybrid(gen, data, lambda x: {name: x}, lambda x: fmt % prefmt(x))
 
@@ -625,11 +627,11 @@
     mapping,
     name,
     data,
-    key='key',
-    value='value',
+    key=b'key',
+    value=b'value',
     fmt=None,
     plural=None,
-    separator=' ',
+    separator=b' ',
 ):
     """Wrap data like hybriddict(), but also supports old-style list template
 
@@ -649,7 +651,7 @@
     element=None,
     fmt=None,
     plural=None,
-    separator=' ',
+    separator=b' ',
 ):
     """Wrap data like hybridlist(), but also supports old-style list template
 
@@ -668,14 +670,14 @@
     keywords.
     """
     # no need to provide {path} to old-style list template
-    c = [{'name': k, 'source': v} for k, v in copies]
-    f = _showcompatlist(context, mapping, name, c, plural='file_copies')
+    c = [{b'name': k, b'source': v} for k, v in copies]
+    f = _showcompatlist(context, mapping, name, c, plural=b'file_copies')
     copies = util.sortdict(copies)
     return hybrid(
         f,
         copies,
-        lambda k: {'name': k, 'path': k, 'source': copies[k]},
-        lambda k: '%s (%s)' % (k, copies[k]),
+        lambda k: {b'name': k, b'path': k, b'source': copies[k]},
+        lambda k: b'%s (%s)' % (k, copies[k]),
     )
 
 
@@ -687,10 +689,14 @@
     keywords.
     """
     f = _showcompatlist(context, mapping, name, files)
-    return hybrid(f, files, lambda x: {'file': x, 'path': x}, pycompat.identity)
+    return hybrid(
+        f, files, lambda x: {b'file': x, b'path': x}, pycompat.identity
+    )
 
 
-def _showcompatlist(context, mapping, name, values, plural=None, separator=' '):
+def _showcompatlist(
+    context, mapping, name, values, plural=None, separator=b' '
+):
     """Return a generator that renders old-style list template
 
     name is the name of the key in the template map.
@@ -713,9 +719,9 @@
     expand 'end_foos'.
     """
     if not plural:
-        plural = name + 's'
+        plural = name + b's'
     if not values:
-        noname = 'no_' + plural
+        noname = b'no_' + plural
         if context.preload(noname):
             yield context.process(noname, mapping)
         return
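A worked example of the template names derived above for name=b'file', matching the docstring's 'foos' convention:

    name = b'file'
    plural = name + b's'       # b'files'
    print(b'no_' + plural)     # b'no_files'
    print(b'start_' + plural)  # b'start_files'
    print(b'last_' + name)     # b'last_file'
    print(b'end_' + plural)    # b'end_files'
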
@@ -728,7 +734,7 @@
                 r.update(mapping)
                 yield r
         return
-    startname = 'start_' + plural
+    startname = b'start_' + plural
     if context.preload(startname):
         yield context.process(startname, mapping)
 
@@ -749,7 +755,7 @@
         vmapping = context.overlaymap(mapping, vmapping)
         return context.process(tag, vmapping)
 
-    lastname = 'last_' + name
+    lastname = b'last_' + name
     if context.preload(lastname):
         last = values.pop()
     else:
@@ -758,7 +764,7 @@
         yield one(v)
     if last is not None:
         yield one(last, tag=lastname)
-    endname = 'end_' + plural
+    endname = b'end_' + plural
     if context.preload(endname):
         yield context.process(endname, mapping)
 
@@ -773,12 +779,12 @@
         # We can only hit this on Python 3, and it's here to guard
         # against infinite recursion.
         raise error.ProgrammingError(
-            'Mercurial IO including templates is done'
-            ' with bytes, not strings, got %r' % thing
+            b'Mercurial IO including templates is done'
+            b' with bytes, not strings, got %r' % thing
         )
     elif thing is None:
         pass
-    elif not util.safehasattr(thing, '__iter__'):
+    elif not util.safehasattr(thing, b'__iter__'):
         yield pycompat.bytestr(thing)
     else:
         for i in thing:
@@ -788,7 +794,7 @@
                 yield i
             elif i is None:
                 pass
-            elif not util.safehasattr(i, '__iter__'):
+            elif not util.safehasattr(i, b'__iter__'):
                 yield pycompat.bytestr(i)
             else:
                 for j in flatten(context, mapping, i):
@@ -895,7 +901,7 @@
     try:
         return dateutil.parsedate(thing)
     except AttributeError:
-        raise error.ParseError(err or _('not a date tuple nor a string'))
+        raise error.ParseError(err or _(b'not a date tuple nor a string'))
     except error.ParseError:
         if not err:
             raise
@@ -912,7 +918,7 @@
     try:
         return int(thing)
     except (TypeError, ValueError):
-        raise error.ParseError(err or _('not an integer'))
+        raise error.ParseError(err or _(b'not an integer'))
 
 
 def evalstring(context, mapping, arg):
@@ -943,7 +949,7 @@
     try:
         f = _unwrapfuncbytype[typ]
     except KeyError:
-        raise error.ProgrammingError('invalid type specified: %r' % typ)
+        raise error.ProgrammingError(b'invalid type specified: %r' % typ)
     return f(context, mapping, thing)
 
 
@@ -957,12 +963,12 @@
 
 def _recursivesymbolblocker(key):
     def showrecursion(context, mapping):
-        raise error.Abort(_("recursive reference '%s' in template") % key)
+        raise error.Abort(_(b"recursive reference '%s' in template") % key)
 
     return showrecursion
 
 
-def runsymbol(context, mapping, key, default=''):
+def runsymbol(context, mapping, key, default=b''):
     v = context.symbol(mapping, key)
     if v is None:
         # put poison to cut recursion. we can't move this to parsing phase
@@ -1003,8 +1009,8 @@
     fn = pycompat.sysbytes(filt.__name__)
     sym = findsymbolicname(arg)
     if not sym:
-        return _("incompatible use of template filter '%s'") % fn
-    return _("template filter '%s' is not compatible with keyword '%s'") % (
+        return _(b"incompatible use of template filter '%s'") % fn
+    return _(b"template filter '%s' is not compatible with keyword '%s'") % (
         fn,
         sym,
     )
@@ -1015,7 +1021,7 @@
     of partial mappings to override the original"""
     for i, nm in enumerate(newmappings):
         lm = context.overlaymap(origmapping, nm)
-        lm['index'] = i
+        lm[b'index'] = i
         yield lm
 
 
@@ -1026,7 +1032,7 @@
         sym = findsymbolicname(darg)
         if not sym:
             raise
-        hint = _("keyword '%s' does not support map operation") % sym
+        hint = _(b"keyword '%s' does not support map operation") % sym
         raise error.ParseError(bytes(err), hint=hint)
     for lm in _iteroverlaymaps(context, mapping, diter):
         yield evalrawexp(context, lm, targ)
@@ -1050,13 +1056,13 @@
         sym = findsymbolicname(darg)
         if not sym:
             raise
-        hint = _("keyword '%s' does not support member operation") % sym
+        hint = _(b"keyword '%s' does not support member operation") % sym
         raise error.ParseError(bytes(err), hint=hint)
 
 
 def runnegate(context, mapping, data):
     data = evalinteger(
-        context, mapping, data, _('negation needs an integer argument')
+        context, mapping, data, _(b'negation needs an integer argument')
     )
     return -data
 
@@ -1064,15 +1070,15 @@
 def runarithmetic(context, mapping, data):
     func, left, right = data
     left = evalinteger(
-        context, mapping, left, _('arithmetic only defined on integers')
+        context, mapping, left, _(b'arithmetic only defined on integers')
     )
     right = evalinteger(
-        context, mapping, right, _('arithmetic only defined on integers')
+        context, mapping, right, _(b'arithmetic only defined on integers')
     )
     try:
         return func(left, right)
     except ZeroDivisionError:
-        raise error.Abort(_('division by zero is not defined'))
+        raise error.Abort(_(b'division by zero is not defined'))
 
 
 def joinitems(itemiter, sep):
--- a/mercurial/testing/storage.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/testing/storage.py	Sun Oct 06 09:48:39 2019 -0400
@@ -40,8 +40,8 @@
 
     def testempty(self):
         f = self._makefilefn()
-        self.assertEqual(len(f), 0, 'new file store has 0 length by default')
-        self.assertEqual(list(f), [], 'iter yields nothing by default')
+        self.assertEqual(len(f), 0, b'new file store has 0 length by default')
+        self.assertEqual(list(f), [], b'iter yields nothing by default')
 
         gen = iter(f)
         with self.assertRaises(StopIteration):
@@ -414,7 +414,7 @@
         self.assertEqual(f.storageinfo(), {})
         self.assertEqual(
             f.storageinfo(revisionscount=True, trackedsize=True),
-            {'revisionscount': 0, 'trackedsize': 0},
+            {b'revisionscount': 0, b'trackedsize': 0},
         )
 
         self.assertEqual(f.size(nullrev), 0)
@@ -472,7 +472,7 @@
         self.assertEqual(f.storageinfo(), {})
         self.assertEqual(
             f.storageinfo(revisionscount=True, trackedsize=True),
-            {'revisionscount': 1, 'trackedsize': len(fulltext)},
+            {b'revisionscount': 1, b'trackedsize': len(fulltext)},
         )
 
         self.assertEqual(f.size(0), len(fulltext))
@@ -541,8 +541,10 @@
         self.assertEqual(
             f.storageinfo(revisionscount=True, trackedsize=True),
             {
-                'revisionscount': 3,
-                'trackedsize': len(fulltext0) + len(fulltext1) + len(fulltext2),
+                b'revisionscount': 3,
+                b'trackedsize': len(fulltext0)
+                + len(fulltext1)
+                + len(fulltext2),
             },
         )
 
@@ -679,17 +681,19 @@
 
         # Unrecognized nodesorder value raises ProgrammingError.
         with self.assertRaises(error.ProgrammingError):
-            list(f.emitrevisions([], nodesorder='bad'))
+            list(f.emitrevisions([], nodesorder=b'bad'))
 
         # nodesorder=storage is recognized. But we can't test it thoroughly
         # because behavior is storage-dependent.
-        res = list(f.emitrevisions([node2, node1, node0], nodesorder='storage'))
+        res = list(
+            f.emitrevisions([node2, node1, node0], nodesorder=b'storage')
+        )
         self.assertEqual(len(res), 3)
         self.assertEqual({o.node for o in res}, {node0, node1, node2})
 
         # nodesorder=nodes forces the order.
         gen = f.emitrevisions(
-            [node2, node0], nodesorder='nodes', revisiondata=True
+            [node2, node0], nodesorder=b'nodes', revisiondata=True
         )
 
         rev = next(gen)
--- a/mercurial/transaction.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/transaction.py	Sun Oct 06 09:48:39 2019 -0400
@@ -28,18 +28,20 @@
 # These are the file generators that should only be executed after the
 # finalizers are done, since they rely on the output of the finalizers (like
 # the changelog having been written).
-postfinalizegenerators = {'bookmarks', 'dirstate'}
+postfinalizegenerators = {b'bookmarks', b'dirstate'}
 
-gengroupall = 'all'
-gengroupprefinalize = 'prefinalize'
-gengrouppostfinalize = 'postfinalize'
+gengroupall = b'all'
+gengroupprefinalize = b'prefinalize'
+gengrouppostfinalize = b'postfinalize'
 
 
 def active(func):
     def _active(self, *args, **kwds):
         if self._count == 0:
             raise error.Abort(
-                _('cannot use transaction when it is already committed/aborted')
+                _(
+                    b'cannot use transaction when it is already committed/aborted'
+                )
             )
         return func(self, *args, **kwds)
 
@@ -58,21 +60,21 @@
 ):
     for f, o, _ignore in entries:
         if o or not unlink:
-            checkambig = checkambigfiles and (f, '') in checkambigfiles
+            checkambig = checkambigfiles and (f, b'') in checkambigfiles
             try:
-                fp = opener(f, 'a', checkambig=checkambig)
+                fp = opener(f, b'a', checkambig=checkambig)
                 if fp.tell() < o:
                     raise error.Abort(
                         _(
-                            "attempted to truncate %s to %d bytes, but it was "
-                            "already %d bytes\n"
+                            b"attempted to truncate %s to %d bytes, but it was "
+                            b"already %d bytes\n"
                         )
                         % (f, o, fp.tell())
                     )
                 fp.truncate(o)
                 fp.close()
             except IOError:
-                report(_("failed to truncate %s\n") % f)
+                report(_(b"failed to truncate %s\n") % f)
                 raise
         else:
             try:
@@ -84,7 +86,7 @@
     backupfiles = []
     for l, f, b, c in backupentries:
         if l not in vfsmap and c:
-            report("couldn't handle %s: unknown cache location %s\n" % (b, l))
+            report(b"couldn't handle %s: unknown cache location %s\n" % (b, l))
         vfs = vfsmap[l]
         try:
             if f and b:
@@ -95,7 +97,7 @@
                     util.copyfile(backuppath, filepath, checkambig=checkambig)
                     backupfiles.append(b)
                 except IOError:
-                    report(_("failed to recover %s\n") % f)
+                    report(_(b"failed to recover %s\n") % f)
             else:
                 target = f or b
                 try:
@@ -107,7 +109,7 @@
             if not c:
                 raise
 
-    backuppath = "%s.backupfiles" % journal
+    backuppath = b"%s.backupfiles" % journal
     if opener.exists(backuppath):
         opener.unlink(backuppath)
     opener.unlink(journal)
@@ -155,7 +157,7 @@
         self._opener = opener
         # a map to access file in various {location -> vfs}
         vfsmap = vfsmap.copy()
-        vfsmap[''] = opener  # set default value
+        vfsmap[b''] = opener  # set default value
         self._vfsmap = vfsmap
         self._after = after
         self._entries = []
@@ -186,7 +188,7 @@
 
         # a dict of arguments to be passed to hooks
         self.hookargs = {}
-        self._file = opener.open(self._journal, "w")
+        self._file = opener.open(self._journal, b"w")
 
         # a list of ('location', 'path', 'backuppath', cache) entries.
         # - if 'backuppath' is empty, no file existed at backup time
@@ -196,9 +198,9 @@
         # (cache is currently unused)
         self._backupentries = []
         self._backupmap = {}
-        self._backupjournal = "%s.backupfiles" % self._journal
-        self._backupsfile = opener.open(self._backupjournal, 'w')
-        self._backupsfile.write('%d\n' % version)
+        self._backupjournal = b"%s.backupfiles" % self._journal
+        self._backupsfile = opener.open(self._backupjournal, b'w')
+        self._backupsfile.write(b'%d\n' % version)
 
         if createmode is not None:
             opener.chmod(self._journal, createmode & 0o666)
@@ -265,11 +267,11 @@
         self._entries.append((file, offset, data))
         self._map[file] = len(self._entries) - 1
         # add enough data to the journal to do the truncate
-        self._file.write("%s\0%d\n" % (file, offset))
+        self._file.write(b"%s\0%d\n" % (file, offset))
         self._file.flush()
 
     @active
-    def addbackup(self, file, hardlink=True, location=''):
+    def addbackup(self, file, hardlink=True, location=b''):
         """Adds a backup of the file to the transaction
 
         Calling addbackup() creates a hardlink backup of the specified file
@@ -280,21 +282,21 @@
         * `hardlink`: use a hardlink to quickly create the backup
         """
         if self._queue:
-            msg = 'cannot use transaction.addbackup inside "group"'
+            msg = b'cannot use transaction.addbackup inside "group"'
             raise error.ProgrammingError(msg)
 
         if file in self._map or file in self._backupmap:
             return
         vfs = self._vfsmap[location]
         dirname, filename = vfs.split(file)
-        backupfilename = "%s.backup.%s" % (self._journal, filename)
+        backupfilename = b"%s.backup.%s" % (self._journal, filename)
         backupfile = vfs.reljoin(dirname, backupfilename)
         if vfs.exists(file):
             filepath = vfs.join(file)
             backuppath = vfs.join(backupfile)
             util.copyfile(filepath, backuppath, hardlink=hardlink)
         else:
-            backupfile = ''
+            backupfile = b''
 
         self._addbackupentry((location, file, backupfile, False))
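A worked example of the backup path derived above, with hypothetical journal and file names:

    journal, filename = b'journal', b'bookmarks'
    print(b"%s.backup.%s" % (journal, filename))  # b'journal.backup.bookmarks'
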
 
@@ -302,20 +304,22 @@
         """register a new backup entry and write it to disk"""
         self._backupentries.append(entry)
         self._backupmap[entry[1]] = len(self._backupentries) - 1
-        self._backupsfile.write("%s\0%s\0%s\0%d\n" % entry)
+        self._backupsfile.write(b"%s\0%s\0%s\0%d\n" % entry)
         self._backupsfile.flush()
 
     @active
-    def registertmp(self, tmpfile, location=''):
+    def registertmp(self, tmpfile, location=b''):
         """register a temporary transaction file
 
         Such files will be deleted when the transaction exits (on both
         failure and success).
         """
-        self._addbackupentry((location, '', tmpfile, False))
+        self._addbackupentry((location, b'', tmpfile, False))
 
     @active
-    def addfilegenerator(self, genid, filenames, genfunc, order=0, location=''):
+    def addfilegenerator(
+        self, genid, filenames, genfunc, order=0, location=b''
+    ):
         """add a function to generates some files at transaction commit
 
         The `genfunc` argument is a function capable of generating proper
@@ -348,7 +352,7 @@
         if genid in self._filegenerators:
             del self._filegenerators[genid]
 
-    def _generatefiles(self, suffix='', group=gengroupall):
+    def _generatefiles(self, suffix=b'', group=gengroupall):
         # write files registered for generation
         any = False
         for id, entry in sorted(self._filegenerators.iteritems()):
@@ -375,7 +379,7 @@
                         self.addbackup(name, location=location)
                         checkambig = (name, location) in self._checkambigfiles
                     files.append(
-                        vfs(name, 'w', atomictemp=True, checkambig=checkambig)
+                        vfs(name, b'w', atomictemp=True, checkambig=checkambig)
                     )
                 genfunc(*files)
                 for f in files:
@@ -406,7 +410,7 @@
             raise KeyError(file)
         index = self._map[file]
         self._entries[index] = (file, offset, data)
-        self._file.write("%s\0%d\n" % (file, offset))
+        self._file.write(b"%s\0%d\n" % (file, offset))
         self._file.flush()
 
     @active
@@ -448,7 +452,7 @@
             # remove callback since the data will have been flushed
             any = self._pendingcallback.pop(cat)(self)
             self._anypending = self._anypending or any
-        self._anypending |= self._generatefiles(suffix='.pending')
+        self._anypending |= self._generatefiles(suffix=b'.pending')
         return self._anypending
 
     @active
@@ -512,7 +516,7 @@
         for l, f, b, c in self._backupentries:
             if l not in self._vfsmap and c:
                 self._report(
-                    "couldn't remove %s: unknown cache location %s\n" % (b, l)
+                    b"couldn't remove %s: unknown cache location %s\n" % (b, l)
                 )
                 continue
             vfs = self._vfsmap[l]
@@ -524,7 +528,7 @@
                         raise
                     # Abort may be raised by a read-only opener
                     self._report(
-                        "couldn't remove %s: %s\n" % (vfs.join(b), inst)
+                        b"couldn't remove %s: %s\n" % (vfs.join(b), inst)
                     )
         self._entries = []
         self._writeundo()
@@ -538,7 +542,8 @@
         for l, _f, b, c in self._backupentries:
             if l not in self._vfsmap and c:
                 self._report(
-                    "couldn't remove %s: unknown cache location" "%s\n" % (b, l)
+                    b"couldn't remove %s: unknown cache location"
+                    b"%s\n" % (b, l)
                 )
                 continue
             vfs = self._vfsmap[l]
@@ -550,7 +555,7 @@
                         raise
                     # Abort may be raised by a read-only opener
                     self._report(
-                        "couldn't remove %s: %s\n" % (vfs.join(b), inst)
+                        b"couldn't remove %s: %s\n" % (vfs.join(b), inst)
                     )
         self._backupentries = []
         self._journal = None
@@ -577,19 +582,19 @@
         if self._undoname is None:
             return
         undobackupfile = self._opener.open(
-            "%s.backupfiles" % self._undoname, 'w'
+            b"%s.backupfiles" % self._undoname, b'w'
         )
-        undobackupfile.write('%d\n' % version)
+        undobackupfile.write(b'%d\n' % version)
         for l, f, b, c in self._backupentries:
             if not f:  # temporary file
                 continue
             if not b:
-                u = ''
+                u = b''
             else:
                 if l not in self._vfsmap and c:
                     self._report(
-                        "couldn't remove %s: unknown cache location"
-                        "%s\n" % (b, l)
+                        b"couldn't remove %s: unknown cache location"
+                        b"%s\n" % (b, l)
                     )
                     continue
                 vfs = self._vfsmap[l]
@@ -598,7 +603,7 @@
                 uname = name.replace(self._journal, self._undoname, 1)
                 u = vfs.reljoin(base, uname)
                 util.copyfile(vfs.join(b), vfs.join(u), hardlink=True)
-            undobackupfile.write("%s\0%s\0%s\0%d\n" % (l, f, u, c))
+            undobackupfile.write(b"%s\0%s\0%s\0%d\n" % (l, f, u, c))
         undobackupfile.close()
 
     def _abort(self):
@@ -615,7 +620,7 @@
                     self._opener.unlink(self._journal)
                 return
 
-            self._report(_("transaction abort!\n"))
+            self._report(_(b"transaction abort!\n"))
 
             try:
                 for cat in sorted(self._abortcallback):
@@ -632,11 +637,11 @@
                     False,
                     checkambigfiles=self._checkambigfiles,
                 )
-                self._report(_("rollback completed\n"))
+                self._report(_(b"rollback completed\n"))
             except BaseException as exc:
-                self._report(_("rollback failed - please run hg recover\n"))
+                self._report(_(b"rollback failed - please run hg recover\n"))
                 self._report(
-                    _("(failure reason: %s)\n") % stringutil.forcebytestr(exc)
+                    _(b"(failure reason: %s)\n") % stringutil.forcebytestr(exc)
                 )
         finally:
             self._journal = None
@@ -668,12 +673,14 @@
     fp.close()
     for l in lines:
         try:
-            f, o = l.split('\0')
+            f, o = l.split(b'\0')
             entries.append((f, int(o), None))
         except ValueError:
-            report(_("couldn't read journal entry %r!\n") % pycompat.bytestr(l))
+            report(
+                _(b"couldn't read journal entry %r!\n") % pycompat.bytestr(l)
+            )
 
-    backupjournal = "%s.backupfiles" % file
+    backupjournal = b"%s.backupfiles" % file
     if opener.exists(backupjournal):
         fp = opener.open(backupjournal)
         lines = fp.readlines()
@@ -684,13 +691,13 @@
                     if line:
                         # Shave off the trailing newline
                         line = line[:-1]
-                        l, f, b, c = line.split('\0')
+                        l, f, b, c = line.split(b'\0')
                         backupentries.append((l, f, b, bool(c)))
             else:
                 report(
                     _(
-                        "journal was created by a different version of "
-                        "Mercurial\n"
+                        b"journal was created by a different version of "
+                        b"Mercurial\n"
                     )
                 )
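Both the journal and its .backupfiles sidecar use NUL-delimited byte records; a round trip of the two-field "<file>\0<offset>" form parsed above, with hypothetical values:

    entry = b"%s\0%d\n" % (b'store/00changelog.i', 12345)
    f, o = entry.rstrip(b'\n').split(b'\0')
    print((f, int(o)))  # (b'store/00changelog.i', 12345)
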
 
--- a/mercurial/treediscovery.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/treediscovery.py	Sun Oct 06 09:48:39 2019 -0400
@@ -39,7 +39,7 @@
 
     if not heads:
         with remote.commandexecutor() as e:
-            heads = e.callcommand('heads', {}).result()
+            heads = e.callcommand(b'heads', {}).result()
 
     if repo.changelog.tip() == nullid:
         base.add(nullid)
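A hedged usage sketch of the commandexecutor pattern used throughout this function: command names and argument keys are wire-protocol identifiers, hence bytes, and callcommand() returns a future (remote is an assumed peer object):

    with remote.commandexecutor() as e:
        heads = e.callcommand(b'heads', {}).result()
    with remote.commandexecutor() as e:
        branches = e.callcommand(b'branches', {b'nodes': heads}).result()
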
@@ -49,7 +49,7 @@
 
     # assume we're closer to the tip than the root
     # and start by examining the heads
-    repo.ui.status(_("searching for changes\n"))
+    repo.ui.status(_(b"searching for changes\n"))
 
     unknown = []
     for h in heads:
@@ -63,14 +63,14 @@
 
     req = set(unknown)
     reqcnt = 0
-    progress = repo.ui.makeprogress(_('searching'), unit=_('queries'))
+    progress = repo.ui.makeprogress(_(b'searching'), unit=_(b'queries'))
 
     # search through remote branches
     # a 'branch' here is a linear segment of history, with four parts:
     # head, root, first parent, second parent
     # (a branch always has two parents (or none) by definition)
     with remote.commandexecutor() as e:
-        branches = e.callcommand('branches', {'nodes': unknown}).result()
+        branches = e.callcommand(b'branches', {b'nodes': unknown}).result()
 
     unknown = collections.deque(branches)
     while unknown:
@@ -80,15 +80,15 @@
             if n[0] in seen:
                 continue
 
-            repo.ui.debug("examining %s:%s\n" % (short(n[0]), short(n[1])))
+            repo.ui.debug(b"examining %s:%s\n" % (short(n[0]), short(n[1])))
             if n[0] == nullid:  # found the end of the branch
                 pass
             elif n in seenbranch:
-                repo.ui.debug("branch already found\n")
+                repo.ui.debug(b"branch already found\n")
                 continue
             elif n[1] and knownnode(n[1]):  # do we know the base?
                 repo.ui.debug(
-                    "found incomplete branch %s:%s\n"
+                    b"found incomplete branch %s:%s\n"
                     % (short(n[0]), short(n[1]))
                 )
                 search.append(n[0:2])  # schedule branch range for scanning
@@ -96,7 +96,7 @@
             else:
                 if n[1] not in seen and n[1] not in fetch:
                     if knownnode(n[2]) and knownnode(n[3]):
-                        repo.ui.debug("found new changeset %s\n" % short(n[1]))
+                        repo.ui.debug(b"found new changeset %s\n" % short(n[1]))
                         fetch.add(n[1])  # earliest unknown
                     for p in n[2:4]:
                         if knownnode(p):
@@ -112,17 +112,17 @@
             reqcnt += 1
             progress.increment()
             repo.ui.debug(
-                "request %d: %s\n" % (reqcnt, " ".join(map(short, r)))
+                b"request %d: %s\n" % (reqcnt, b" ".join(map(short, r)))
             )
             for p in pycompat.xrange(0, len(r), 10):
                 with remote.commandexecutor() as e:
                     branches = e.callcommand(
-                        'branches', {'nodes': r[p : p + 10],}
+                        b'branches', {b'nodes': r[p : p + 10],}
                     ).result()
 
                 for b in branches:
                     repo.ui.debug(
-                        "received %s:%s\n" % (short(b[0]), short(b[1]))
+                        b"received %s:%s\n" % (short(b[0]), short(b[1]))
                     )
                     unknown.append(b)
 
@@ -133,24 +133,24 @@
         progress.increment()
 
         with remote.commandexecutor() as e:
-            between = e.callcommand('between', {'pairs': search}).result()
+            between = e.callcommand(b'between', {b'pairs': search}).result()
 
         for n, l in zip(search, between):
             l.append(n[1])
             p = n[0]
             f = 1
             for i in l:
-                repo.ui.debug("narrowing %d:%d %s\n" % (f, len(l), short(i)))
+                repo.ui.debug(b"narrowing %d:%d %s\n" % (f, len(l), short(i)))
                 if knownnode(i):
                     if f <= 2:
                         repo.ui.debug(
-                            "found new branch changeset %s\n" % short(p)
+                            b"found new branch changeset %s\n" % short(p)
                         )
                         fetch.add(p)
                         base.add(i)
                     else:
                         repo.ui.debug(
-                            "narrowed branch search to %s:%s\n"
+                            b"narrowed branch search to %s:%s\n"
                             % (short(p), short(i))
                         )
                         newsearch.append((p, i))
@@ -161,22 +161,22 @@
     # sanity check our fetch list
     for f in fetch:
         if knownnode(f):
-            raise error.RepoError(_("already have changeset ") + short(f[:4]))
+            raise error.RepoError(_(b"already have changeset ") + short(f[:4]))
 
     base = list(base)
     if base == [nullid]:
         if force:
-            repo.ui.warn(_("warning: repository is unrelated\n"))
+            repo.ui.warn(_(b"warning: repository is unrelated\n"))
         else:
-            raise error.Abort(_("repository is unrelated"))
+            raise error.Abort(_(b"repository is unrelated"))
 
     repo.ui.debug(
-        "found new changesets starting at "
-        + " ".join([short(f) for f in fetch])
-        + "\n"
+        b"found new changesets starting at "
+        + b" ".join([short(f) for f in fetch])
+        + b"\n"
     )
 
     progress.complete()
-    repo.ui.debug("%d total queries\n" % reqcnt)
+    repo.ui.debug(b"%d total queries\n" % reqcnt)
 
     return base, list(fetch), heads
--- a/mercurial/txnutil.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/txnutil.py	Sun Oct 06 09:48:39 2019 -0400
@@ -16,7 +16,7 @@
     '''return whether 'root' may have pending changes, which are
     visible to this process.
     '''
-    return root == encoding.environ.get('HG_PENDING')
+    return root == encoding.environ.get(b'HG_PENDING')
 
 
 def trypending(root, vfs, filename, **kwargs):
@@ -29,7 +29,7 @@
     '''
     if mayhavepending(root):
         try:
-            return (vfs('%s.pending' % filename, **kwargs), True)
+            return (vfs(b'%s.pending' % filename, **kwargs), True)
         except IOError as inst:
             if inst.errno != errno.ENOENT:
                 raise
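A simplified standalone sketch of the lookup trypending() performs, with plain open() and a directory path standing in for the vfs (assumptions for illustration):

    import errno
    import os

    def trypending(root, directory, filename):
        if root == os.environ.get('HG_PENDING'):
            try:
                # prefer the in-progress '.pending' variant when visible
                fp = open(os.path.join(directory, filename + '.pending'), 'rb')
                return fp, True
            except IOError as inst:
                if inst.errno != errno.ENOENT:
                    raise
        return open(os.path.join(directory, filename), 'rb'), False
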
--- a/mercurial/ui.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/ui.py	Sun Oct 06 09:48:39 2019 -0400
@@ -46,7 +46,7 @@
 urlreq = util.urlreq
 
 # for use with str.translate(None, _keepalnum), to keep just alphanumerics
-_keepalnum = ''.join(
+_keepalnum = b''.join(
     c for c in map(pycompat.bytechr, range(256)) if not c.isalnum()
 )
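A standalone Python 3 check of what this table does: _keepalnum holds every non-alphanumeric byte, so passing it as the deletion argument of bytes.translate() keeps only alphanumerics:

    keepalnum = b''.join(
        bytes([i]) for i in range(256) if not bytes([i]).isalnum()
    )
    print(b'foo-bar_1!'.translate(None, keepalnum))  # b'foobar1'
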
 
@@ -79,7 +79,7 @@
 """
 
 samplehgrcs = {
-    'user': b"""# example user config (see 'hg help config' for more info)
+    b'user': b"""# example user config (see 'hg help config' for more info)
 [ui]
 # name and email, e.g.
 # username = Jane Doe <jdoe@example.com>
@@ -106,7 +106,7 @@
 # rebase =
 # uncommit =
 """,
-    'cloned': b"""# example repository config (see 'hg help config' for more info)
+    b'cloned': b"""# example repository config (see 'hg help config' for more info)
 [paths]
 default = %s
 
@@ -121,7 +121,7 @@
 # name and email (local to this repository, optional), e.g.
 # username = Jane Doe <jdoe@example.com>
 """,
-    'local': b"""# example repository config (see 'hg help config' for more info)
+    b'local': b"""# example repository config (see 'hg help config' for more info)
 [paths]
 # path aliases to other clones of this repo in URLs or filesystem paths
 # (see 'hg help config.paths' for more info)
@@ -135,7 +135,7 @@
 # name and email (local to this repository, optional), e.g.
 # username = Jane Doe <jdoe@example.com>
 """,
-    'global': b"""# example system-wide hg config (see 'hg help config' for more info)
+    b'global': b"""# example system-wide hg config (see 'hg help config' for more info)
 
 [ui]
 # uncomment to disable color in command output
@@ -285,8 +285,8 @@
             self.httppasswordmgrdb = httppasswordmgrdbproxy()
             self._blockedtimes = collections.defaultdict(int)
 
-        allowed = self.configlist('experimental', 'exportableenviron')
-        if '*' in allowed:
+        allowed = self.configlist(b'experimental', b'exportableenviron')
+        if b'*' in allowed:
             self._exportableenviron = self.environ
         else:
             self._exportableenviron = {}
@@ -300,9 +300,9 @@
         u = cls()
         # we always trust global config files and environment variables
         for t, f in rcutil.rccomponents():
-            if t == 'path':
+            if t == b'path':
                 u.readconfig(f, trust=True)
-            elif t == 'items':
+            elif t == b'items':
                 sections = set()
                 for section, name, value, source in f:
                     # do not set u._ocfg
@@ -313,14 +313,14 @@
                 for section in sections:
                     u.fixconfig(section=section)
             else:
-                raise error.ProgrammingError('unknown rctype: %s' % t)
+                raise error.ProgrammingError(b'unknown rctype: %s' % t)
         u._maybetweakdefaults()
         return u
 
     def _maybetweakdefaults(self):
-        if not self.configbool('ui', 'tweakdefaults'):
+        if not self.configbool(b'ui', b'tweakdefaults'):
             return
-        if self._tweaked or self.plain('tweakdefaults'):
+        if self._tweaked or self.plain(b'tweakdefaults'):
             return
 
         # Note: it is SUPER IMPORTANT that you set self._tweaked to
@@ -331,11 +331,11 @@
         # avoid this weirdness.
         self._tweaked = True
         tmpcfg = config.config()
-        tmpcfg.parse('<tweakdefaults>', tweakrc)
+        tmpcfg.parse(b'<tweakdefaults>', tweakrc)
         for section in tmpcfg:
             for name, value in tmpcfg.items(section):
                 if not self.hasconfig(section, name):
-                    self.setconfig(section, name, value, "<tweakdefaults>")
+                    self.setconfig(section, name, value, b"<tweakdefaults>")
 
     def copy(self):
         return self.__class__(self)
@@ -353,7 +353,7 @@
         try:
             yield
         finally:
-            self._blockedtimes[key + '_blocked'] += (
+            self._blockedtimes[key + b'_blocked'] += (
                 util.timer() - starttime
             ) * 1000
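A standalone sketch of this blocked-time accounting pattern, with time.time() standing in for util.timer() and a module-level dict for the instance attribute:

    import collections
    import contextlib
    import time

    blockedtimes = collections.defaultdict(int)

    @contextlib.contextmanager
    def timeblockedsection(key):
        starttime = time.time()
        try:
            yield
        finally:
            # accumulate milliseconds under a bytes key, as above
            blockedtimes[key + b'_blocked'] += (time.time() - starttime) * 1000

    with timeblockedsection(b'stdio'):
        time.sleep(0.01)
    print(blockedtimes[b'stdio_blocked'] > 0)  # True
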
 
@@ -366,9 +366,9 @@
         lets you advise Mercurial that something risky is happening so
         that control-C etc can be blocked if desired.
         """
-        enabled = self.configbool('experimental', 'nointerrupt')
+        enabled = self.configbool(b'experimental', b'nointerrupt')
         if enabled and self.configbool(
-            'experimental', 'nointerrupt-interactiveonly'
+            b'experimental', b'nointerrupt-interactiveonly'
         ):
             enabled = self.interactive()
         if self._uninterruptible or not enabled:
@@ -379,9 +379,9 @@
             return
 
         def warn():
-            self.warn(_("shutting down cleanly\n"))
+            self.warn(_(b"shutting down cleanly\n"))
             self.warn(
-                _("press ^C again to terminate immediately (dangerous)\n")
+                _(b"press ^C again to terminate immediately (dangerous)\n")
             )
             return True
 
@@ -401,7 +401,7 @@
             return True
 
         tusers, tgroups = self._trustusers, self._trustgroups
-        if '*' in tusers or '*' in tgroups:
+        if b'*' in tusers or b'*' in tgroups:
             return True
 
         user = util.username(st.st_uid)
@@ -411,7 +411,10 @@
 
         if self._reportuntrusted:
             self.warn(
-                _('not trusting file %s from untrusted ' 'user %s, group %s\n')
+                _(
+                    b'not trusting file %s from untrusted '
+                    b'user %s, group %s\n'
+                )
                 % (f, user, group)
             )
         return False
@@ -435,37 +438,37 @@
         except error.ConfigError as inst:
             if trusted:
                 raise
-            self.warn(_("ignored: %s\n") % stringutil.forcebytestr(inst))
+            self.warn(_(b"ignored: %s\n") % stringutil.forcebytestr(inst))
 
         if self.plain():
             for k in (
-                'debug',
-                'fallbackencoding',
-                'quiet',
-                'slash',
-                'logtemplate',
-                'message-output',
-                'statuscopies',
-                'style',
-                'traceback',
-                'verbose',
+                b'debug',
+                b'fallbackencoding',
+                b'quiet',
+                b'slash',
+                b'logtemplate',
+                b'message-output',
+                b'statuscopies',
+                b'style',
+                b'traceback',
+                b'verbose',
             ):
-                if k in cfg['ui']:
-                    del cfg['ui'][k]
-            for k, v in cfg.items('defaults'):
-                del cfg['defaults'][k]
-            for k, v in cfg.items('commands'):
-                del cfg['commands'][k]
+                if k in cfg[b'ui']:
+                    del cfg[b'ui'][k]
+            for k, v in cfg.items(b'defaults'):
+                del cfg[b'defaults'][k]
+            for k, v in cfg.items(b'commands'):
+                del cfg[b'commands'][k]
         # Don't remove aliases from the configuration if in the exceptionlist
-        if self.plain('alias'):
-            for k, v in cfg.items('alias'):
-                del cfg['alias'][k]
-        if self.plain('revsetalias'):
-            for k, v in cfg.items('revsetalias'):
-                del cfg['revsetalias'][k]
-        if self.plain('templatealias'):
-            for k, v in cfg.items('templatealias'):
-                del cfg['templatealias'][k]
+        if self.plain(b'alias'):
+            for k, v in cfg.items(b'alias'):
+                del cfg[b'alias'][k]
+        if self.plain(b'revsetalias'):
+            for k, v in cfg.items(b'revsetalias'):
+                del cfg[b'revsetalias'][k]
+        if self.plain(b'templatealias'):
+            for k, v in cfg.items(b'templatealias'):
+                del cfg[b'templatealias'][k]
 
         if trusted:
             self._tcfg.update(cfg)
@@ -474,51 +477,51 @@
         self._ucfg.update(self._ocfg)
 
         if root is None:
-            root = os.path.expanduser('~')
+            root = os.path.expanduser(b'~')
         self.fixconfig(root=root)
 
     def fixconfig(self, root=None, section=None):
-        if section in (None, 'paths'):
+        if section in (None, b'paths'):
             # expand vars and ~
             # translate paths relative to root (or home) into absolute paths
             root = root or encoding.getcwd()
             for c in self._tcfg, self._ucfg, self._ocfg:
-                for n, p in c.items('paths'):
+                for n, p in c.items(b'paths'):
                     # Ignore sub-options.
-                    if ':' in n:
+                    if b':' in n:
                         continue
                     if not p:
                         continue
-                    if '%%' in p:
-                        s = self.configsource('paths', n) or 'none'
+                    if b'%%' in p:
+                        s = self.configsource(b'paths', n) or b'none'
                         self.warn(
-                            _("(deprecated '%%' in path %s=%s from %s)\n")
+                            _(b"(deprecated '%%' in path %s=%s from %s)\n")
                             % (n, p, s)
                         )
-                        p = p.replace('%%', '%')
+                        p = p.replace(b'%%', b'%')
                     p = util.expandpath(p)
                     if not util.hasscheme(p) and not os.path.isabs(p):
                         p = os.path.normpath(os.path.join(root, p))
-                    c.set("paths", n, p)
+                    c.set(b"paths", n, p)
 
-        if section in (None, 'ui'):
+        if section in (None, b'ui'):
             # update ui options
             self._fmsgout, self._fmsgerr = _selectmsgdests(self)
-            self.debugflag = self.configbool('ui', 'debug')
-            self.verbose = self.debugflag or self.configbool('ui', 'verbose')
-            self.quiet = not self.debugflag and self.configbool('ui', 'quiet')
+            self.debugflag = self.configbool(b'ui', b'debug')
+            self.verbose = self.debugflag or self.configbool(b'ui', b'verbose')
+            self.quiet = not self.debugflag and self.configbool(b'ui', b'quiet')
             if self.verbose and self.quiet:
                 self.quiet = self.verbose = False
             self._reportuntrusted = self.debugflag or self.configbool(
-                "ui", "report_untrusted"
+                b"ui", b"report_untrusted"
             )
-            self.tracebackflag = self.configbool('ui', 'traceback')
-            self.logblockedtimes = self.configbool('ui', 'logblockedtimes')
+            self.tracebackflag = self.configbool(b'ui', b'traceback')
+            self.logblockedtimes = self.configbool(b'ui', b'logblockedtimes')
 
-        if section in (None, 'trusted'):
+        if section in (None, b'trusted'):
             # update trust information
-            self._trustusers.update(self.configlist('trusted', 'users'))
-            self._trustgroups.update(self.configlist('trusted', 'groups'))
+            self._trustusers.update(self.configlist(b'trusted', b'users'))
+            self._trustgroups.update(self.configlist(b'trusted', b'groups'))
 
         if section in (None, b'devel', b'ui') and self.debugflag:
             tracked = set()
@@ -540,7 +543,7 @@
         self._tcfg.restore(data[1])
         self._ucfg.restore(data[2])
 
-    def setconfig(self, section, name, value, source=''):
+    def setconfig(self, section, name, value, source=b''):
         for cfg in (self._ocfg, self._tcfg, self._ucfg):
             cfg.set(section, name, value, source)
         self.fixconfig(section=section)
@@ -573,18 +576,18 @@
             else:
                 itemdefault = item.default
         else:
-            msg = "accessing unregistered config item: '%s.%s'"
+            msg = b"accessing unregistered config item: '%s.%s'"
             msg %= (section, name)
-            self.develwarn(msg, 2, 'warn-config-unknown')
+            self.develwarn(msg, 2, b'warn-config-unknown')
 
         if default is _unset:
             if item is None:
                 value = default
             elif item.default is configitems.dynamicdefault:
                 value = None
-                msg = "config item requires an explicit default value: '%s.%s'"
+                msg = b"config item requires an explicit default value: '%s.%s'"
                 msg %= (section, name)
-                self.develwarn(msg, 2, 'warn-config-default')
+                self.develwarn(msg, 2, b'warn-config-default')
             else:
                 value = itemdefault
         elif (
@@ -593,11 +596,11 @@
             and default != itemdefault
         ):
             msg = (
-                "specifying a mismatched default value for a registered "
-                "config item: '%s.%s' '%s'"
+                b"specifying a mismatched default value for a registered "
+                b"config item: '%s.%s' '%s'"
             )
             msg %= (section, name, pycompat.bytestr(default))
-            self.develwarn(msg, 2, 'warn-config-default')
+            self.develwarn(msg, 2, b'warn-config-default')
 
         for s, n in alternates:
             candidate = self._data(untrusted).get(s, n, None)
@@ -610,8 +613,8 @@
                 uvalue = self._ucfg.get(s, n)
                 if uvalue is not None and uvalue != value:
                     self.debug(
-                        "ignoring untrusted configuration option "
-                        "%s.%s = %s\n" % (s, n, uvalue)
+                        b"ignoring untrusted configuration option "
+                        b"%s.%s = %s\n" % (s, n, uvalue)
                     )
         return value
 
@@ -628,31 +631,31 @@
         main = self.config(section, name, default, untrusted=untrusted)
         data = self._data(untrusted)
         sub = {}
-        prefix = '%s:' % name
+        prefix = b'%s:' % name
         for k, v in data.items(section):
             if k.startswith(prefix):
                 sub[k[len(prefix) :]] = v
 
         if self.debugflag and not untrusted and self._reportuntrusted:
             for k, v in sub.items():
-                uvalue = self._ucfg.get(section, '%s:%s' % (name, k))
+                uvalue = self._ucfg.get(section, b'%s:%s' % (name, k))
                 if uvalue is not None and uvalue != v:
                     self.debug(
-                        'ignoring untrusted configuration option '
-                        '%s:%s.%s = %s\n' % (section, name, k, uvalue)
+                        b'ignoring untrusted configuration option '
+                        b'%s:%s.%s = %s\n' % (section, name, k, uvalue)
                     )
 
         return main, sub
 
     def configpath(self, section, name, default=_unset, untrusted=False):
         'get a path config item, expanded relative to repo root or config file'
         v = self.config(section, name, default, untrusted)
         if v is None:
             return None
-        if not os.path.isabs(v) or "://" not in v:
+        if not os.path.isabs(v) or b"://" not in v:
             src = self.configsource(section, name, untrusted)
-            if ':' in src:
-                base = os.path.dirname(src.rsplit(':')[0])
+            if b':' in src:
+                base = os.path.dirname(src.rsplit(b':')[0])
                 v = os.path.join(base, os.path.expanduser(v))
         return v
 
@@ -689,7 +692,7 @@
         b = stringutil.parsebool(v)
         if b is None:
             raise error.ConfigError(
-                _("%s.%s is not a boolean ('%s')") % (section, name, v)
+                _(b"%s.%s is not a boolean ('%s')") % (section, name, v)
             )
         return b
 
@@ -727,7 +730,7 @@
             if desc is None:
                 desc = pycompat.sysbytes(convert.__name__)
             raise error.ConfigError(
-                _("%s.%s is not a valid %s ('%s')") % (section, name, desc, v)
+                _(b"%s.%s is not a valid %s ('%s')") % (section, name, desc, v)
             )
 
     def configint(self, section, name, default=_unset, untrusted=False):
@@ -750,7 +753,7 @@
         """
 
         return self.configwith(
-            int, section, name, default, 'integer', untrusted
+            int, section, name, default, b'integer', untrusted
         )
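
configint above simply routes through configwith, whose contract is:
fetch the raw value, convert it, and turn a failed conversion into a
uniform error. A reduced, pure-function sketch of that contract:

    def configwith(convert, rawvalue, desc):
        # None (an unset item) passes through untouched
        if rawvalue is None:
            return None
        try:
            return convert(rawvalue)
        except (ValueError, TypeError):
            raise ValueError('value %r is not a valid %s' % (rawvalue, desc))

    # configwith(int, b'42', 'integer')  -> 42
    # configwith(int, b'4x2', 'integer') raises ValueError
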
 
     def configbytes(self, section, name, default=_unset, untrusted=False):
@@ -786,7 +789,7 @@
             return util.sizetoint(value)
         except error.ParseError:
             raise error.ConfigError(
-                _("%s.%s is not a byte quantity ('%s')")
+                _(b"%s.%s is not a byte quantity ('%s')")
                 % (section, name, value)
             )
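
configbytes leans on util.sizetoint to accept suffixed quantities such
as 1.5 kb. A rough str-based sketch of that behaviour (the real helper
operates on bytes and accepts more spellings):

    def sizetoint(s):
        units = (('gb', 1024 ** 3), ('mb', 1024 ** 2), ('kb', 1024),
                 ('g', 1024 ** 3), ('m', 1024 ** 2), ('k', 1024), ('b', 1))
        t = s.strip().lower()
        for suffix, mult in units:
            if t.endswith(suffix):
                return int(float(t[:-len(suffix)]) * mult)
        return int(t)

    # sizetoint('1.5 kb') -> 1536; sizetoint('300') -> 300
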
 
@@ -804,7 +807,7 @@
         """
         # default is not always a list
         v = self.configwith(
-            config.parselist, section, name, default, 'list', untrusted
+            config.parselist, section, name, default, b'list', untrusted
         )
         if isinstance(v, bytes):
             return config.parselist(v)
@@ -822,7 +825,7 @@
         """
         if self.config(section, name, default, untrusted):
             return self.configwith(
-                dateutil.parsedate, section, name, default, 'date', untrusted
+                dateutil.parsedate, section, name, default, b'date', untrusted
             )
         if default is _unset:
             return None
@@ -849,13 +852,13 @@
     def configitems(self, section, untrusted=False, ignoresub=False):
         items = self._data(untrusted).items(section)
         if ignoresub:
-            items = [i for i in items if ':' not in i[0]]
+            items = [i for i in items if b':' not in i[0]]
         if self.debugflag and not untrusted and self._reportuntrusted:
             for k, v in self._ucfg.items(section):
                 if self._tcfg.get(section, k) != v:
                     self.debug(
-                        "ignoring untrusted configuration option "
-                        "%s.%s = %s\n" % (section, k, v)
+                        b"ignoring untrusted configuration option "
+                        b"%s.%s = %s\n" % (section, k, v)
                     )
         return items
 
@@ -882,16 +885,18 @@
         - True otherwise
         '''
         if (
-            'HGPLAIN' not in encoding.environ
-            and 'HGPLAINEXCEPT' not in encoding.environ
+            b'HGPLAIN' not in encoding.environ
+            and b'HGPLAINEXCEPT' not in encoding.environ
         ):
             return False
         exceptions = (
-            encoding.environ.get('HGPLAINEXCEPT', '').strip().split(',')
+            encoding.environ.get(b'HGPLAINEXCEPT', b'').strip().split(b',')
         )
         # TODO: add support for HGPLAIN=+feature,-feature syntax
-        if '+strictflags' not in encoding.environ.get('HGPLAIN', '').split(','):
-            exceptions.append('strictflags')
+        if b'+strictflags' not in encoding.environ.get(b'HGPLAIN', b'').split(
+            b','
+        ):
+            exceptions.append(b'strictflags')
         if feature and exceptions:
             return feature not in exceptions
         return True
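
Pulled out of the diff, the plain-mode gate is a small predicate; the
same logic using str keys and os.environ where the real code uses bytes
and encoding.environ:

    import os

    def plain(feature=None):
        if ('HGPLAIN' not in os.environ
                and 'HGPLAINEXCEPT' not in os.environ):
            return False
        exceptions = os.environ.get('HGPLAINEXCEPT', '').strip().split(',')
        # strictflags stays an exception unless HGPLAIN opts into it
        if '+strictflags' not in os.environ.get('HGPLAIN', '').split(','):
            exceptions.append('strictflags')
        if feature and exceptions:
            return feature not in exceptions
        return True
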
@@ -906,34 +911,34 @@
         ($LOGNAME or $USER or $LNAME or $USERNAME) + "@full.hostname".
         If no username could be found, raise an Abort error.
         """
-        user = encoding.environ.get("HGUSER")
+        user = encoding.environ.get(b"HGUSER")
         if user is None:
-            user = self.config("ui", "username")
+            user = self.config(b"ui", b"username")
             if user is not None:
                 user = os.path.expandvars(user)
         if user is None:
-            user = encoding.environ.get("EMAIL")
+            user = encoding.environ.get(b"EMAIL")
         if user is None and acceptempty:
             return user
-        if user is None and self.configbool("ui", "askusername"):
-            user = self.prompt(_("enter a commit username:"), default=None)
+        if user is None and self.configbool(b"ui", b"askusername"):
+            user = self.prompt(_(b"enter a commit username:"), default=None)
         if user is None and not self.interactive():
             try:
-                user = '%s@%s' % (
+                user = b'%s@%s' % (
                     procutil.getuser(),
                     encoding.strtolocal(socket.getfqdn()),
                 )
-                self.warn(_("no username found, using '%s' instead\n") % user)
+                self.warn(_(b"no username found, using '%s' instead\n") % user)
             except KeyError:
                 pass
         if not user:
             raise error.Abort(
-                _('no username supplied'),
-                hint=_("use 'hg config --edit' " 'to set your username'),
+                _(b'no username supplied'),
+                hint=_(b"use 'hg config --edit' " b'to set your username'),
             )
-        if "\n" in user:
+        if b"\n" in user:
             raise error.Abort(
-                _("username %r contains a newline\n") % pycompat.bytestr(user)
+                _(b"username %r contains a newline\n") % pycompat.bytestr(user)
             )
         return user
 
@@ -1030,7 +1035,7 @@
         else:
             self._bufferapplylabels = None
 
-        return "".join(self._buffers.pop())
+        return b"".join(self._buffers.pop())
 
     def _isbuffered(self, dest):
         if dest is self._fout:
@@ -1048,7 +1053,7 @@
     def canbatchlabeledwrites(self):
         '''check if write calls with labels are batchable'''
         # Windows color printing is special, see ``write``.
-        return self._colormode != 'win32'
+        return self._colormode != b'win32'
 
     def write(self, *args, **opts):
         '''write args to output
@@ -1070,7 +1075,7 @@
 
         # inlined _write() for speed
         if self._buffers:
-            label = opts.get(r'label', '')
+            label = opts.get(r'label', b'')
             if label and self._bufferapplylabels:
                 self._buffers[-1].extend(self.label(a, label) for a in args)
             else:
@@ -1084,19 +1089,19 @@
         # opencode timeblockedsection because this is a critical path
         starttime = util.timer()
         try:
-            if self._colormode == 'win32':
+            if self._colormode == b'win32':
                 # windows color printing is its own can of crab, defer to
                 # the color module and that is it.
                 color.win32print(self, dest.write, msg, **opts)
             else:
                 if self._colormode is not None:
-                    label = opts.get(r'label', '')
+                    label = opts.get(r'label', b'')
                     msg = self.label(msg, label)
                 dest.write(msg)
         except IOError as err:
             raise error.StdioError(err)
         finally:
-            self._blockedtimes['stdio_blocked'] += (
+            self._blockedtimes[b'stdio_blocked'] += (
                 util.timer() - starttime
             ) * 1000
 
@@ -1106,7 +1111,7 @@
     def _write(self, dest, *args, **opts):
         # update write() as well if you touch this code
         if self._isbuffered(dest):
-            label = opts.get(r'label', '')
+            label = opts.get(r'label', b'')
             if label and self._bufferapplylabels:
                 self._buffers[-1].extend(self.label(a, label) for a in args)
             else:
@@ -1128,13 +1133,13 @@
                 # channel for machine-readable output with metadata, where
                 # no extra colorization is necessary.
                 dest.write(msg, **opts)
-            elif self._colormode == 'win32':
+            elif self._colormode == b'win32':
                 # windows color printing is its own can of crab, defer to
                 # the color module and that is it.
                 color.win32print(self, dest.write, msg, **opts)
             else:
                 if self._colormode is not None:
-                    label = opts.get(r'label', '')
+                    label = opts.get(r'label', b'')
                     msg = self.label(msg, label)
                 dest.write(msg)
             # stderr may be buffered under win32 when redirected to files,
@@ -1151,7 +1156,7 @@
                 return
             raise error.StdioError(err)
         finally:
-            self._blockedtimes['stdio_blocked'] += (
+            self._blockedtimes[b'stdio_blocked'] += (
                 util.timer() - starttime
             ) * 1000
 
@@ -1177,12 +1182,12 @@
                     if err.errno not in (errno.EPIPE, errno.EIO, errno.EBADF):
                         raise error.StdioError(err)
         finally:
-            self._blockedtimes['stdio_blocked'] += (
+            self._blockedtimes[b'stdio_blocked'] += (
                 util.timer() - starttime
             ) * 1000
 
     def _isatty(self, fh):
-        if self.configbool('ui', 'nontty'):
+        if self.configbool(b'ui', b'nontty'):
             return False
         return procutil.isatty(fh)
 
@@ -1239,27 +1244,27 @@
             # how the pager should behave is already determined
             return
 
-        if not command.startswith('internal-always-') and (
+        if not command.startswith(b'internal-always-') and (
             # explicit --pager=on (= 'internal-always-' prefix) should
             # take precedence over disabling factors below
-            command in self.configlist('pager', 'ignore')
-            or not self.configbool('ui', 'paginate')
-            or not self.configbool('pager', 'attend-' + command, True)
-            or encoding.environ.get('TERM') == 'dumb'
+            command in self.configlist(b'pager', b'ignore')
+            or not self.configbool(b'ui', b'paginate')
+            or not self.configbool(b'pager', b'attend-' + command, True)
+            or encoding.environ.get(b'TERM') == b'dumb'
             # TODO: if we want to allow HGPLAINEXCEPT=pager,
             # formatted() will need some adjustment.
             or not self.formatted()
             or self.plain()
             or self._buffers
             # TODO: expose debugger-enabled on the UI object
-            or '--debugger' in pycompat.sysargv
+            or b'--debugger' in pycompat.sysargv
         ):
             # We only want to paginate if the ui appears to be
             # interactive, the user didn't say HGPLAIN or
             # HGPLAINEXCEPT=pager, and the user didn't specify --debug.
             return
 
-        pagercmd = self.config('pager', 'pager', rcutil.fallbackpager)
+        pagercmd = self.config(b'pager', b'pager', rcutil.fallbackpager)
         if not pagercmd:
             return
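
Condensed, the gate above says: an explicit --pager=on (the
internal-always- prefix) always wins; otherwise any single disabling
factor suppresses the pager. A toy restatement with plain values
standing in for the config and environment lookups:

    def shouldpage(command, ignored, paginate, attend, term, formatted,
                   plain):
        if command.startswith(b'internal-always-'):
            return True
        return (command not in ignored and paginate and attend
                and term != b'dumb' and formatted and not plain)

    # shouldpage(b'log', [b'version'], True, True, b'xterm-256color',
    #            True, False) -> True
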
 
@@ -1269,25 +1274,25 @@
                 pagerenv[name] = value
 
         self.debug(
-            'starting pager for command %s\n' % stringutil.pprint(command)
+            b'starting pager for command %s\n' % stringutil.pprint(command)
         )
         self.flush()
 
         wasformatted = self.formatted()
-        if util.safehasattr(signal, "SIGPIPE"):
+        if util.safehasattr(signal, b"SIGPIPE"):
             signal.signal(signal.SIGPIPE, _catchterm)
         if self._runpager(pagercmd, pagerenv):
             self.pageractive = True
             # Preserve the formatted-ness of the UI. This is important
             # because we mess with stdout, which might confuse
             # auto-detection of things being formatted.
-            self.setconfig('ui', 'formatted', wasformatted, 'pager')
-            self.setconfig('ui', 'interactive', False, 'pager')
+            self.setconfig(b'ui', b'formatted', wasformatted, b'pager')
+            self.setconfig(b'ui', b'interactive', False, b'pager')
 
             # If pagermode differs from color.mode, reconfigure color now that
             # pageractive is set.
             cm = self._colormode
-            if cm != self.config('color', 'pagermode', cm):
+            if cm != self.config(b'color', b'pagermode', cm):
                 color.setup(self)
         else:
             # If the pager can't be spawned in dispatch when --pager=on is
@@ -1301,14 +1306,14 @@
         This is separate in part so that extensions (like chg) can
         override how a pager is invoked.
         """
-        if command == 'cat':
+        if command == b'cat':
             # Save ourselves some work.
             return False
         # If the command doesn't contain any of these characters, we
         # assume it's a binary and exec it directly. This means for
         # simple pager command configurations, we can degrade
         # gracefully and tell the user about their broken pager.
-        shell = any(c in command for c in "|&;<>()$`\\\"' \t\n*?[#~=%")
+        shell = any(c in command for c in b"|&;<>()$`\\\"' \t\n*?[#~=%")
 
         if pycompat.iswindows and not shell:
             # Windows' built-in `more` cannot be invoked with shell=False, but
@@ -1319,7 +1324,7 @@
             fullcmd = procutil.findexe(command)
             if not fullcmd:
                 self.warn(
-                    _("missing pager command '%s', skipping pager\n") % command
+                    _(b"missing pager command '%s', skipping pager\n") % command
                 )
                 return False
 
@@ -1339,7 +1344,7 @@
         except OSError as e:
             if e.errno == errno.ENOENT and not shell:
                 self.warn(
-                    _("missing pager command '%s', skipping pager\n") % command
+                    _(b"missing pager command '%s', skipping pager\n") % command
                 )
                 return False
             raise
@@ -1354,7 +1359,7 @@
 
         @self.atexit
         def killpager():
-            if util.safehasattr(signal, "SIGINT"):
+            if util.safehasattr(signal, b"SIGINT"):
                 signal.signal(signal.SIGINT, signal.SIG_IGN)
             # restore original fds, closing pager.stdin copies in the process
             os.dup2(stdoutfd, procutil.stdout.fileno())
@@ -1397,51 +1402,51 @@
         Then histedit will use the text interface and chunkselector will use
         the default curses interface (crecord at the moment).
         """
-        alldefaults = frozenset(["text", "curses"])
+        alldefaults = frozenset([b"text", b"curses"])
 
         featureinterfaces = {
-            "chunkselector": ["text", "curses",],
-            "histedit": ["text", "curses",],
+            b"chunkselector": [b"text", b"curses",],
+            b"histedit": [b"text", b"curses",],
         }
 
         # Feature-specific interface
         if feature not in featureinterfaces.keys():
             # Programming error, not user error
-            raise ValueError("Unknown feature requested %s" % feature)
+            raise ValueError(b"Unknown feature requested %s" % feature)
 
         availableinterfaces = frozenset(featureinterfaces[feature])
         if alldefaults > availableinterfaces:
             # Programming error, not user error. We need a use case to
             # define the right thing to do here.
             raise ValueError(
-                "Feature %s does not handle all default interfaces" % feature
+                b"Feature %s does not handle all default interfaces" % feature
             )
 
-        if self.plain() or encoding.environ.get('TERM') == 'dumb':
-            return "text"
+        if self.plain() or encoding.environ.get(b'TERM') == b'dumb':
+            return b"text"
 
         # Default interface for all the features
-        defaultinterface = "text"
-        i = self.config("ui", "interface")
+        defaultinterface = b"text"
+        i = self.config(b"ui", b"interface")
         if i in alldefaults:
             defaultinterface = i
 
         choseninterface = defaultinterface
-        f = self.config("ui", "interface.%s" % feature)
+        f = self.config(b"ui", b"interface.%s" % feature)
         if f in availableinterfaces:
             choseninterface = f
 
         if i is not None and defaultinterface != i:
             if f is not None:
-                self.warn(_("invalid value for ui.interface: %s\n") % (i,))
+                self.warn(_(b"invalid value for ui.interface: %s\n") % (i,))
             else:
                 self.warn(
-                    _("invalid value for ui.interface: %s (using %s)\n")
+                    _(b"invalid value for ui.interface: %s (using %s)\n")
                     % (i, choseninterface)
                 )
         if f is not None and choseninterface != f:
             self.warn(
-                _("invalid value for ui.interface.%s: %s (using %s)\n")
+                _(b"invalid value for ui.interface.%s: %s (using %s)\n")
                 % (feature, f, choseninterface)
             )
 
@@ -1461,7 +1466,7 @@
 
         This function refers to input only; for output, see `ui.formatted()'.
         '''
-        i = self.configbool("ui", "interactive")
+        i = self.configbool(b"ui", b"interactive")
         if i is None:
             # some environments replace stdin without implementing isatty
             # usually those are non-interactive
@@ -1472,9 +1477,9 @@
     def termwidth(self):
         '''how wide is the terminal in columns?
         '''
-        if 'COLUMNS' in encoding.environ:
+        if b'COLUMNS' in encoding.environ:
             try:
-                return int(encoding.environ['COLUMNS'])
+                return int(encoding.environ[b'COLUMNS'])
             except ValueError:
                 pass
         return scmutil.termsize(self)[0]
@@ -1499,7 +1504,7 @@
         if self.plain():
             return False
 
-        i = self.configbool("ui", "formatted")
+        i = self.configbool(b"ui", b"formatted")
         if i is None:
             # some environments replace stdout without implementing isatty
             # usually those are non-interactive
@@ -1507,7 +1512,7 @@
 
         return i
 
-    def _readline(self, prompt=' ', promptopts=None):
+    def _readline(self, prompt=b' ', promptopts=None):
         # Replacing stdin/stdout temporarily is a hard problem on Python 3
         # because they have to be text streams with *no buffering*. Instead,
         # we use rawinput() only if call_readline() will be invoked by
@@ -1530,20 +1535,20 @@
             except Exception:
                 usereadline = False
 
-        if self._colormode == 'win32' or not usereadline:
+        if self._colormode == b'win32' or not usereadline:
             if not promptopts:
                 promptopts = {}
             self._writemsgnobuf(
-                self._fmsgout, prompt, type='prompt', **promptopts
+                self._fmsgout, prompt, type=b'prompt', **promptopts
             )
             self.flush()
-            prompt = ' '
+            prompt = b' '
         else:
-            prompt = self.label(prompt, 'ui.prompt') + ' '
+            prompt = self.label(prompt, b'ui.prompt') + b' '
 
         # prompt ' ' must exist; otherwise readline may delete entire line
         # - http://bugs.python.org/issue12833
-        with self.timeblockedsection('stdio'):
+        with self.timeblockedsection(b'stdio'):
             if usereadline:
                 line = encoding.strtolocal(pycompat.rawinput(prompt))
                 # When stdin is in binary mode on Windows, it can cause
@@ -1560,7 +1565,7 @@
 
         return line
 
-    def prompt(self, msg, default="y"):
+    def prompt(self, msg, default=b"y"):
         """Prompt user with msg, read response.
         If ui is not interactive, the default is returned.
         """
@@ -1569,17 +1574,17 @@
     def _prompt(self, msg, **opts):
         default = opts[r'default']
         if not self.interactive():
-            self._writemsg(self._fmsgout, msg, ' ', type='prompt', **opts)
+            self._writemsg(self._fmsgout, msg, b' ', type=b'prompt', **opts)
             self._writemsg(
-                self._fmsgout, default or '', "\n", type='promptecho'
+                self._fmsgout, default or b'', b"\n", type=b'promptecho'
             )
             return default
         try:
             r = self._readline(prompt=msg, promptopts=opts)
             if not r:
                 r = default
-            if self.configbool('ui', 'promptecho'):
-                self._writemsg(self._fmsgout, r, "\n", type='promptecho')
+            if self.configbool(b'ui', b'promptecho'):
+                self._writemsg(self._fmsgout, r, b"\n", type=b'promptecho')
             return r
         except EOFError:
             raise error.ResponseExpected()
@@ -1606,11 +1611,11 @@
         # except an ampersand followed by a character.
         m = re.match(br'(?s)(.+?)\$\$([^\$]*&[^ \$].*)', prompt)
         msg = m.group(1)
-        choices = [p.strip(' ') for p in m.group(2).split('$$')]
+        choices = [p.strip(b' ') for p in m.group(2).split(b'$$')]
 
         def choicetuple(s):
-            ampidx = s.index('&')
-            return s[ampidx + 1 : ampidx + 2].lower(), s.replace('&', '', 1)
+            ampidx = s.index(b'&')
+            return s[ampidx + 1 : ampidx + 2].lower(), s.replace(b'&', b'', 1)
 
         return (msg, [choicetuple(s) for s in choices])
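
The $$ prompt syntax handled here is easiest to see with the parsing
lifted out and run on an example:

    import re

    def extractchoices(prompt):
        # text before the first '$$' is the message; each later
        # '$$'-separated part is a choice whose '&' marks the key
        m = re.match(br'(?s)(.+?)\$\$([^\$]*&[^ \$].*)', prompt)
        msg = m.group(1)
        choices = [p.strip(b' ') for p in m.group(2).split(b'$$')]

        def choicetuple(s):
            ampidx = s.index(b'&')
            return s[ampidx + 1:ampidx + 2].lower(), s.replace(b'&', b'', 1)

        return msg, [choicetuple(s) for s in choices]

    # extractchoices(b'ok to continue (yn)?$$ &Yes $$ &No')
    # -> (b'ok to continue (yn)?', [(b'y', b'Yes'), (b'n', b'No')])
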
 
@@ -1632,7 +1637,7 @@
             if r.lower() in resps:
                 return resps.index(r.lower())
             # TODO: shouldn't it be a warning?
-            self._writemsg(self._fmsgout, _("unrecognized response\n"))
+            self._writemsg(self._fmsgout, _(b"unrecognized response\n"))
 
     def getpass(self, prompt=None, default=None):
         if not self.interactive():
@@ -1640,18 +1645,18 @@
         try:
             self._writemsg(
                 self._fmsgerr,
-                prompt or _('password: '),
-                type='prompt',
+                prompt or _(b'password: '),
+                type=b'prompt',
                 password=True,
             )
             # disable getpass() only if explicitly specified. it's still valid
             # to interact with tty even if fin is not a tty.
-            with self.timeblockedsection('stdio'):
-                if self.configbool('ui', 'nontty'):
+            with self.timeblockedsection(b'stdio'):
+                if self.configbool(b'ui', b'nontty'):
                     l = self._fin.readline()
                     if not l:
                         raise EOFError
-                    return l.rstrip('\n')
+                    return l.rstrip(b'\n')
                 else:
                     return getpass.getpass(r'')
         except EOFError:
@@ -1663,21 +1668,21 @@
         This adds an output label of "ui.status".
         '''
         if not self.quiet:
-            self._writemsg(self._fmsgout, type='status', *msg, **opts)
+            self._writemsg(self._fmsgout, type=b'status', *msg, **opts)
 
     def warn(self, *msg, **opts):
         '''write warning message to output (stderr)
 
         This adds an output label of "ui.warning".
         '''
-        self._writemsg(self._fmsgerr, type='warning', *msg, **opts)
+        self._writemsg(self._fmsgerr, type=b'warning', *msg, **opts)
 
     def error(self, *msg, **opts):
         '''write error message to output (stderr)
 
         This adds an output label of "ui.error".
         '''
-        self._writemsg(self._fmsgerr, type='error', *msg, **opts)
+        self._writemsg(self._fmsgerr, type=b'error', *msg, **opts)
 
     def note(self, *msg, **opts):
         '''write note to output (if ui.verbose is True)
@@ -1685,7 +1690,7 @@
         This adds an output label of "ui.note".
         '''
         if self.verbose:
-            self._writemsg(self._fmsgout, type='note', *msg, **opts)
+            self._writemsg(self._fmsgout, type=b'note', *msg, **opts)
 
     def debug(self, *msg, **opts):
         '''write debug message to output (if ui.debugflag is True)
@@ -1693,7 +1698,7 @@
         This adds an output label of "ui.debug".
         '''
         if self.debugflag:
-            self._writemsg(self._fmsgout, type='debug', *msg, **opts)
+            self._writemsg(self._fmsgout, type=b'debug', *msg, **opts)
             self.log(b'debug', b'%s', b''.join(msg))
 
     def edit(
@@ -1708,60 +1713,62 @@
     ):
         if action is None:
             self.develwarn(
-                'action is None but will soon be a required '
-                'parameter to ui.edit()'
+                b'action is None but will soon be a required '
+                b'parameter to ui.edit()'
             )
         extra_defaults = {
-            'prefix': 'editor',
-            'suffix': '.txt',
+            b'prefix': b'editor',
+            b'suffix': b'.txt',
         }
         if extra is not None:
-            if extra.get('suffix') is not None:
+            if extra.get(b'suffix') is not None:
                 self.develwarn(
-                    'extra.suffix is not None but will soon be '
-                    'ignored by ui.edit()'
+                    b'extra.suffix is not None but will soon be '
+                    b'ignored by ui.edit()'
                 )
             extra_defaults.update(extra)
         extra = extra_defaults
 
-        if action == 'diff':
-            suffix = '.diff'
+        if action == b'diff':
+            suffix = b'.diff'
         elif action:
-            suffix = '.%s.hg.txt' % action
+            suffix = b'.%s.hg.txt' % action
         else:
-            suffix = extra['suffix']
+            suffix = extra[b'suffix']
 
         rdir = None
-        if self.configbool('experimental', 'editortmpinhg'):
+        if self.configbool(b'experimental', b'editortmpinhg'):
             rdir = repopath
         (fd, name) = pycompat.mkstemp(
-            prefix='hg-' + extra['prefix'] + '-', suffix=suffix, dir=rdir
+            prefix=b'hg-' + extra[b'prefix'] + b'-', suffix=suffix, dir=rdir
         )
         try:
             f = os.fdopen(fd, r'wb')
             f.write(util.tonativeeol(text))
             f.close()
 
-            environ = {'HGUSER': user}
-            if 'transplant_source' in extra:
-                environ.update({'HGREVISION': hex(extra['transplant_source'])})
-            for label in ('intermediate-source', 'source', 'rebase_source'):
+            environ = {b'HGUSER': user}
+            if b'transplant_source' in extra:
+                environ.update(
+                    {b'HGREVISION': hex(extra[b'transplant_source'])}
+                )
+            for label in (b'intermediate-source', b'source', b'rebase_source'):
                 if label in extra:
-                    environ.update({'HGREVISION': extra[label]})
+                    environ.update({b'HGREVISION': extra[label]})
                     break
             if editform:
-                environ.update({'HGEDITFORM': editform})
+                environ.update({b'HGEDITFORM': editform})
             if pending:
-                environ.update({'HG_PENDING': pending})
+                environ.update({b'HG_PENDING': pending})
 
             editor = self.geteditor()
 
             self.system(
-                "%s \"%s\"" % (editor, name),
+                b"%s \"%s\"" % (editor, name),
                 environ=environ,
                 onerr=error.Abort,
-                errprefix=_("edit failed"),
-                blockedtag='editor',
+                errprefix=_(b"edit failed"),
+                blockedtag=b'editor',
             )
 
             f = open(name, r'rb')
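
Stripped of the Mercurial plumbing, the edit() flow above is: write the
text to a temporary file, run the editor on it with extra variables in
the environment, then read the file back. A sketch with subprocess
standing in for self.system() (str keys assumed for simplicity):

    import os
    import subprocess
    import tempfile

    def runeditor(text, environ, editor='vi'):
        fd, name = tempfile.mkstemp(prefix='hg-editor-', suffix='.txt')
        try:
            with os.fdopen(fd, 'wb') as f:
                f.write(text)
            # HGUSER, HGEDITFORM, HG_PENDING and friends travel via env
            subprocess.check_call('%s "%s"' % (editor, name), shell=True,
                                  env=dict(os.environ, **environ))
            with open(name, 'rb') as f:
                return f.read()
        finally:
            os.unlink(name)
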
@@ -1791,19 +1798,19 @@
             # Long cmds tend to be because of an absolute path on cmd. Keep
             # the tail end instead
             cmdsuffix = cmd.translate(None, _keepalnum)[-85:]
-            blockedtag = 'unknown_system_' + cmdsuffix
+            blockedtag = b'unknown_system_' + cmdsuffix
         out = self._fout
         if any(s[1] for s in self._bufferstates):
             out = self
         with self.timeblockedsection(blockedtag):
             rc = self._runsystem(cmd, environ=environ, cwd=cwd, out=out)
         if rc and onerr:
-            errmsg = '%s %s' % (
+            errmsg = b'%s %s' % (
                 os.path.basename(cmd.split(None, 1)[0]),
                 procutil.explainexit(rc),
             )
             if errprefix:
-                errmsg = '%s: %s' % (errprefix, errmsg)
+                errmsg = b'%s: %s' % (errprefix, errmsg)
             raise onerr(errmsg)
         return rc
 
@@ -1828,10 +1835,10 @@
 
                 # exclude frame where 'exc' was chained and rethrown from exctb
                 self.write_err(
-                    'Traceback (most recent call last):\n',
-                    ''.join(exctb[:-1]),
-                    ''.join(causetb),
-                    ''.join(exconly),
+                    b'Traceback (most recent call last):\n',
+                    b''.join(exctb[:-1]),
+                    b''.join(causetb),
+                    b''.join(exconly),
                 )
             else:
                 output = traceback.format_exception(exc[0], exc[1], exc[2])
@@ -1840,15 +1847,15 @@
 
     def geteditor(self):
         '''return editor to use'''
-        if pycompat.sysplatform == 'plan9':
+        if pycompat.sysplatform == b'plan9':
             # vi is the MIPS instruction simulator on Plan 9. We
             # instead default to E to plumb commit messages to
             # avoid confusion.
-            editor = 'E'
+            editor = b'E'
         else:
-            editor = 'vi'
-        return encoding.environ.get("HGEDITOR") or self.config(
-            "ui", "editor", editor
+            editor = b'vi'
+        return encoding.environ.get(b"HGEDITOR") or self.config(
+            b"ui", b"editor", editor
         )
 
     @util.propertycache
@@ -1857,7 +1864,7 @@
         if (
             self.quiet
             or self.debugflag
-            or self.configbool('progress', 'disable')
+            or self.configbool(b'progress', b'disable')
             or not progress.shouldprint(self)
         ):
             return None
@@ -1870,7 +1877,7 @@
         if self._progbar is not None and self._progbar.printed:
             self._progbar.clear()
 
-    def progress(self, topic, pos, item="", unit="", total=None):
+    def progress(self, topic, pos, item=b"", unit=b"", total=None):
         '''show a progress message
 
         By default a textual progress bar will be displayed if an operation
@@ -1885,14 +1892,16 @@
         All topics should be marked closed by setting pos to None at
         termination.
         '''
-        self.deprecwarn("use ui.makeprogress() instead of ui.progress()", "5.1")
+        self.deprecwarn(
+            b"use ui.makeprogress() instead of ui.progress()", b"5.1"
+        )
         progress = self.makeprogress(topic, unit, total)
         if pos is not None:
             progress.update(pos, item=item)
         else:
             progress.complete()
 
-    def makeprogress(self, topic, unit="", total=None):
+    def makeprogress(self, topic, unit=b"", total=None):
         """Create a progress helper for the specified topic"""
         if getattr(self._fmsgerr, 'structured', False):
             # channel for machine-readable output with metadata, just send
@@ -1981,25 +1990,26 @@
         Use 'stacklevel' to report the offender some layers further up in the
         stack.
         """
-        if not self.configbool('devel', 'all-warnings'):
-            if config is None or not self.configbool('devel', config):
+        if not self.configbool(b'devel', b'all-warnings'):
+            if config is None or not self.configbool(b'devel', config):
                 return
-        msg = 'devel-warn: ' + msg
+        msg = b'devel-warn: ' + msg
         stacklevel += 1  # get in develwarn
         if self.tracebackflag:
             util.debugstacktrace(msg, stacklevel, self._ferr, self._fout)
             self.log(
-                'develwarn',
-                '%s at:\n%s' % (msg, ''.join(util.getstackframes(stacklevel))),
+                b'develwarn',
+                b'%s at:\n%s'
+                % (msg, b''.join(util.getstackframes(stacklevel))),
             )
         else:
             curframe = inspect.currentframe()
             calframe = inspect.getouterframes(curframe, 2)
             fname, lineno, fmsg = calframe[stacklevel][1:4]
             fname, fmsg = pycompat.sysbytes(fname), pycompat.sysbytes(fmsg)
-            self.write_err('%s at: %s:%d (%s)\n' % (msg, fname, lineno, fmsg))
+            self.write_err(b'%s at: %s:%d (%s)\n' % (msg, fname, lineno, fmsg))
             self.log(
-                'develwarn', '%s at: %s:%d (%s)\n', msg, fname, lineno, fmsg
+                b'develwarn', b'%s at: %s:%d (%s)\n', msg, fname, lineno, fmsg
             )
             curframe = calframe = None  # avoid cycles
 
@@ -2010,15 +2020,15 @@
         - version: last version where the API will be supported,
         """
         if not (
-            self.configbool('devel', 'all-warnings')
-            or self.configbool('devel', 'deprec-warn')
+            self.configbool(b'devel', b'all-warnings')
+            or self.configbool(b'devel', b'deprec-warn')
         ):
             return
         msg += (
-            "\n(compatibility will be dropped after Mercurial-%s,"
-            " update your code.)"
+            b"\n(compatibility will be dropped after Mercurial-%s,"
+            b" update your code.)"
         ) % version
-        self.develwarn(msg, stacklevel=stacklevel, config='deprec-warn')
+        self.develwarn(msg, stacklevel=stacklevel, config=b'deprec-warn')
 
     def exportableenviron(self):
         """The environment variables that are safe to export, e.g. through
@@ -2027,7 +2037,7 @@
         return self._exportableenviron
 
     @contextlib.contextmanager
-    def configoverride(self, overrides, source=""):
+    def configoverride(self, overrides, source=b""):
         """Context manager for temporary config overrides
         `overrides` must be a dict of the following structure:
         {(section, name) : value}"""
@@ -2042,8 +2052,8 @@
                 self.restoreconfig(backup)
             # just restoring ui.quiet config to the previous value is not enough
             # as it does not update ui.quiet class member
-            if ('ui', 'quiet') in overrides:
-                self.fixconfig(section='ui')
+            if (b'ui', b'quiet') in overrides:
+                self.fixconfig(section=b'ui')
 
 
 class paths(dict):
@@ -2056,11 +2066,11 @@
     def __init__(self, ui):
         dict.__init__(self)
 
-        for name, loc in ui.configitems('paths', ignoresub=True):
+        for name, loc in ui.configitems(b'paths', ignoresub=True):
             # No location is the same as not existing.
             if not loc:
                 continue
-            loc, sub = ui.configsuboptions('paths', name)
+            loc, sub = ui.configsuboptions(b'paths', name)
             self[name] = path(ui, name, rawloc=loc, suboptions=sub)
 
     def getpath(self, name, default=None):
@@ -2098,7 +2108,7 @@
                 # We don't pass sub-options in, so no need to pass ui instance.
                 return path(None, None, rawloc=name)
             except ValueError:
-                raise error.RepoError(_('repository %s does not exist') % name)
+                raise error.RepoError(_(b'repository %s does not exist') % name)
 
 
 _pathsuboptions = {}
@@ -2126,19 +2136,22 @@
     return register
 
 
-@pathsuboption('pushurl', 'pushloc')
+@pathsuboption(b'pushurl', b'pushloc')
 def pushurlpathoption(ui, path, value):
     u = util.url(value)
     # Actually require a URL.
     if not u.scheme:
-        ui.warn(_('(paths.%s:pushurl not a URL; ignoring)\n') % path.name)
+        ui.warn(_(b'(paths.%s:pushurl not a URL; ignoring)\n') % path.name)
         return None
 
     # Don't support the #foo syntax in the push URL to declare branch to
     # push.
     if u.fragment:
         ui.warn(
-            _('("#fragment" in paths.%s:pushurl not supported; ' 'ignoring)\n')
+            _(
+                b'("#fragment" in paths.%s:pushurl not supported; '
+                b'ignoring)\n'
+            )
             % path.name
         )
         u.fragment = None
@@ -2146,7 +2159,7 @@
     return bytes(u)
 
 
-@pathsuboption('pushrev', 'pushrev')
+@pathsuboption(b'pushrev', b'pushrev')
 def pushrevpathoption(ui, path, value):
     return value
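
The decorator byteified here implements a small registry: each
registered function validates one 'paths' sub-option and names the path
attribute that receives the result. Its body is not shown in this hunk,
so the following reconstruction is an assumption based on the call
sites:

    _pathsuboptions = {}

    def pathsuboption(option, attr):
        # record, per sub-option name, the target attribute and the
        # validating function; return the function unchanged
        def register(func):
            _pathsuboptions[option] = (attr, func)
            return func
        return register

    @pathsuboption(b'pushrev', b'pushrev')
    def pushrevpathoption(ui, path, value):
        return value
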
 
@@ -2167,7 +2180,7 @@
         ``ValueError`` is raised.
         """
         if not rawloc:
-            raise ValueError('rawloc must be defined')
+            raise ValueError(b'rawloc must be defined')
 
         # Locations may define branches via syntax <base>#<branch>.
         u = util.url(rawloc)
@@ -2181,13 +2194,14 @@
 
         self.name = name
         self.rawloc = rawloc
-        self.loc = '%s' % u
+        self.loc = b'%s' % u
 
         # When given a raw location but not a symbolic name, validate the
         # location is valid.
         if not name and not u.scheme and not self._isvalidlocalpath(self.loc):
             raise ValueError(
-                'location is not a URL or path to a local ' 'repo: %s' % rawloc
+                b'location is not a URL or path to a local '
+                b'repo: %s' % rawloc
             )
 
         suboptions = suboptions or {}
@@ -2209,7 +2223,7 @@
         'valid' in this case (like when pulling from a git repo into a hg
         one)."""
         try:
-            return os.path.isdir(os.path.join(path, '.hg'))
+            return os.path.isdir(os.path.join(path, b'.hg'))
         # Python 2 may return TypeError. Python 3, ValueError.
         except (TypeError, ValueError):
             return False
@@ -2270,5 +2284,5 @@
     """
     # TODO: maybe change 'type' to a mandatory option
     if r'type' in opts and not getattr(dest, 'structured', False):
-        opts[r'label'] = opts.get(r'label', '') + ' ui.%s' % opts.pop(r'type')
+        opts[r'label'] = opts.get(r'label', b'') + b' ui.%s' % opts.pop(r'type')
     write(dest, *args, **opts)
--- a/mercurial/unionrepo.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/unionrepo.py	Sun Oct 06 09:48:39 2019 -0400
@@ -193,7 +193,7 @@
         self.repo2 = repo2
         self._url = url
 
-        self.ui.setconfig('phases', 'publish', False, 'unionrepo')
+        self.ui.setconfig(b'phases', b'publish', False, b'unionrepo')
 
     @localrepo.unfilteredpropertycache
     def changelog(self):
@@ -236,25 +236,25 @@
 
 def instance(ui, path, create, intents=None, createopts=None):
     if create:
-        raise error.Abort(_('cannot create new union repository'))
-    parentpath = ui.config("bundle", "mainreporoot")
+        raise error.Abort(_(b'cannot create new union repository'))
+    parentpath = ui.config(b"bundle", b"mainreporoot")
     if not parentpath:
         # try to find the correct path to the working directory repo
         parentpath = cmdutil.findrepo(encoding.getcwd())
         if parentpath is None:
-            parentpath = ''
+            parentpath = b''
     if parentpath:
         # Try to make the full path relative so we get a nice, short URL.
         # In particular, we don't want temp dir names in test outputs.
         cwd = encoding.getcwd()
         if parentpath == cwd:
-            parentpath = ''
+            parentpath = b''
         else:
             cwd = pathutil.normasprefix(cwd)
             if parentpath.startswith(cwd):
                 parentpath = parentpath[len(cwd) :]
-    if path.startswith('union:'):
-        s = path.split(":", 1)[1].split("+", 1)
+    if path.startswith(b'union:'):
+        s = path.split(b":", 1)[1].split(b"+", 1)
         if len(s) == 1:
             repopath, repopath2 = parentpath, s[0]
         else:
@@ -270,7 +270,7 @@
     repo1 = localrepo.instance(ui, repopath1, create=False)
     repo2 = localrepo.instance(ui, repopath2, create=False)
 
-    url = 'union:%s+%s' % (
+    url = b'union:%s+%s' % (
         util.expandpath(repopath1),
         util.expandpath(repopath2),
     )
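
union: URLs carry one or two repository paths; with a single path, the
working-directory repository is implied. The parsing above as a
standalone helper:

    def parseunionpath(path, parentpath=b''):
        # b'union:other'   -> (parentpath, b'other')
        # b'union:one+two' -> (b'one', b'two')
        s = path.split(b':', 1)[1].split(b'+', 1)
        if len(s) == 1:
            return parentpath, s[0]
        return s[0], s[1]
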
--- a/mercurial/upgrade.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/upgrade.py	Sun Oct 06 09:48:39 2019 -0400
@@ -28,7 +28,7 @@
 
 # list of requirements that request a clone of all revlog if added/removed
 RECLONES_REQUIREMENTS = {
-    'generaldelta',
+    b'generaldelta',
     localrepo.SPARSEREVLOG_REQUIREMENT,
 }
 
@@ -41,9 +41,9 @@
     """
     return {
         # Introduced in Mercurial 0.9.2.
-        'revlogv1',
+        b'revlogv1',
         # Introduced in Mercurial 0.9.2.
-        'store',
+        b'store',
     }
 
 
@@ -56,12 +56,12 @@
     return {
         # The upgrade code does not yet support these experimental features.
         # This is an artificial limitation.
-        'treemanifest',
+        b'treemanifest',
         # This was a precursor to generaldelta and was never enabled by default.
         # It should (hopefully) not exist in the wild.
-        'parentdelta',
+        b'parentdelta',
         # Upgrade should operate on the actual store, not the shared link.
-        'shared',
+        b'shared',
     }
 
 
@@ -79,7 +79,7 @@
         engine = compression.compengines[name]
         if engine.available() and engine.revlogheader():
             supported.add(b'exp-compression-%s' % name)
-            if engine.name() == 'zstd':
+            if engine.name() == b'zstd':
                 supported.add(b'revlog-compression-zstd')
     return supported
 
@@ -93,18 +93,18 @@
     Extensions should monkeypatch this to add their custom requirements.
     """
     supported = {
-        'dotencode',
-        'fncache',
-        'generaldelta',
-        'revlogv1',
-        'store',
+        b'dotencode',
+        b'fncache',
+        b'generaldelta',
+        b'revlogv1',
+        b'store',
         localrepo.SPARSEREVLOG_REQUIREMENT,
     }
     for name in compression.compengines:
         engine = compression.compengines[name]
         if engine.available() and engine.revlogheader():
             supported.add(b'exp-compression-%s' % name)
-            if engine.name() == 'zstd':
+            if engine.name() == b'zstd':
                 supported.add(b'revlog-compression-zstd')
     return supported
 
@@ -120,16 +120,16 @@
     future, unknown requirements from accidentally being added.
     """
     supported = {
-        'dotencode',
-        'fncache',
-        'generaldelta',
+        b'dotencode',
+        b'fncache',
+        b'generaldelta',
         localrepo.SPARSEREVLOG_REQUIREMENT,
     }
     for name in compression.compengines:
         engine = compression.compengines[name]
         if engine.available() and engine.revlogheader():
             supported.add(b'exp-compression-%s' % name)
-            if engine.name() == 'zstd':
+            if engine.name() == b'zstd':
                 supported.add(b'revlog-compression-zstd')
     return supported
 
@@ -138,8 +138,8 @@
     return set()
 
 
-deficiency = 'deficiency'
-optimisation = 'optimization'
+deficiency = b'deficiency'
+optimisation = b'optimization'
 
 
 class improvement(object):
@@ -259,129 +259,129 @@
 
 @registerformatvariant
 class fncache(requirementformatvariant):
-    name = 'fncache'
+    name = b'fncache'
 
-    _requirement = 'fncache'
+    _requirement = b'fncache'
 
     default = True
 
     description = _(
-        'long and reserved filenames may not work correctly; '
-        'repository performance is sub-optimal'
+        b'long and reserved filenames may not work correctly; '
+        b'repository performance is sub-optimal'
     )
 
     upgrademessage = _(
-        'repository will be more resilient to storing '
-        'certain paths and performance of certain '
-        'operations should be improved'
+        b'repository will be more resilient to storing '
+        b'certain paths and performance of certain '
+        b'operations should be improved'
     )
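
Every requirement-backed variant in this file follows the same recipe:
a requirement string, a description of the deficiency, and an upgrade
message. The base-class behaviour implied by these definitions is that
a variant applies exactly when its requirement is present in
repo.requirements; the sketch below reconstructs that and is an
assumption, since the base class is outside this hunk:

    class requirementformatvariant(object):
        _requirement = None

        @classmethod
        def fromrepo(cls, repo):
            # True when the repository already has the requirement
            return cls._requirement in repo.requirements

    class fncache(requirementformatvariant):
        _requirement = b'fncache'
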
 
 
 @registerformatvariant
 class dotencode(requirementformatvariant):
-    name = 'dotencode'
+    name = b'dotencode'
 
-    _requirement = 'dotencode'
+    _requirement = b'dotencode'
 
     default = True
 
     description = _(
-        'storage of filenames beginning with a period or '
-        'space may not work correctly'
+        b'storage of filenames beginning with a period or '
+        b'space may not work correctly'
     )
 
     upgrademessage = _(
-        'repository will be better able to store files '
-        'beginning with a space or period'
+        b'repository will be better able to store files '
+        b'beginning with a space or period'
     )
 
 
 @registerformatvariant
 class generaldelta(requirementformatvariant):
-    name = 'generaldelta'
+    name = b'generaldelta'
 
-    _requirement = 'generaldelta'
+    _requirement = b'generaldelta'
 
     default = True
 
     description = _(
-        'deltas within internal storage are unable to '
-        'choose optimal revisions; repository is larger and '
-        'slower than it could be; interaction with other '
-        'repositories may require extra network and CPU '
-        'resources, making "hg push" and "hg pull" slower'
+        b'deltas within internal storage are unable to '
+        b'choose optimal revisions; repository is larger and '
+        b'slower than it could be; interaction with other '
+        b'repositories may require extra network and CPU '
+        b'resources, making "hg push" and "hg pull" slower'
     )
 
     upgrademessage = _(
-        'repository storage will be able to create '
-        'optimal deltas; new repository data will be '
-        'smaller and read times should decrease; '
-        'interacting with other repositories using this '
-        'storage model should require less network and '
-        'CPU resources, making "hg push" and "hg pull" '
-        'faster'
+        b'repository storage will be able to create '
+        b'optimal deltas; new repository data will be '
+        b'smaller and read times should decrease; '
+        b'interacting with other repositories using this '
+        b'storage model should require less network and '
+        b'CPU resources, making "hg push" and "hg pull" '
+        b'faster'
     )
 
 
 @registerformatvariant
 class sparserevlog(requirementformatvariant):
-    name = 'sparserevlog'
+    name = b'sparserevlog'
 
     _requirement = localrepo.SPARSEREVLOG_REQUIREMENT
 
     default = True
 
     description = _(
-        'in order to limit disk reading and memory usage on older '
-        'version, the span of a delta chain from its root to its '
-        'end is limited, whatever the relevant data in this span. '
-        'This can severly limit Mercurial ability to build good '
-        'chain of delta resulting is much more storage space being '
-        'taken and limit reusability of on disk delta during '
-        'exchange.'
+        b'in order to limit disk reading and memory usage on older '
+        b'versions, the span of a delta chain from its root to its '
+        b'end is limited, whatever the relevant data in this span. '
+        b'This can severely limit the ability of Mercurial to build '
+        b'good delta chains, resulting in much more storage space '
+        b'being used and limiting the reusability of on-disk deltas '
+        b'during exchange.'
     )
 
     upgrademessage = _(
-        'Revlog supports delta chain with more unused data '
-        'between payload. These gaps will be skipped at read '
-        'time. This allows for better delta chains, making a '
-        'better compression and faster exchange with server.'
+        b'Revlog supports delta chains with more unused data '
+        b'between payloads. These gaps will be skipped at read '
+        b'time. This allows for better delta chains, yielding '
+        b'better compression and faster exchange with the server.'
     )
 
 
 @registerformatvariant
 class sidedata(requirementformatvariant):
-    name = 'sidedata'
+    name = b'sidedata'
 
     _requirement = localrepo.SIDEDATA_REQUIREMENT
 
     default = False
 
     description = _(
-        'Allows storage of extra data alongside a revision, '
-        'unlocking various caching options.'
+        b'Allows storage of extra data alongside a revision, '
+        b'unlocking various caching options.'
     )
 
-    upgrademessage = _('Allows storage of extra data alongside a revision.')
+    upgrademessage = _(b'Allows storage of extra data alongside a revision.')
 
 
 @registerformatvariant
 class removecldeltachain(formatvariant):
-    name = 'plain-cl-delta'
+    name = b'plain-cl-delta'
 
     default = True
 
     description = _(
-        'changelog storage is using deltas instead of '
-        'raw entries; changelog reading and any '
-        'operation relying on changelog data are slower '
-        'than they could be'
+        b'changelog storage is using deltas instead of '
+        b'raw entries; changelog reading and any '
+        b'operation relying on changelog data are slower '
+        b'than they could be'
     )
 
     upgrademessage = _(
-        'changelog storage will be reformated to '
-        'store raw entries; changelog reading will be '
-        'faster; changelog size may be reduced'
+        b'changelog storage will be reformatted to '
+        b'store raw entries; changelog reading will be '
+        b'faster; changelog size may be reduced'
     )
 
     @staticmethod
@@ -399,16 +399,16 @@
 
 @registerformatvariant
 class compressionengine(formatvariant):
-    name = 'compression'
-    default = 'zlib'
+    name = b'compression'
+    default = b'zlib'
 
     description = _(
-        'Compresion algorithm used to compress data. '
-        'Some engine are faster than other'
+        b'Compression algorithm used to compress data. '
+        b'Some engines are faster than others'
     )
 
     upgrademessage = _(
-        'revlog content will be recompressed with the new ' 'algorithm.'
+        b'revlog content will be recompressed with the new algorithm.'
     )
 
     @classmethod
@@ -417,49 +417,49 @@
         # strictly speaking, revlog seems to support mixed compression styles.
         #
         # The compression used for new entries will be "the last one"
-        compression = 'zlib'
+        compression = b'zlib'
         for req in repo.requirements:
             prefix = req.startswith
-            if prefix('revlog-compression-') or prefix('exp-compression-'):
-                compression = req.split('-', 2)[2]
+            if prefix(b'revlog-compression-') or prefix(b'exp-compression-'):
+                compression = req.split(b'-', 2)[2]
         return compression
 
     @classmethod
     def fromconfig(cls, repo):
-        return repo.ui.config('format', 'revlog-compression')
+        return repo.ui.config(b'format', b'revlog-compression')
 
 
 @registerformatvariant
 class compressionlevel(formatvariant):
-    name = 'compression-level'
-    default = 'default'
+    name = b'compression-level'
+    default = b'default'
 
-    description = _('compression level')
+    description = _(b'compression level')
 
-    upgrademessage = _('revlog content will be recompressed')
+    upgrademessage = _(b'revlog content will be recompressed')
 
     @classmethod
     def fromrepo(cls, repo):
         comp = compressionengine.fromrepo(repo)
         level = None
-        if comp == 'zlib':
-            level = repo.ui.configint('storage', 'revlog.zlib.level')
-        elif comp == 'zstd':
-            level = repo.ui.configint('storage', 'revlog.zstd.level')
+        if comp == b'zlib':
+            level = repo.ui.configint(b'storage', b'revlog.zlib.level')
+        elif comp == b'zstd':
+            level = repo.ui.configint(b'storage', b'revlog.zstd.level')
         if level is None:
-            return 'default'
+            return b'default'
         return b'%d' % level
 
     @classmethod
     def fromconfig(cls, repo):
         comp = compressionengine.fromconfig(repo)
         level = None
-        if comp == 'zlib':
-            level = repo.ui.configint('storage', 'revlog.zlib.level')
-        elif comp == 'zstd':
-            level = repo.ui.configint('storage', 'revlog.zstd.level')
+        if comp == b'zlib':
+            level = repo.ui.configint(b'storage', b'revlog.zlib.level')
+        elif comp == b'zstd':
+            level = repo.ui.configint(b'storage', b'revlog.zstd.level')
         if level is None:
-            return 'default'
+            return b'default'
         return b'%d' % level
 
 
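
For illustration, a minimal standalone sketch of the requirement-name parsing used by compressionengine.fromrepo above; the `requirements` set here is a hypothetical stand-in for repo.requirements:

# The engine name is whatever follows the second b'-' in a
# revlog-compression-* or exp-compression-* requirement.
requirements = {b'generaldelta', b'revlog-compression-zstd', b'store'}
compression = b'zlib'  # the default when no such requirement is present
for req in requirements:
    prefix = req.startswith
    if prefix(b'revlog-compression-') or prefix(b'exp-compression-'):
        compression = req.split(b'-', 2)[2]
assert compression == b'zstd'
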
@@ -485,10 +485,10 @@
 # forms in script when comparing result is annoying enough to add
 # backward compatibility for a while.
 legacy_opts_map = {
-    'redeltaparent': 're-delta-parent',
-    'redeltamultibase': 're-delta-multibase',
-    'redeltaall': 're-delta-all',
-    'redeltafulladd': 're-delta-fulladd',
+    b'redeltaparent': b're-delta-parent',
+    b'redeltamultibase': b're-delta-multibase',
+    b'redeltaall': b're-delta-all',
+    b'redeltafulladd': b're-delta-fulladd',
 }
 
 
@@ -500,82 +500,82 @@
 
     optimizations.append(
         improvement(
-            name='re-delta-parent',
+            name=b're-delta-parent',
             type=optimisation,
             description=_(
-                'deltas within internal storage will be recalculated to '
-                'choose an optimal base revision where this was not '
-                'already done; the size of the repository may shrink and '
-                'various operations may become faster; the first time '
-                'this optimization is performed could slow down upgrade '
-                'execution considerably; subsequent invocations should '
-                'not run noticeably slower'
+                b'deltas within internal storage will be recalculated to '
+                b'choose an optimal base revision where this was not '
+                b'already done; the size of the repository may shrink and '
+                b'various operations may become faster; the first time '
+                b'this optimization is performed could slow down upgrade '
+                b'execution considerably; subsequent invocations should '
+                b'not run noticeably slower'
             ),
             upgrademessage=_(
-                'deltas within internal storage will choose a new '
-                'base revision if needed'
+                b'deltas within internal storage will choose a new '
+                b'base revision if needed'
             ),
         )
     )
 
     optimizations.append(
         improvement(
-            name='re-delta-multibase',
+            name=b're-delta-multibase',
             type=optimisation,
             description=_(
-                'deltas within internal storage will be recalculated '
-                'against multiple base revision and the smallest '
-                'difference will be used; the size of the repository may '
-                'shrink significantly when there are many merges; this '
-                'optimization will slow down execution in proportion to '
-                'the number of merges in the repository and the amount '
-                'of files in the repository; this slow down should not '
-                'be significant unless there are tens of thousands of '
-                'files and thousands of merges'
+                b'deltas within internal storage will be recalculated '
+                b'against multiple base revisions and the smallest '
+                b'difference will be used; the size of the repository may '
+                b'shrink significantly when there are many merges; this '
+                b'optimization will slow down execution in proportion to '
+                b'the number of merges in the repository and the number '
+                b'of files in the repository; this slowdown should not '
+                b'be significant unless there are tens of thousands of '
+                b'files and thousands of merges'
             ),
             upgrademessage=_(
-                'deltas within internal storage will choose an '
-                'optimal delta by computing deltas against multiple '
-                'parents; may slow down execution time '
-                'significantly'
+                b'deltas within internal storage will choose an '
+                b'optimal delta by computing deltas against multiple '
+                b'parents; may slow down execution time '
+                b'significantly'
             ),
         )
     )
 
     optimizations.append(
         improvement(
-            name='re-delta-all',
+            name=b're-delta-all',
             type=optimisation,
             description=_(
-                'deltas within internal storage will always be '
-                'recalculated without reusing prior deltas; this will '
-                'likely make execution run several times slower; this '
-                'optimization is typically not needed'
+                b'deltas within internal storage will always be '
+                b'recalculated without reusing prior deltas; this will '
+                b'likely make execution run several times slower; this '
+                b'optimization is typically not needed'
             ),
             upgrademessage=_(
-                'deltas within internal storage will be fully '
-                'recomputed; this will likely drastically slow down '
-                'execution time'
+                b'deltas within internal storage will be fully '
+                b'recomputed; this will likely drastically slow down '
+                b'execution time'
             ),
         )
     )
 
     optimizations.append(
         improvement(
-            name='re-delta-fulladd',
+            name=b're-delta-fulladd',
             type=optimisation,
             description=_(
-                'every revision will be re-added as if it was new '
-                'content. It will go through the full storage '
-                'mechanism giving extensions a chance to process it '
-                '(eg. lfs). This is similar to "re-delta-all" but even '
-                'slower since more logic is involved.'
+                b'every revision will be re-added as if it was new '
+                b'content. It will go through the full storage '
+                b'mechanism giving extensions a chance to process it '
+                b'(e.g. lfs). This is similar to "re-delta-all" but even '
+                b'slower since more logic is involved.'
             ),
             upgrademessage=_(
-                'each revision will be added as new content to the '
-                'internal storage; this will likely drastically slow '
-                'down execution time, but some extensions might need '
-                'it'
+                b'each revision will be added as new content to the '
+                b'internal storage; this will likely drastically slow '
+                b'down execution time, but some extensions might need '
+                b'it'
             ),
         )
     )
@@ -621,10 +621,10 @@
 
     An instance of the appropriate class is returned.
     """
-    if path == '00changelog.i':
+    if path == b'00changelog.i':
         return changelog.changelog(repo.svfs)
-    elif path.endswith('00manifest.i'):
-        mandir = path[: -len('00manifest.i')]
+    elif path.endswith(b'00manifest.i'):
+        mandir = path[: -len(b'00manifest.i')]
         return manifest.manifestrevlog(repo.svfs, tree=mandir)
     else:
         # reverse of "/".join(("data", path + ".i"))
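As a hedged aside, the else: branch above undoes the store-path encoding. A standalone sketch of that round trip (simplified; the real function also handles the changelog and manifest cases shown above):

# A tracked file's revlog index lives under "data/<path>.i" in the store.
filepath = b'foo/bar.txt'
storepath = b'/'.join((b'data', filepath + b'.i'))
assert storepath == b'data/foo/bar.txt.i'
# Reverse of "/".join(("data", path + ".i")): strip the prefix and suffix.
recovered = storepath[len(b'data/'):-len(b'.i')]
assert recovered == filepath
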
@@ -649,7 +649,7 @@
     olddata = oldvfs.join(oldrl.datafile)
     newdata = newvfs.join(newrl.datafile)
 
-    with newvfs(newrl.indexfile, 'w'):
+    with newvfs(newrl.indexfile, b'w'):
         pass  # create all the directories
 
     util.copyfile(oldindex, newindex)
@@ -658,12 +658,12 @@
         util.copyfile(olddata, newdata)
 
     if not (
-        unencodedname.endswith('00changelog.i')
-        or unencodedname.endswith('00manifest.i')
+        unencodedname.endswith(b'00changelog.i')
+        or unencodedname.endswith(b'00manifest.i')
     ):
         destrepo.svfs.fncache.add(unencodedname)
         if copydata:
-            destrepo.svfs.fncache.add(unencodedname[:-2] + '.d')
+            destrepo.svfs.fncache.add(unencodedname[:-2] + b'.d')
 
 
 UPGRADE_CHANGELOG = object()
@@ -679,9 +679,9 @@
     """check is a revlog is selected for cloning
 
     The store entry is checked against the passed filter"""
-    if entry.endswith('00changelog.i'):
+    if entry.endswith(b'00changelog.i'):
         return UPGRADE_CHANGELOG in revlogfilter
-    elif entry.endswith('00manifest.i'):
+    elif entry.endswith(b'00manifest.i'):
         return UPGRADE_MANIFEST in revlogfilter
     return UPGRADE_FILELOG in revlogfilter
 
@@ -720,7 +720,7 @@
     # Perform a pass to collect metadata. This validates we can open all
     # source files and allows a unified progress bar to be displayed.
     for unencoded, encoded, size in alldatafiles:
-        if unencoded.endswith('.d'):
+        if unencoded.endswith(b'.d'):
             continue
 
         rl = _revlogfrompath(srcrepo, unencoded)
@@ -732,9 +732,9 @@
             storedsize=True,
         )
 
-        revcount += info['revisionscount'] or 0
-        datasize = info['storedsize'] or 0
-        rawsize = info['trackedsize'] or 0
+        revcount += info[b'revisionscount'] or 0
+        datasize = info[b'storedsize'] or 0
+        rawsize = info[b'trackedsize'] or 0
 
         srcsize += datasize
         srcrawsize += rawsize
@@ -755,20 +755,20 @@
             fsrcsize += datasize
             frawsize += rawsize
         else:
-            error.ProgrammingError('unknown revlog type')
+            raise error.ProgrammingError(b'unknown revlog type')
 
     if not revcount:
         return
 
     ui.write(
         _(
-            'migrating %d total revisions (%d in filelogs, %d in manifests, '
-            '%d in changelog)\n'
+            b'migrating %d total revisions (%d in filelogs, %d in manifests, '
+            b'%d in changelog)\n'
         )
         % (revcount, frevcount, mrevcount, crevcount)
     )
     ui.write(
-        _('migrating %s in store; %s tracked data\n')
+        _(b'migrating %s in store; %s tracked data\n')
         % ((util.bytecount(srcsize), util.bytecount(srcrawsize)))
     )
 
@@ -782,24 +782,24 @@
     # FUTURE this operation can be farmed off to worker processes.
     seen = set()
     for unencoded, encoded, size in alldatafiles:
-        if unencoded.endswith('.d'):
+        if unencoded.endswith(b'.d'):
             continue
 
         oldrl = _revlogfrompath(srcrepo, unencoded)
 
-        if isinstance(oldrl, changelog.changelog) and 'c' not in seen:
+        if isinstance(oldrl, changelog.changelog) and b'c' not in seen:
             ui.write(
                 _(
-                    'finished migrating %d manifest revisions across %d '
-                    'manifests; change in size: %s\n'
+                    b'finished migrating %d manifest revisions across %d '
+                    b'manifests; change in size: %s\n'
                 )
                 % (mrevcount, mcount, util.bytecount(mdstsize - msrcsize))
             )
 
             ui.write(
                 _(
-                    'migrating changelog containing %d revisions '
-                    '(%s in store; %s tracked data)\n'
+                    b'migrating changelog containing %d revisions '
+                    b'(%s in store; %s tracked data)\n'
                 )
                 % (
                     crevcount,
@@ -807,23 +807,23 @@
                     util.bytecount(crawsize),
                 )
             )
-            seen.add('c')
+            seen.add(b'c')
             progress = srcrepo.ui.makeprogress(
-                _('changelog revisions'), total=crevcount
+                _(b'changelog revisions'), total=crevcount
             )
-        elif isinstance(oldrl, manifest.manifestrevlog) and 'm' not in seen:
+        elif isinstance(oldrl, manifest.manifestrevlog) and b'm' not in seen:
             ui.write(
                 _(
-                    'finished migrating %d filelog revisions across %d '
-                    'filelogs; change in size: %s\n'
+                    b'finished migrating %d filelog revisions across %d '
+                    b'filelogs; change in size: %s\n'
                 )
                 % (frevcount, fcount, util.bytecount(fdstsize - fsrcsize))
             )
 
             ui.write(
                 _(
-                    'migrating %d manifests containing %d revisions '
-                    '(%s in store; %s tracked data)\n'
+                    b'migrating %d manifests containing %d revisions '
+                    b'(%s in store; %s tracked data)\n'
                 )
                 % (
                     mcount,
@@ -832,17 +832,17 @@
                     util.bytecount(mrawsize),
                 )
             )
-            seen.add('m')
+            seen.add(b'm')
             if progress:
                 progress.complete()
             progress = srcrepo.ui.makeprogress(
-                _('manifest revisions'), total=mrevcount
+                _(b'manifest revisions'), total=mrevcount
             )
-        elif 'f' not in seen:
+        elif b'f' not in seen:
             ui.write(
                 _(
-                    'migrating %d filelogs containing %d revisions '
-                    '(%s in store; %s tracked data)\n'
+                    b'migrating %d filelogs containing %d revisions '
+                    b'(%s in store; %s tracked data)\n'
                 )
                 % (
                     fcount,
@@ -851,16 +851,16 @@
                     util.bytecount(frawsize),
                 )
             )
-            seen.add('f')
+            seen.add(b'f')
             if progress:
                 progress.complete()
             progress = srcrepo.ui.makeprogress(
-                _('file revisions'), total=frevcount
+                _(b'file revisions'), total=frevcount
             )
 
         if matchrevlog(revlogs, unencoded):
             ui.note(
-                _('cloning %d revisions from %s\n') % (len(oldrl), unencoded)
+                _(b'cloning %d revisions from %s\n') % (len(oldrl), unencoded)
             )
             newrl = _revlogfrompath(dstrepo, unencoded)
             oldrl.clone(
@@ -871,14 +871,14 @@
                 forcedeltabothparents=forcedeltabothparents,
             )
         else:
-            msg = _('blindly copying %s containing %i revisions\n')
+            msg = _(b'blindly copying %s containing %i revisions\n')
             ui.note(msg % (unencoded, len(oldrl)))
             _copyrevlog(tr, dstrepo, oldrl, unencoded)
 
             newrl = _revlogfrompath(dstrepo, unencoded)
 
         info = newrl.storageinfo(storedsize=True)
-        datasize = info['storedsize'] or 0
+        datasize = info[b'storedsize'] or 0
 
         dstsize += datasize
 
@@ -892,14 +892,17 @@
     progress.complete()
 
     ui.write(
-        _('finished migrating %d changelog revisions; change in size: ' '%s\n')
+        _(
+            b'finished migrating %d changelog revisions; change in size: '
+            b'%s\n'
+        )
         % (crevcount, util.bytecount(cdstsize - csrcsize))
     )
 
     ui.write(
         _(
-            'finished migrating %d total revisions; total change in store '
-            'size: %s\n'
+            b'finished migrating %d total revisions; total change in store '
+            b'size: %s\n'
         )
         % (revcount, util.bytecount(dstsize - srcsize))
     )
@@ -922,16 +925,16 @@
     Function should return ``True`` if the file is to be copied.
     """
     # Skip revlogs.
-    if path.endswith(('.i', '.d')):
+    if path.endswith((b'.i', b'.d')):
         return False
     # Skip transaction related files.
-    if path.startswith('undo'):
+    if path.startswith(b'undo'):
         return False
     # Only copy regular files.
     if mode != stat.S_IFREG:
         return False
     # Skip other skipped files.
-    if path in ('lock', 'fncache'):
+    if path in (b'lock', b'fncache'):
         return False
 
     return True
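
A hedged sketch of how the predicate above behaves; None placeholders stand in for the repo, requirements, and stat arguments, which the checks shown do not consult:

import stat

entries = [
    (b'00changelog.i', stat.S_IFREG),     # revlog: skipped
    (b'undo.backupfiles', stat.S_IFREG),  # transaction file: skipped
    (b'lock', stat.S_IFREG),              # explicitly skipped
    (b'phaseroots', stat.S_IFREG),        # ordinary store file: copied
]
copied = [
    p for p, mode in entries
    if _filterstorefile(None, None, None, p, mode, None)
]
assert copied == [b'phaseroots']
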
@@ -962,53 +965,53 @@
 
     ui.write(
         _(
-            '(it is safe to interrupt this process any time before '
-            'data migration completes)\n'
+            b'(it is safe to interrupt this process any time before '
+            b'data migration completes)\n'
         )
     )
 
-    if 're-delta-all' in actions:
+    if b're-delta-all' in actions:
         deltareuse = revlog.revlog.DELTAREUSENEVER
-    elif 're-delta-parent' in actions:
+    elif b're-delta-parent' in actions:
         deltareuse = revlog.revlog.DELTAREUSESAMEREVS
-    elif 're-delta-multibase' in actions:
+    elif b're-delta-multibase' in actions:
         deltareuse = revlog.revlog.DELTAREUSESAMEREVS
-    elif 're-delta-fulladd' in actions:
+    elif b're-delta-fulladd' in actions:
         deltareuse = revlog.revlog.DELTAREUSEFULLADD
     else:
         deltareuse = revlog.revlog.DELTAREUSEALWAYS
 
-    with dstrepo.transaction('upgrade') as tr:
+    with dstrepo.transaction(b'upgrade') as tr:
         _clonerevlogs(
             ui,
             srcrepo,
             dstrepo,
             tr,
             deltareuse,
-            're-delta-multibase' in actions,
+            b're-delta-multibase' in actions,
             revlogs=revlogs,
         )
 
     # Now copy other files in the store directory.
     # The sorted() makes execution deterministic.
-    for p, kind, st in sorted(srcrepo.store.vfs.readdir('', stat=True)):
+    for p, kind, st in sorted(srcrepo.store.vfs.readdir(b'', stat=True)):
         if not _filterstorefile(srcrepo, dstrepo, requirements, p, kind, st):
             continue
 
-        srcrepo.ui.write(_('copying %s\n') % p)
+        srcrepo.ui.write(_(b'copying %s\n') % p)
         src = srcrepo.store.rawvfs.join(p)
         dst = dstrepo.store.rawvfs.join(p)
         util.copyfile(src, dst, copystat=True)
 
     _finishdatamigration(ui, srcrepo, dstrepo, requirements)
 
-    ui.write(_('data fully migrated to temporary repository\n'))
+    ui.write(_(b'data fully migrated to temporary repository\n'))
 
-    backuppath = pycompat.mkdtemp(prefix='upgradebackup.', dir=srcrepo.path)
+    backuppath = pycompat.mkdtemp(prefix=b'upgradebackup.', dir=srcrepo.path)
     backupvfs = vfsmod.vfs(backuppath)
 
     # Make a backup of requires file first, as it is the first to be modified.
-    util.copyfile(srcrepo.vfs.join('requires'), backupvfs.join('requires'))
+    util.copyfile(srcrepo.vfs.join(b'requires'), backupvfs.join(b'requires'))
 
     # We install an arbitrary requirement that clients must not support
     # as a mechanism to lock out new clients during the data swap. This is
@@ -1016,29 +1019,29 @@
     # an inconsistent state.
     ui.write(
         _(
-            'marking source repository as being upgraded; clients will be '
-            'unable to read from repository\n'
+            b'marking source repository as being upgraded; clients will be '
+            b'unable to read from repository\n'
         )
     )
     scmutil.writerequires(
-        srcrepo.vfs, srcrepo.requirements | {'upgradeinprogress'}
+        srcrepo.vfs, srcrepo.requirements | {b'upgradeinprogress'}
     )
 
-    ui.write(_('starting in-place swap of repository data\n'))
-    ui.write(_('replaced files will be backed up at %s\n') % backuppath)
+    ui.write(_(b'starting in-place swap of repository data\n'))
+    ui.write(_(b'replaced files will be backed up at %s\n') % backuppath)
 
     # Now swap in the new store directory. Doing it as a rename should make
     # the operation nearly instantaneous and atomic (at least in well-behaved
     # environments).
-    ui.write(_('replacing store...\n'))
+    ui.write(_(b'replacing store...\n'))
     tstart = util.timer()
-    util.rename(srcrepo.spath, backupvfs.join('store'))
+    util.rename(srcrepo.spath, backupvfs.join(b'store'))
     util.rename(dstrepo.spath, srcrepo.spath)
     elapsed = util.timer() - tstart
     ui.write(
         _(
-            'store replacement complete; repository was inconsistent for '
-            '%0.1fs\n'
+            b'store replacement complete; repository was inconsistent for '
+            b'%0.1fs\n'
         )
         % elapsed
     )
@@ -1047,8 +1050,8 @@
     # out legacy clients.
     ui.write(
         _(
-            'finalizing requirements file and making repository readable '
-            'again\n'
+            b'finalizing requirements file and making repository readable '
+            b'again\n'
         )
     )
     scmutil.writerequires(srcrepo.vfs, requirements)
@@ -1057,7 +1060,7 @@
     # reference to its new location. So clean it up manually. Alternatively, we
     # could update srcrepo.svfs and other variables to point to the new
     # location. This is simpler.
-    backupvfs.unlink('store/lock')
+    backupvfs.unlink(b'store/lock')
 
     return backuppath
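
The in-place swap above leans on directory renames being cheap and near-atomic on the same filesystem. A minimal standalone sketch of the same pattern, with hypothetical paths and plain os.rename instead of util.rename:

import os

def swapstore(storepath, newstorepath, backuppath):
    # Move the live store aside, then move the rebuilt store into place.
    # Each step is one rename, so the repository is inconsistent only
    # between the two calls; backuppath must already exist on the same
    # filesystem for the renames to stay cheap.
    os.rename(storepath, os.path.join(backuppath, 'store'))
    os.rename(newstorepath, storepath)
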
 
@@ -1078,7 +1081,7 @@
     repo = repo.unfiltered()
 
     revlogs = set(UPGRADE_ALL_REVLOGS)
-    specentries = (('c', changelog), ('m', manifest))
+    specentries = ((b'c', changelog), (b'm', manifest))
     specified = [(y, x) for (y, x) in specentries if x is not None]
     if specified:
         # we have some limitation on revlogs to be recloned
@@ -1086,34 +1089,34 @@
             revlogs = set()
             for r, enabled in specified:
                 if enabled:
-                    if r == 'c':
+                    if r == b'c':
                         revlogs.add(UPGRADE_CHANGELOG)
-                    elif r == 'm':
+                    elif r == b'm':
                         revlogs.add(UPGRADE_MANIFEST)
         else:
             # none are enabled
             for r, __ in specified:
-                if r == 'c':
+                if r == b'c':
                     revlogs.discard(UPGRADE_CHANGELOG)
-                elif r == 'm':
+                elif r == b'm':
                     revlogs.discard(UPGRADE_MANIFEST)
 
     # Ensure the repository can be upgraded.
     missingreqs = requiredsourcerequirements(repo) - repo.requirements
     if missingreqs:
         raise error.Abort(
-            _('cannot upgrade repository; requirement ' 'missing: %s')
-            % _(', ').join(sorted(missingreqs))
+            _(b'cannot upgrade repository; requirement missing: %s')
+            % _(b', ').join(sorted(missingreqs))
         )
 
     blockedreqs = blocksourcerequirements(repo) & repo.requirements
     if blockedreqs:
         raise error.Abort(
             _(
-                'cannot upgrade repository; unsupported source '
-                'requirement: %s'
+                b'cannot upgrade repository; unsupported source '
+                b'requirement: %s'
             )
-            % _(', ').join(sorted(blockedreqs))
+            % _(b', ').join(sorted(blockedreqs))
         )
 
     # FUTURE there is potentially a need to control the wanted requirements via
@@ -1128,28 +1131,31 @@
     )
     if noremovereqs:
         raise error.Abort(
-            _('cannot upgrade repository; requirement would be ' 'removed: %s')
-            % _(', ').join(sorted(noremovereqs))
+            _(
+                b'cannot upgrade repository; requirement would be '
+                b'removed: %s'
+            )
+            % _(b', ').join(sorted(noremovereqs))
         )
 
     noaddreqs = newreqs - repo.requirements - allowednewrequirements(repo)
     if noaddreqs:
         raise error.Abort(
             _(
-                'cannot upgrade repository; do not support adding '
-                'requirement: %s'
+                b'cannot upgrade repository; do not support adding '
+                b'requirement: %s'
             )
-            % _(', ').join(sorted(noaddreqs))
+            % _(b', ').join(sorted(noaddreqs))
         )
 
     unsupportedreqs = newreqs - supporteddestrequirements(repo)
     if unsupportedreqs:
         raise error.Abort(
             _(
-                'cannot upgrade repository; do not support '
-                'destination requirement: %s'
+                b'cannot upgrade repository; do not support '
+                b'destination requirement: %s'
             )
-            % _(', ').join(sorted(unsupportedreqs))
+            % _(b', ').join(sorted(unsupportedreqs))
         )
 
     # Find and validate all improvements that can be made.
@@ -1164,9 +1170,9 @@
 
     if optimize:  # anything left is unknown
         raise error.Abort(
-            _('unknown optimization action requested: %s')
-            % ', '.join(sorted(optimize)),
-            hint=_('run without arguments to see valid ' 'optimizations'),
+            _(b'unknown optimization action requested: %s')
+            % b', '.join(sorted(optimize)),
+            hint=_(b'run without arguments to see valid optimizations'),
         )
 
     deficiencies = finddeficiencies(repo)
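
A hedged sketch (reusing legacy_opts_map from earlier in this file) of how the deprecated optimization spellings can be normalized before the unknown-name check above; `requested` is hypothetical user input:

requested = {b'redeltaparent', b're-delta-all'}
# Map legacy names onto modern ones, leaving modern names untouched.
optimize = {legacy_opts_map.get(o, o) for o in requested}
assert optimize == {b're-delta-parent', b're-delta-all'}
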
@@ -1185,36 +1191,36 @@
         incompatible = RECLONES_REQUIREMENTS & (removedreqs | addedreqs)
         if incompatible:
             msg = _(
-                'ignoring revlogs selection flags, format requirements '
-                'change: %s\n'
+                b'ignoring revlog selection flags, format requirements '
+                b'change: %s\n'
             )
-            ui.warn(msg % ', '.join(sorted(incompatible)))
+            ui.warn(msg % b', '.join(sorted(incompatible)))
             revlogs = UPGRADE_ALL_REVLOGS
 
     def printrequirements():
-        ui.write(_('requirements\n'))
+        ui.write(_(b'requirements\n'))
         ui.write(
-            _('   preserved: %s\n')
-            % _(', ').join(sorted(newreqs & repo.requirements))
+            _(b'   preserved: %s\n')
+            % _(b', ').join(sorted(newreqs & repo.requirements))
         )
 
         if repo.requirements - newreqs:
             ui.write(
-                _('   removed: %s\n')
-                % _(', ').join(sorted(repo.requirements - newreqs))
+                _(b'   removed: %s\n')
+                % _(b', ').join(sorted(repo.requirements - newreqs))
             )
 
         if newreqs - repo.requirements:
             ui.write(
-                _('   added: %s\n')
-                % _(', ').join(sorted(newreqs - repo.requirements))
+                _(b'   added: %s\n')
+                % _(b', ').join(sorted(newreqs - repo.requirements))
             )
 
-        ui.write('\n')
+        ui.write(b'\n')
 
     def printupgradeactions():
         for a in actions:
-            ui.write('%s\n   %s\n\n' % (a.name, a.upgrademessage))
+            ui.write(b'%s\n   %s\n\n' % (a.name, a.upgrademessage))
 
     if not run:
         fromconfig = []
@@ -1231,33 +1237,36 @@
             if fromconfig:
                 ui.write(
                     _(
-                        'repository lacks features recommended by '
-                        'current config options:\n\n'
+                        b'repository lacks features recommended by '
+                        b'current config options:\n\n'
                     )
                 )
                 for i in fromconfig:
-                    ui.write('%s\n   %s\n\n' % (i.name, i.description))
+                    ui.write(b'%s\n   %s\n\n' % (i.name, i.description))
 
             if onlydefault:
                 ui.write(
                     _(
-                        'repository lacks features used by the default '
-                        'config options:\n\n'
+                        b'repository lacks features used by the default '
+                        b'config options:\n\n'
                     )
                 )
                 for i in onlydefault:
-                    ui.write('%s\n   %s\n\n' % (i.name, i.description))
+                    ui.write(b'%s\n   %s\n\n' % (i.name, i.description))
 
-            ui.write('\n')
+            ui.write(b'\n')
         else:
             ui.write(
-                _('(no feature deficiencies found in existing ' 'repository)\n')
+                _(
+                    b'(no feature deficiencies found in existing '
+                    b'repository)\n'
+                )
             )
 
         ui.write(
             _(
-                'performing an upgrade with "--run" will make the following '
-                'changes:\n\n'
+                b'performing an upgrade with "--run" will make the following '
+                b'changes:\n\n'
             )
         )
 
@@ -1269,36 +1278,36 @@
         if unusedoptimize:
             ui.write(
                 _(
-                    'additional optimizations are available by specifying '
-                    '"--optimize <name>":\n\n'
+                    b'additional optimizations are available by specifying '
+                    b'"--optimize <name>":\n\n'
                 )
             )
             for i in unusedoptimize:
-                ui.write(_('%s\n   %s\n\n') % (i.name, i.description))
+                ui.write(_(b'%s\n   %s\n\n') % (i.name, i.description))
         return
 
     # Else we're in the run=true case.
-    ui.write(_('upgrade will perform the following actions:\n\n'))
+    ui.write(_(b'upgrade will perform the following actions:\n\n'))
     printrequirements()
     printupgradeactions()
 
     upgradeactions = [a.name for a in actions]
 
-    ui.write(_('beginning upgrade...\n'))
+    ui.write(_(b'beginning upgrade...\n'))
     with repo.wlock(), repo.lock():
-        ui.write(_('repository locked and read-only\n'))
+        ui.write(_(b'repository locked and read-only\n'))
         # Our strategy for upgrading the repository is to create a new,
         # temporary repository, write data to it, then do a swap of the
         # data. There are lighter-weight ways to do this, but it is easier
         # to create a new repo object than to instantiate all the components
         # (like the store) separately.
-        tmppath = pycompat.mkdtemp(prefix='upgrade.', dir=repo.path)
+        tmppath = pycompat.mkdtemp(prefix=b'upgrade.', dir=repo.path)
         backuppath = None
         try:
             ui.write(
                 _(
-                    'creating temporary repository to stage migrated '
-                    'data: %s\n'
+                    b'creating temporary repository to stage migrated '
+                    b'data: %s\n'
                 )
                 % tmppath
             )
@@ -1312,22 +1321,22 @@
                     ui, repo, dstrepo, newreqs, upgradeactions, revlogs=revlogs
                 )
             if not (backup or backuppath is None):
-                ui.write(_('removing old repository content%s\n') % backuppath)
+                ui.write(_(b'removing old repository content %s\n') % backuppath)
                 repo.vfs.rmtree(backuppath, forcibly=True)
                 backuppath = None
 
         finally:
-            ui.write(_('removing temporary repository %s\n') % tmppath)
+            ui.write(_(b'removing temporary repository %s\n') % tmppath)
             repo.vfs.rmtree(tmppath, forcibly=True)
 
             if backuppath:
                 ui.warn(
-                    _('copy of old repository backed up at %s\n') % backuppath
+                    _(b'copy of old repository backed up at %s\n') % backuppath
                 )
                 ui.warn(
                     _(
-                        'the old repository will not be deleted; remove '
-                        'it to free up disk space once the upgraded '
-                        'repository is verified\n'
+                        b'the old repository will not be deleted; remove '
+                        b'it to free up disk space once the upgraded '
+                        b'repository is verified\n'
                     )
                 )
--- a/mercurial/url.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/url.py	Sun Oct 06 09:48:39 2019 -0400
@@ -71,26 +71,26 @@
             res = httpconnectionmod.readauthforuri(self.ui, authuri, user)
             if res:
                 group, auth = res
-                user, passwd = auth.get('username'), auth.get('password')
-                self.ui.debug("using auth.%s.* for authentication\n" % group)
+                user, passwd = auth.get(b'username'), auth.get(b'password')
+                self.ui.debug(b"using auth.%s.* for authentication\n" % group)
         if not user or not passwd:
             u = util.url(pycompat.bytesurl(authuri))
             u.query = None
             if not self.ui.interactive():
                 raise error.Abort(
-                    _('http authorization required for %s')
+                    _(b'http authorization required for %s')
                     % util.hidepassword(bytes(u))
                 )
 
             self.ui.write(
-                _("http authorization required for %s\n")
+                _(b"http authorization required for %s\n")
                 % util.hidepassword(bytes(u))
             )
-            self.ui.write(_("realm: %s\n") % pycompat.bytesurl(realm))
+            self.ui.write(_(b"realm: %s\n") % pycompat.bytesurl(realm))
             if user:
-                self.ui.write(_("user: %s\n") % user)
+                self.ui.write(_(b"user: %s\n") % user)
             else:
-                user = self.ui.prompt(_("user:"), default=None)
+                user = self.ui.prompt(_(b"user:"), default=None)
 
             if not passwd:
                 passwd = self.ui.getpass()
@@ -100,8 +100,8 @@
         return (pycompat.strurl(user), pycompat.strurl(passwd))
 
     def _writedebug(self, user, passwd):
-        msg = _('http auth: user %s, password %s\n')
-        self.ui.debug(msg % (user, passwd and '*' * len(passwd) or 'not set'))
+        msg = _(b'http auth: user %s, password %s\n')
+        self.ui.debug(msg % (user, passwd and b'*' * len(passwd) or b'not set'))
 
     def find_stored_password(self, authuri):
         return self.passwddb.find_user_password(None, authuri)
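
The masking expression in _writedebug above is the pre-ternary and/or idiom. A tiny standalone check of what it produces:

for passwd in (b'hunter2', None):
    masked = passwd and b'*' * len(passwd) or b'not set'
    print(masked)  # b'*******' for a set password, b'not set' otherwise
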
@@ -109,36 +109,36 @@
 
 class proxyhandler(urlreq.proxyhandler):
     def __init__(self, ui):
-        proxyurl = ui.config("http_proxy", "host") or encoding.environ.get(
-            'http_proxy'
+        proxyurl = ui.config(b"http_proxy", b"host") or encoding.environ.get(
+            b'http_proxy'
         )
         # XXX proxyauthinfo = None
 
         if proxyurl:
             # proxy can be proper url or host[:port]
             if not (
-                proxyurl.startswith('http:') or proxyurl.startswith('https:')
+                proxyurl.startswith(b'http:') or proxyurl.startswith(b'https:')
             ):
-                proxyurl = 'http://' + proxyurl + '/'
+                proxyurl = b'http://' + proxyurl + b'/'
             proxy = util.url(proxyurl)
             if not proxy.user:
-                proxy.user = ui.config("http_proxy", "user")
-                proxy.passwd = ui.config("http_proxy", "passwd")
+                proxy.user = ui.config(b"http_proxy", b"user")
+                proxy.passwd = ui.config(b"http_proxy", b"passwd")
 
             # see if we should use a proxy for this url
-            no_list = ["localhost", "127.0.0.1"]
+            no_list = [b"localhost", b"127.0.0.1"]
             no_list.extend(
-                [p.lower() for p in ui.configlist("http_proxy", "no")]
+                [p.lower() for p in ui.configlist(b"http_proxy", b"no")]
             )
             no_list.extend(
                 [
                     p.strip().lower()
-                    for p in encoding.environ.get("no_proxy", '').split(',')
+                    for p in encoding.environ.get(b"no_proxy", b'').split(b',')
                     if p.strip()
                 ]
             )
             # "http_proxy.always" config is for running tests on localhost
-            if ui.configbool("http_proxy", "always"):
+            if ui.configbool(b"http_proxy", b"always"):
                 self.no_list = []
             else:
                 self.no_list = no_list
@@ -147,7 +147,7 @@
             # expects them to be.
             proxyurl = str(proxy)
             proxies = {r'http': proxyurl, r'https': proxyurl}
-            ui.debug('proxying through %s\n' % util.hidepassword(bytes(proxy)))
+            ui.debug(b'proxying through %s\n' % util.hidepassword(bytes(proxy)))
         else:
             proxies = {}
 
@@ -155,13 +155,13 @@
         self.ui = ui
 
     def proxy_open(self, req, proxy, type_):
-        host = pycompat.bytesurl(urllibcompat.gethost(req)).split(':')[0]
+        host = pycompat.bytesurl(urllibcompat.gethost(req)).split(b':')[0]
         for e in self.no_list:
             if host == e:
                 return None
-            if e.startswith('*.') and host.endswith(e[2:]):
+            if e.startswith(b'*.') and host.endswith(e[2:]):
                 return None
-            if e.startswith('.') and host.endswith(e[1:]):
+            if e.startswith(b'.') and host.endswith(e[1:]):
                 return None
 
         return urlreq.proxyhandler.proxy_open(self, req, proxy, type_)
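
A standalone sketch of the three bypass rules applied in proxy_open above (exact host, b'*.' wildcard, bare b'.' suffix), with made-up hosts; `bypassproxy` is a hypothetical helper, not code from this module:

def bypassproxy(host, no_list):
    # Mirror the checks above: exact match, then the two suffix forms.
    for e in no_list:
        if host == e:
            return True
        if e.startswith(b'*.') and host.endswith(e[2:]):
            return True
        if e.startswith(b'.') and host.endswith(e[1:]):
            return True
    return False

no_list = [b'localhost', b'*.example.com']
assert bypassproxy(b'localhost', no_list)
assert bypassproxy(b'hg.example.com', no_list)
assert not bypassproxy(b'example.org', no_list)
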
@@ -181,7 +181,7 @@
     return _sendfile
 
 
-has_https = util.safehasattr(urlreq, 'httpshandler')
+has_https = util.safehasattr(urlreq, b'httpshandler')
 
 
 class httpconnection(keepalive.HTTPConnection):
@@ -212,8 +212,8 @@
 
     if new_tunnel or tunnel_host == urllibcompat.getfullurl(req):  # has proxy
         u = util.url(pycompat.bytesurl(tunnel_host))
-        if new_tunnel or u.scheme == 'https':  # only use CONNECT for HTTPS
-            h.realhostport = ':'.join([u.host, (u.port or '443')])
+        if new_tunnel or u.scheme == b'https':  # only use CONNECT for HTTPS
+            h.realhostport = b':'.join([u.host, (u.port or b'443')])
             h.headers = req.headers.copy()
             h.headers.update(handler.parent.addheaders)
             return
@@ -230,10 +230,10 @@
             if x.lower().startswith(r'proxy-')
         ]
     )
-    self.send('CONNECT %s HTTP/1.0\r\n' % self.realhostport)
+    self.send(b'CONNECT %s HTTP/1.0\r\n' % self.realhostport)
     for header in proxyheaders.iteritems():
-        self.send('%s: %s\r\n' % header)
-    self.send('\r\n')
+        self.send(b'%s: %s\r\n' % header)
+    self.send(b'\r\n')
 
     # majority of the following code is duplicated from
     # httplib.HTTPConnection as there are no adequate places to
@@ -241,7 +241,7 @@
     # strict was removed in Python 3.4.
     kwargs = {}
     if not pycompat.ispy3:
-        kwargs['strict'] = self.strict
+        kwargs[b'strict'] = self.strict
 
     res = self.response_class(self.sock, method=self._method, **kwargs)
 
@@ -250,20 +250,20 @@
         if status != httplib.CONTINUE:
             break
         # skip lines that are all whitespace
-        list(iter(lambda: res.fp.readline().strip(), ''))
+        list(iter(lambda: res.fp.readline().strip(), b''))
     res.status = status
     res.reason = reason.strip()
 
     if res.status == 200:
         # skip lines until we find a blank line
-        list(iter(res.fp.readline, '\r\n'))
+        list(iter(res.fp.readline, b'\r\n'))
         return True
 
-    if version == 'HTTP/1.0':
+    if version == b'HTTP/1.0':
         res.version = 10
-    elif version.startswith('HTTP/1.'):
+    elif version.startswith(b'HTTP/1.'):
         res.version = 11
-    elif version == 'HTTP/0.9':
+    elif version == b'HTTP/0.9':
         res.version = 9
     else:
         raise httplib.UnknownProtocol(version)
@@ -279,8 +279,8 @@
     res.msg.fp = None
 
     # are we using the chunked-style of transfer encoding?
-    trenc = res.msg.getheader('transfer-encoding')
-    if trenc and trenc.lower() == "chunked":
+    trenc = res.msg.getheader(b'transfer-encoding')
+    if trenc and trenc.lower() == b"chunked":
         res.chunked = 1
         res.chunk_left = None
     else:
@@ -292,7 +292,7 @@
     # do we have a Content-Length?
     # NOTE: RFC 2616, section 4.4, #3 says we ignore this if
     # transfer-encoding is "chunked"
-    length = res.msg.getheader('content-length')
+    length = res.msg.getheader(b'content-length')
     if length and not res.chunked:
         try:
             res.length = int(length)
@@ -309,7 +309,7 @@
         status == httplib.NO_CONTENT
         or status == httplib.NOT_MODIFIED
         or 100 <= status < 200
-        or res._method == 'HEAD'  # 1xx codes
+        or res._method == b'HEAD'  # 1xx codes
     ):
         res.length = 0
 
@@ -403,7 +403,7 @@
             host = self.host
             if self.realhostport:  # use CONNECT proxy
                 _generic_proxytunnel(self)
-                host = self.realhostport.rsplit(':', 1)[0]
+                host = self.realhostport.rsplit(b':', 1)[0]
             self.sock = sslutil.wrapsocket(
                 self.sock,
                 self.key_file,
@@ -433,7 +433,7 @@
             if res:
                 group, auth = res
                 self.auth = auth
-                self.ui.debug("using auth.%s.* for authentication\n" % group)
+                self.ui.debug(b"using auth.%s.* for authentication\n" % group)
             else:
                 self.auth = None
             return self.do_open(self._makeconnection, req)
@@ -450,9 +450,9 @@
 
             # if the user has specified different key/cert files in
             # hgrc, we prefer these
-            if self.auth and 'key' in self.auth and 'cert' in self.auth:
-                keyfile = self.auth['key']
-                certfile = self.auth['cert']
+            if self.auth and b'key' in self.auth and b'cert' in self.auth:
+                keyfile = self.auth[b'key']
+                certfile = self.auth[b'cert']
 
             conn = httpsconnection(
                 host, port, keyfile, certfile, *args, **kwargs
@@ -520,7 +520,7 @@
             realm, urllibcompat.getfullurl(req)
         )
         if pw is not None:
-            raw = "%s:%s" % (pycompat.bytesurl(user), pycompat.bytesurl(pw))
+            raw = b"%s:%s" % (pycompat.bytesurl(user), pycompat.bytesurl(pw))
             auth = r'Basic %s' % pycompat.strurl(base64.b64encode(raw).strip())
             if req.get_header(self.auth_header, None) == auth:
                 return None
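
The raw/auth construction above is standard HTTP Basic credentials. A minimal standalone sketch with made-up credentials (bytes throughout, whereas the code above converts to str for the header):

import base64

user, pw = b'alice', b'secret'
raw = b"%s:%s" % (user, pw)
auth = b'Basic ' + base64.b64encode(raw).strip()
assert auth == b'Basic YWxpY2U6c2VjcmV0'
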
@@ -535,7 +535,7 @@
     def __init__(self, ui):
         self.cookiejar = None
 
-        cookiefile = ui.config('auth', 'cookiefile')
+        cookiefile = ui.config(b'auth', b'cookiefile')
         if not cookiefile:
             return
 
@@ -549,8 +549,8 @@
         except util.cookielib.LoadError as e:
             ui.warn(
                 _(
-                    '(error loading cookie file %s: %s; continuing without '
-                    'cookies)\n'
+                    b'(error loading cookie file %s: %s; continuing without '
+                    b'cookies)\n'
                 )
                 % (cookiefile, stringutil.forcebytestr(e))
             )
@@ -595,7 +595,7 @@
     ``sendaccept`` allows controlling whether the ``Accept`` request header
     is sent. The header is sent by default.
     '''
-    timeout = ui.configwith(float, 'http', 'timeout')
+    timeout = ui.configwith(float, b'http', b'timeout')
     handlers = []
 
     if loggingfh:
@@ -621,8 +621,8 @@
         if user != saveduser or passwd:
             passmgr.add_password(realm, uris, user, passwd)
         ui.debug(
-            'http auth: user %s, password %s\n'
-            % (user, passwd and '*' * len(passwd) or 'not set')
+            b'http auth: user %s, password %s\n'
+            % (user, passwd and b'*' * len(passwd) or b'not set')
         )
 
     handlers.extend(
@@ -653,7 +653,7 @@
     # The custom user agent is for lfs, because unfortunately some servers
     # do look at this value.
     if not useragent:
-        agent = 'mercurial/proto-1.0 (Mercurial %s)' % util.version()
+        agent = b'mercurial/proto-1.0 (Mercurial %s)' % util.version()
         opener.addheaders = [(r'User-agent', pycompat.sysstr(agent))]
     else:
         opener.addheaders = [(r'User-agent', pycompat.sysstr(useragent))]
@@ -675,7 +675,7 @@
         url_, authinfo = u.authinfo()
     else:
         path = util.normpath(os.path.abspath(url_))
-        url_ = 'file://' + pycompat.bytesurl(urlreq.pathname2url(path))
+        url_ = b'file://' + pycompat.bytesurl(urlreq.pathname2url(path))
         authinfo = None
     return opener(ui, authinfo, sendaccept=sendaccept).open(
         pycompat.strurl(url_), data
@@ -700,27 +700,27 @@
                     got = len(e.partial)
                     total = e.expected + got
                     msg = _(
-                        'HTTP request error (incomplete response; '
-                        'expected %d bytes got %d)'
+                        b'HTTP request error (incomplete response; '
+                        b'expected %d bytes got %d)'
                     ) % (total, got)
                 else:
-                    msg = _('HTTP request error (incomplete response)')
+                    msg = _(b'HTTP request error (incomplete response)')
 
                 raise error.PeerTransportError(
                     msg,
                     hint=_(
-                        'this may be an intermittent network failure; '
-                        'if the error persists, consider contacting the '
-                        'network or server operator'
+                        b'this may be an intermittent network failure; '
+                        b'if the error persists, consider contacting the '
+                        b'network or server operator'
                     ),
                 )
             except httplib.HTTPException as e:
                 raise error.PeerTransportError(
-                    _('HTTP request error (%s)') % e,
+                    _(b'HTTP request error (%s)') % e,
                     hint=_(
-                        'this may be an intermittent network failure; '
-                        'if the error persists, consider contacting the '
-                        'network or server operator'
+                        b'this may be an intermittent network failure; '
+                        b'if the error persists, consider contacting the '
+                        b'network or server operator'
                     ),
                 )
 
--- a/mercurial/urllibcompat.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/urllibcompat.py	Sun Oct 06 09:48:39 2019 -0400
@@ -45,56 +45,56 @@
     urlreq._registeraliases(
         urllib.parse,
         (
-            "splitattr",
-            "splitpasswd",
-            "splitport",
-            "splituser",
-            "urlparse",
-            "urlunparse",
+            b"splitattr",
+            b"splitpasswd",
+            b"splitport",
+            b"splituser",
+            b"urlparse",
+            b"urlunparse",
         ),
     )
-    urlreq._registeralias(urllib.parse, "parse_qs", "parseqs")
-    urlreq._registeralias(urllib.parse, "parse_qsl", "parseqsl")
-    urlreq._registeralias(urllib.parse, "unquote_to_bytes", "unquote")
+    urlreq._registeralias(urllib.parse, b"parse_qs", b"parseqs")
+    urlreq._registeralias(urllib.parse, b"parse_qsl", b"parseqsl")
+    urlreq._registeralias(urllib.parse, b"unquote_to_bytes", b"unquote")
     import urllib.request
 
     urlreq._registeraliases(
         urllib.request,
         (
-            "AbstractHTTPHandler",
-            "BaseHandler",
-            "build_opener",
-            "FileHandler",
-            "FTPHandler",
-            "ftpwrapper",
-            "HTTPHandler",
-            "HTTPSHandler",
-            "install_opener",
-            "pathname2url",
-            "HTTPBasicAuthHandler",
-            "HTTPDigestAuthHandler",
-            "HTTPPasswordMgrWithDefaultRealm",
-            "ProxyHandler",
-            "Request",
-            "url2pathname",
-            "urlopen",
+            b"AbstractHTTPHandler",
+            b"BaseHandler",
+            b"build_opener",
+            b"FileHandler",
+            b"FTPHandler",
+            b"ftpwrapper",
+            b"HTTPHandler",
+            b"HTTPSHandler",
+            b"install_opener",
+            b"pathname2url",
+            b"HTTPBasicAuthHandler",
+            b"HTTPDigestAuthHandler",
+            b"HTTPPasswordMgrWithDefaultRealm",
+            b"ProxyHandler",
+            b"Request",
+            b"url2pathname",
+            b"urlopen",
         ),
     )
     import urllib.response
 
-    urlreq._registeraliases(urllib.response, ("addclosehook", "addinfourl",))
+    urlreq._registeraliases(urllib.response, (b"addclosehook", b"addinfourl",))
     import urllib.error
 
-    urlerr._registeraliases(urllib.error, ("HTTPError", "URLError",))
+    urlerr._registeraliases(urllib.error, (b"HTTPError", b"URLError",))
     import http.server
 
     httpserver._registeraliases(
         http.server,
         (
-            "HTTPServer",
-            "BaseHTTPRequestHandler",
-            "SimpleHTTPRequestHandler",
-            "CGIHTTPRequestHandler",
+            b"HTTPServer",
+            b"BaseHTTPRequestHandler",
+            b"SimpleHTTPRequestHandler",
+            b"CGIHTTPRequestHandler",
         ),
     )
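
For context, a hedged sketch of one way such a lazy alias registry can work; this is an illustration, not the actual urlreq/_registeraliases implementation, whose details live outside this hunk:

class aliasregistry(object):
    """Record (origin, name) pairs and resolve them on attribute access."""

    def __init__(self):
        self._aliases = {}

    def _registeraliases(self, origin, items):
        # Accept bytes or str names, as the callers above pass bytes.
        for item in items:
            name = item.decode('ascii') if isinstance(item, bytes) else item
            self._aliases[name.lower()] = (origin, name)

    def __getattr__(self, name):
        try:
            origin, item = self._aliases[name]
        except KeyError:
            raise AttributeError(name)
        return getattr(origin, item)

import urllib.parse

urlreq_sketch = aliasregistry()
urlreq_sketch._registeraliases(urllib.parse, (b"urlparse", b"urlunparse"))
print(urlreq_sketch.urlparse('https://example.com/x'))
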
 
@@ -145,48 +145,50 @@
     urlreq._registeraliases(
         urllib,
         (
-            "addclosehook",
-            "addinfourl",
-            "ftpwrapper",
-            "pathname2url",
-            "quote",
-            "splitattr",
-            "splitpasswd",
-            "splitport",
-            "splituser",
-            "unquote",
-            "url2pathname",
-            "urlencode",
+            b"addclosehook",
+            b"addinfourl",
+            b"ftpwrapper",
+            b"pathname2url",
+            b"quote",
+            b"splitattr",
+            b"splitpasswd",
+            b"splitport",
+            b"splituser",
+            b"unquote",
+            b"url2pathname",
+            b"urlencode",
         ),
     )
     urlreq._registeraliases(
         urllib2,
         (
-            "AbstractHTTPHandler",
-            "BaseHandler",
-            "build_opener",
-            "FileHandler",
-            "FTPHandler",
-            "HTTPBasicAuthHandler",
-            "HTTPDigestAuthHandler",
-            "HTTPHandler",
-            "HTTPPasswordMgrWithDefaultRealm",
-            "HTTPSHandler",
-            "install_opener",
-            "ProxyHandler",
-            "Request",
-            "urlopen",
+            b"AbstractHTTPHandler",
+            b"BaseHandler",
+            b"build_opener",
+            b"FileHandler",
+            b"FTPHandler",
+            b"HTTPBasicAuthHandler",
+            b"HTTPDigestAuthHandler",
+            b"HTTPHandler",
+            b"HTTPPasswordMgrWithDefaultRealm",
+            b"HTTPSHandler",
+            b"install_opener",
+            b"ProxyHandler",
+            b"Request",
+            b"urlopen",
         ),
     )
-    urlreq._registeraliases(urlparse, ("urlparse", "urlunparse",))
-    urlreq._registeralias(urlparse, "parse_qs", "parseqs")
-    urlreq._registeralias(urlparse, "parse_qsl", "parseqsl")
-    urlerr._registeraliases(urllib2, ("HTTPError", "URLError",))
+    urlreq._registeraliases(urlparse, (b"urlparse", b"urlunparse",))
+    urlreq._registeralias(urlparse, b"parse_qs", b"parseqs")
+    urlreq._registeralias(urlparse, b"parse_qsl", b"parseqsl")
+    urlerr._registeraliases(urllib2, (b"HTTPError", b"URLError",))
     httpserver._registeraliases(
-        BaseHTTPServer, ("HTTPServer", "BaseHTTPRequestHandler",)
+        BaseHTTPServer, (b"HTTPServer", b"BaseHTTPRequestHandler",)
     )
-    httpserver._registeraliases(SimpleHTTPServer, ("SimpleHTTPRequestHandler",))
-    httpserver._registeraliases(CGIHTTPServer, ("CGIHTTPRequestHandler",))
+    httpserver._registeraliases(
+        SimpleHTTPServer, (b"SimpleHTTPRequestHandler",)
+    )
+    httpserver._registeraliases(CGIHTTPServer, (b"CGIHTTPRequestHandler",))
 
     def gethost(req):
         return req.get_host()
--- a/mercurial/util.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/util.py	Sun Oct 06 09:48:39 2019 -0400
@@ -151,7 +151,7 @@
 # python 2.6 still has deprecation warnings enabled by default. We do not want
 # to display anything to standard users, so detect if we are running tests and
 # only use python deprecation warnings in this case.
-_dowarn = bool(encoding.environ.get('HGEMITWARNINGS'))
+_dowarn = bool(encoding.environ.get(b'HGEMITWARNINGS'))
 if _dowarn:
     # explicitly unfilter our warning for python 2.7
     #
@@ -186,19 +186,19 @@
     """
     if _dowarn:
         msg += (
-            "\n(compatibility will be dropped after Mercurial-%s,"
-            " update your code.)"
+            b"\n(compatibility will be dropped after Mercurial-%s,"
+            b" update your code.)"
         ) % version
         warnings.warn(pycompat.sysstr(msg), DeprecationWarning, stacklevel + 1)
 
 
 DIGESTS = {
-    'md5': hashlib.md5,
-    'sha1': hashlib.sha1,
-    'sha512': hashlib.sha512,
+    b'md5': hashlib.md5,
+    b'sha1': hashlib.sha1,
+    b'sha512': hashlib.sha512,
 }
 # List of digest types from strongest to weakest
-DIGESTS_BY_STRENGTH = ['sha512', 'sha1', 'md5']
+DIGESTS_BY_STRENGTH = [b'sha512', b'sha1', b'md5']
 
 for k in DIGESTS_BY_STRENGTH:
     assert k in DIGESTS
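
The digester class touched by the next hunk keeps its doctest conventions:
keys and input become bytes, while the hex output stays as its own doctest
shows (Python 2 repr):

    >>> d = digester([b'md5', b'sha1'])
    >>> d.update(b'foo')
    >>> d[b'sha1']
    '0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33'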
@@ -221,11 +221,11 @@
     'sha1'
     """
 
-    def __init__(self, digests, s=''):
+    def __init__(self, digests, s=b''):
         self._hashes = {}
         for k in digests:
             if k not in DIGESTS:
-                raise error.Abort(_('unknown digest type: %s') % k)
+                raise error.Abort(_(b'unknown digest type: %s') % k)
             self._hashes[k] = DIGESTS[k]()
         if s:
             self.update(s)
@@ -236,7 +236,7 @@
 
     def __getitem__(self, key):
         if key not in DIGESTS:
-            raise error.Abort(_('unknown digest type: %s') % k)
+            raise error.Abort(_(b'unknown digest type: %s') % key)
         return nodemod.hex(self._hashes[key].digest())
 
     def __iter__(self):
@@ -277,14 +277,14 @@
     def validate(self):
         if self._size != self._got:
             raise error.Abort(
-                _('size mismatch: expected %d, got %d')
+                _(b'size mismatch: expected %d, got %d')
                 % (self._size, self._got)
             )
         for k, v in self._digests.items():
             if v != self._digester[k]:
                 # i18n: first parameter is a digest name
                 raise error.Abort(
-                    _('%s mismatch: expected %s, got %s')
+                    _(b'%s mismatch: expected %s, got %s')
                     % (k, v, self._digester[k])
                 )
 
@@ -363,15 +363,15 @@
         if len(self._buffer) > 1:
             # this should not happen because both read and readline end with a
             # _frombuffer call that collapses it.
-            self._buffer = [''.join(self._buffer)]
+            self._buffer = [b''.join(self._buffer)]
             self._lenbuf = len(self._buffer[0])
         lfi = -1
         if self._buffer:
-            lfi = self._buffer[-1].find('\n')
+            lfi = self._buffer[-1].find(b'\n')
         while (not self._eof) and lfi < 0:
             self._fillbuffer()
             if self._buffer:
-                lfi = self._buffer[-1].find('\n')
+                lfi = self._buffer[-1].find(b'\n')
         size = lfi + 1
         if lfi < 0:  # end of file
             size = self._lenbuf
@@ -385,10 +385,10 @@
 
         The data are removed from the buffer."""
         if size == 0 or not self._buffer:
-            return ''
+            return b''
         buf = self._buffer[0]
         if len(self._buffer) > 1:
-            buf = ''.join(self._buffer)
+            buf = b''.join(self._buffer)
 
         data = buf[:size]
         buf = buf[len(data) :]
@@ -420,7 +420,7 @@
         # Empty files cannot be mmapped, but mmapread should still work.  Check
         # if the file is empty, and if so, return an empty buffer.
         if os.fstat(fd).st_size == 0:
-            return ''
+            return b''
         raise
 
 
@@ -787,29 +787,29 @@
     def _writedata(self, data):
         if not self.logdata:
             if self.logdataapis:
-                self.fh.write('\n')
+                self.fh.write(b'\n')
                 self.fh.flush()
             return
 
         # Simple case writes all data on a single line.
         if b'\n' not in data:
             if self.logdataapis:
-                self.fh.write(': %s\n' % stringutil.escapestr(data))
+                self.fh.write(b': %s\n' % stringutil.escapestr(data))
             else:
                 self.fh.write(
-                    '%s>     %s\n' % (self.name, stringutil.escapestr(data))
+                    b'%s>     %s\n' % (self.name, stringutil.escapestr(data))
                 )
             self.fh.flush()
             return
 
         # Data with newlines is written to multiple lines.
         if self.logdataapis:
-            self.fh.write(':\n')
+            self.fh.write(b':\n')
 
         lines = data.splitlines(True)
         for line in lines:
             self.fh.write(
-                '%s>     %s\n' % (self.name, stringutil.escapestr(line))
+                b'%s>     %s\n' % (self.name, stringutil.escapestr(line))
             )
         self.fh.flush()
 
@@ -832,9 +832,9 @@
             return
         # Python 3 can return None from reads at EOF instead of empty strings.
         if res is None:
-            res = ''
-
-        if size == -1 and res == '':
+            res = b''
+
+        if size == -1 and res == b'':
             # Suppress pointless read(-1) calls that return
             # nothing. These happen _a lot_ on Python 3, and there
             # doesn't seem to be a better workaround to have matching
@@ -842,7 +842,7 @@
             return
 
         if self.logdataapis:
-            self.fh.write('%s> read(%d) -> %d' % (self.name, size, len(res)))
+            self.fh.write(b'%s> read(%d) -> %d' % (self.name, size, len(res)))
 
         self._writedata(res)
 
@@ -851,7 +851,7 @@
             return
 
         if self.logdataapis:
-            self.fh.write('%s> readline() -> %d' % (self.name, len(res)))
+            self.fh.write(b'%s> readline() -> %d' % (self.name, len(res)))
 
         self._writedata(res)
 
@@ -861,7 +861,7 @@
 
         if self.logdataapis:
             self.fh.write(
-                '%s> readinto(%d) -> %r' % (self.name, len(dest), res)
+                b'%s> readinto(%d) -> %r' % (self.name, len(dest), res)
             )
 
         data = dest[0:res] if res is not None else b''
@@ -883,7 +883,7 @@
             res = len(data)
 
         if self.logdataapis:
-            self.fh.write('%s> write(%d) -> %r' % (self.name, len(data), res))
+            self.fh.write(b'%s> write(%d) -> %r' % (self.name, len(data), res))
 
         self._writedata(data)
 
@@ -891,7 +891,7 @@
         if not self.writes:
             return
 
-        self.fh.write('%s> flush() -> %r\n' % (self.name, res))
+        self.fh.write(b'%s> flush() -> %r\n' % (self.name, res))
 
     # For observedbufferedinputpipe.
     def bufferedread(self, res, size):
@@ -900,7 +900,7 @@
 
         if self.logdataapis:
             self.fh.write(
-                '%s> bufferedread(%d) -> %d' % (self.name, size, len(res))
+                b'%s> bufferedread(%d) -> %d' % (self.name, size, len(res))
             )
 
         self._writedata(res)
@@ -911,7 +911,7 @@
 
         if self.logdataapis:
             self.fh.write(
-                '%s> bufferedreadline() -> %d' % (self.name, len(res))
+                b'%s> bufferedreadline() -> %d' % (self.name, len(res))
             )
 
         self._writedata(res)
@@ -958,7 +958,7 @@
         if not self.states:
             return
 
-        self.fh.write('%s> makefile(%r, %r)\n' % (self.name, mode, bufsize))
+        self.fh.write(b'%s> makefile(%r, %r)\n' % (self.name, mode, bufsize))
 
     def recv(self, res, size, flags=0):
         if not self.reads:
@@ -966,7 +966,7 @@
 
         if self.logdataapis:
             self.fh.write(
-                '%s> recv(%d, %d) -> %d' % (self.name, size, flags, len(res))
+                b'%s> recv(%d, %d) -> %d' % (self.name, size, flags, len(res))
             )
         self._writedata(res)
 
@@ -976,7 +976,7 @@
 
         if self.logdataapis:
             self.fh.write(
-                '%s> recvfrom(%d, %d) -> %d'
+                b'%s> recvfrom(%d, %d) -> %d'
                 % (self.name, size, flags, len(res[0]))
             )
 
@@ -988,7 +988,7 @@
 
         if self.logdataapis:
             self.fh.write(
-                '%s> recvfrom_into(%d, %d) -> %d'
+                b'%s> recvfrom_into(%d, %d) -> %d'
                 % (self.name, size, flags, res[0])
             )
 
@@ -1000,7 +1000,7 @@
 
         if self.logdataapis:
             self.fh.write(
-                '%s> recv_into(%d, %d) -> %d' % (self.name, size, flags, res)
+                b'%s> recv_into(%d, %d) -> %d' % (self.name, size, flags, res)
             )
 
         self._writedata(buf[0:res])
@@ -1010,7 +1010,7 @@
             return
 
         self.fh.write(
-            '%s> send(%d, %d) -> %d' % (self.name, len(data), flags, len(res))
+            b'%s> send(%d, %d) -> %d' % (self.name, len(data), flags, len(res))
         )
         self._writedata(data)
 
@@ -1020,7 +1020,9 @@
 
         if self.logdataapis:
             # Returns None on success. So don't bother reporting return value.
-            self.fh.write('%s> sendall(%d, %d)' % (self.name, len(data), flags))
+            self.fh.write(
+                b'%s> sendall(%d, %d)' % (self.name, len(data), flags)
+            )
 
         self._writedata(data)
 
@@ -1035,7 +1037,7 @@
 
         if self.logdataapis:
             self.fh.write(
-                '%s> sendto(%d, %d, %r) -> %d'
+                b'%s> sendto(%d, %d, %r) -> %d'
                 % (self.name, len(data), flags, address, res)
             )
 
@@ -1045,26 +1047,26 @@
         if not self.states:
             return
 
-        self.fh.write('%s> setblocking(%r)\n' % (self.name, flag))
+        self.fh.write(b'%s> setblocking(%r)\n' % (self.name, flag))
 
     def settimeout(self, res, value):
         if not self.states:
             return
 
-        self.fh.write('%s> settimeout(%r)\n' % (self.name, value))
+        self.fh.write(b'%s> settimeout(%r)\n' % (self.name, value))
 
     def gettimeout(self, res):
         if not self.states:
             return
 
-        self.fh.write('%s> gettimeout() -> %f\n' % (self.name, res))
+        self.fh.write(b'%s> gettimeout() -> %f\n' % (self.name, res))
 
     def setsockopt(self, res, level, optname, value):
         if not self.states:
             return
 
         self.fh.write(
-            '%s> setsockopt(%r, %r, %r) -> %r\n'
+            b'%s> setsockopt(%r, %r, %r) -> %r\n'
             % (self.name, level, optname, value, res)
         )
 
@@ -1100,7 +1102,7 @@
 
         return __version__.version
     except ImportError:
-        return 'unknown'
+        return b'unknown'
 
 
 def versiontuple(v=None, n=4):
@@ -1162,14 +1164,14 @@
         v = version()
     m = remod.match(br'(\d+(?:\.\d+){,2})[\+-]?(.*)', v)
     if not m:
-        vparts, extra = '', v
+        vparts, extra = b'', v
     elif m.group(2):
         vparts, extra = m.groups()
     else:
         vparts, extra = m.group(1), None
 
     vints = []
-    for i in vparts.split('.'):
+    for i in vparts.split(b'.'):
         try:
             vints.append(int(i))
         except ValueError:
@@ -1744,11 +1746,11 @@
                     min = nmin
                 if min > max:
                     min = max
-            yield ''.join(buf)
+            yield b''.join(buf)
             blen = 0
             buf = []
     if buf:
-        yield ''.join(buf)
+        yield b''.join(buf)
 
 
 def always(fn):
@@ -1806,19 +1808,19 @@
     if os.path.isabs(n1):
         if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
             return os.path.join(root, localpath(n2))
-        n2 = '/'.join((pconvert(root), n2))
-    a, b = splitpath(n1), n2.split('/')
+        n2 = b'/'.join((pconvert(root), n2))
+    a, b = splitpath(n1), n2.split(b'/')
     a.reverse()
     b.reverse()
     while a and b and a[-1] == b[-1]:
         a.pop()
         b.pop()
     b.reverse()
-    return pycompat.ossep.join((['..'] * len(a)) + b) or '.'
+    return pycompat.ossep.join(([b'..'] * len(a)) + b) or b'.'
 
 
 # the location of data files matching the source code
-if procutil.mainfrozen() and getattr(sys, 'frozen', None) != 'macosx_app':
+if procutil.mainfrozen() and getattr(sys, 'frozen', None) != b'macosx_app':
     # executable version (py2exe) doesn't support __file__
     datapath = os.path.dirname(pycompat.sysexecutable)
 else:
@@ -1843,19 +1845,19 @@
 
 # a whitelist of known filesystems where hardlink works reliably
 _hardlinkfswhitelist = {
-    'apfs',
-    'btrfs',
-    'ext2',
-    'ext3',
-    'ext4',
-    'hfs',
-    'jfs',
-    'NTFS',
-    'reiserfs',
-    'tmpfs',
-    'ufs',
-    'xfs',
-    'zfs',
+    b'apfs',
+    b'btrfs',
+    b'ext2',
+    b'ext3',
+    b'ext4',
+    b'hfs',
+    b'jfs',
+    b'NTFS',
+    b'reiserfs',
+    b'tmpfs',
+    b'ufs',
+    b'xfs',
+    b'zfs',
 }
 
 
@@ -1920,7 +1922,7 @@
 
     def settopic():
         if progress:
-            progress.topic = _('linking') if hardlink else _('copying')
+            progress.topic = _(b'linking') if hardlink else _(b'copying')
 
     if os.path.isdir(src):
         if hardlink is None:
@@ -1958,30 +1960,30 @@
 
 
 _winreservednames = {
-    'con',
-    'prn',
-    'aux',
-    'nul',
-    'com1',
-    'com2',
-    'com3',
-    'com4',
-    'com5',
-    'com6',
-    'com7',
-    'com8',
-    'com9',
-    'lpt1',
-    'lpt2',
-    'lpt3',
-    'lpt4',
-    'lpt5',
-    'lpt6',
-    'lpt7',
-    'lpt8',
-    'lpt9',
+    b'con',
+    b'prn',
+    b'aux',
+    b'nul',
+    b'com1',
+    b'com2',
+    b'com3',
+    b'com4',
+    b'com5',
+    b'com6',
+    b'com7',
+    b'com8',
+    b'com9',
+    b'lpt1',
+    b'lpt2',
+    b'lpt3',
+    b'lpt4',
+    b'lpt5',
+    b'lpt6',
+    b'lpt7',
+    b'lpt8',
+    b'lpt9',
 }
-_winreservedchars = ':*?"<>|'
+_winreservedchars = b':*?"<>|'
 
 
 def checkwinfilename(path):
@@ -2008,33 +2010,39 @@
     >>> checkwinfilename(b"foo\\/bar")
     "directory name ends with '\\', which is invalid on Windows"
     '''
-    if path.endswith('\\'):
-        return _("filename ends with '\\', which is invalid on Windows")
-    if '\\/' in path:
-        return _("directory name ends with '\\', which is invalid on Windows")
-    for n in path.replace('\\', '/').split('/'):
+    if path.endswith(b'\\'):
+        return _(b"filename ends with '\\', which is invalid on Windows")
+    if b'\\/' in path:
+        return _(b"directory name ends with '\\', which is invalid on Windows")
+    for n in path.replace(b'\\', b'/').split(b'/'):
         if not n:
             continue
         for c in _filenamebytestr(n):
             if c in _winreservedchars:
                 return (
-                    _("filename contains '%s', which is reserved " "on Windows")
+                    _(
+                        b"filename contains '%s', which is reserved "
+                        b"on Windows"
+                    )
                     % c
                 )
             if ord(c) <= 31:
                 return _(
-                    "filename contains '%s', which is invalid " "on Windows"
+                    b"filename contains '%s', which is invalid " b"on Windows"
                 ) % stringutil.escapestr(c)
-        base = n.split('.')[0]
+        base = n.split(b'.')[0]
         if base and base.lower() in _winreservednames:
             return (
-                _("filename contains '%s', which is reserved " "on Windows")
+                _(b"filename contains '%s', which is reserved " b"on Windows")
                 % base
             )
         t = n[-1:]
-        if t in '. ' and n not in '..':
+        if t in b'. ' and n not in b'..':
             return (
-                _("filename ends with '%s', which is not allowed " "on Windows")
+                _(
+                    b"filename ends with '%s', which is not allowed "
+                    b"on Windows"
+                )
                 % t
             )
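
Illustratively, the byteified checks above still behave as the function's
other doctests describe:

    >>> checkwinfilename(b'just/a/normal/path')
    >>> checkwinfilename(b'foo/bar/con.xml')
    "filename contains 'con', which is reserved on Windows"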
 
@@ -2078,7 +2086,7 @@
             raise
     except AttributeError:  # no symlink in os
         pass
-    with posixfile(pathname, 'rb') as fp:
+    with posixfile(pathname, b'rb') as fp:
         return fp.read()
 
 
@@ -2130,7 +2138,7 @@
         global _re2
         try:
             # check if match works, see issue3964
-            _re2 = bool(re2.match(r'\[([^\[]+)\]', '[ui]'))
+            _re2 = bool(re2.match(r'\[([^\[]+)\]', b'[ui]'))
         except ImportError:
             _re2 = False
 
@@ -2144,9 +2152,9 @@
             self._checkre2()
         if _re2 and (flags & ~(remod.IGNORECASE | remod.MULTILINE)) == 0:
             if flags & remod.IGNORECASE:
-                pat = '(?i)' + pat
+                pat = b'(?i)' + pat
             if flags & remod.MULTILINE:
-                pat = '(?m)' + pat
+                pat = b'(?m)' + pat
             try:
                 return re2.compile(pat)
             except re2.error:
@@ -2192,7 +2200,7 @@
     if pycompat.osaltsep:
         seps = seps + pycompat.osaltsep
     # Protect backslashes. This gets silly very quickly.
-    seps.replace('\\', '\\\\')
+    seps = seps.replace(b'\\', b'\\\\')
     pattern = remod.compile(br'([^%s]+)|([%s]+)' % (seps, seps))
     dir = os.path.normpath(root)
     result = []
@@ -2215,7 +2223,7 @@
         result.append(found or part)
         dir = os.path.join(dir, part)
 
-    return ''.join(result)
+    return b''.join(result)
 
 
 def checknlink(testfile):
@@ -2226,12 +2234,12 @@
     f1, f2, fp = None, None, None
     try:
         fd, f1 = pycompat.mkstemp(
-            prefix='.%s-' % os.path.basename(testfile),
-            suffix='1~',
+            prefix=b'.%s-' % os.path.basename(testfile),
+            suffix=b'1~',
             dir=os.path.dirname(testfile),
         )
         os.close(fd)
-        f2 = '%s2~' % f1[:-2]
+        f2 = b'%s2~' % f1[:-2]
 
         oslink(f1, f2)
         # nlinks() may behave differently for files on Windows shares if
@@ -2280,7 +2288,7 @@
     Returns the name of the temporary file.
     """
     d, fn = os.path.split(name)
-    fd, temp = pycompat.mkstemp(prefix='.%s-' % fn, suffix='~', dir=d)
+    fd, temp = pycompat.mkstemp(prefix=b'.%s-' % fn, suffix=b'~', dir=d)
     os.close(fd)
     # Temporary files are created with mode 0600, which is usually not
     # what we want.  If the original file already exists, just copy
@@ -2291,14 +2299,14 @@
         return temp
     try:
         try:
-            ifp = posixfile(name, "rb")
+            ifp = posixfile(name, b"rb")
         except IOError as inst:
             if inst.errno == errno.ENOENT:
                 return temp
             if not getattr(inst, 'filename', None):
                 inst.filename = name
             raise
-        ofp = posixfile(temp, "wb")
+        ofp = posixfile(temp, b"wb")
         for chunk in filechunkiter(ifp):
             ofp.write(chunk)
         ifp.close()
@@ -2432,13 +2440,13 @@
     or repo.wlock).
     '''
 
-    def __init__(self, name, mode='w+b', createmode=None, checkambig=False):
+    def __init__(self, name, mode=b'w+b', createmode=None, checkambig=False):
         self.__name = name  # permanent name
         self._tempname = mktempcopy(
             name,
-            emptyok=('w' in mode),
+            emptyok=(b'w' in mode),
             createmode=createmode,
-            enforcewritable=('w' in mode),
+            enforcewritable=(b'w' in mode),
         )
 
         self._fp = posixfile(self._tempname, mode)
@@ -2541,17 +2549,17 @@
 
 
 def readfile(path):
-    with open(path, 'rb') as fp:
+    with open(path, b'rb') as fp:
         return fp.read()
 
 
 def writefile(path, text):
-    with open(path, 'wb') as fp:
+    with open(path, b'wb') as fp:
         fp.write(text)
 
 
 def appendfile(path, text):
-    with open(path, 'ab') as fp:
+    with open(path, b'ab') as fp:
         fp.write(text)
 
 
@@ -2583,7 +2591,7 @@
 
         If size parameter is omitted, read everything"""
         if l is None:
-            return ''.join(self.iter)
+            return b''.join(self.iter)
 
         left = l
         buf = []
@@ -2635,7 +2643,7 @@
                 self._chunkoffset += left
                 left -= chunkremaining
 
-        return ''.join(buf)
+        return b''.join(buf)
 
 
 def filechunkiter(f, size=131072, limit=None):
@@ -2727,23 +2735,23 @@
     ParseError: fromline must be strictly positive
     """
     if toline - fromline < 0:
-        raise error.ParseError(_("line range must be positive"))
+        raise error.ParseError(_(b"line range must be positive"))
     if fromline < 1:
-        raise error.ParseError(_("fromline must be strictly positive"))
+        raise error.ParseError(_(b"fromline must be strictly positive"))
     return fromline - 1, toline
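
A quick worked example of the helper just above (processlinerange in this
file): it converts a 1-based inclusive range into a 0-based half-open one:

    >>> processlinerange(10, 20)
    (9, 20)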
 
 
 bytecount = unitcountfn(
-    (100, 1 << 30, _('%.0f GB')),
-    (10, 1 << 30, _('%.1f GB')),
-    (1, 1 << 30, _('%.2f GB')),
-    (100, 1 << 20, _('%.0f MB')),
-    (10, 1 << 20, _('%.1f MB')),
-    (1, 1 << 20, _('%.2f MB')),
-    (100, 1 << 10, _('%.0f KB')),
-    (10, 1 << 10, _('%.1f KB')),
-    (1, 1 << 10, _('%.2f KB')),
-    (1, 1, _('%.0f bytes')),
+    (100, 1 << 30, _(b'%.0f GB')),
+    (10, 1 << 30, _(b'%.1f GB')),
+    (1, 1 << 30, _(b'%.2f GB')),
+    (100, 1 << 20, _(b'%.0f MB')),
+    (10, 1 << 20, _(b'%.1f MB')),
+    (1, 1 << 20, _(b'%.2f MB')),
+    (100, 1 << 10, _(b'%.0f KB')),
+    (10, 1 << 10, _(b'%.1f KB')),
+    (1, 1 << 10, _(b'%.2f KB')),
+    (1, 1, _(b'%.0f bytes')),
 )
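
unitcountfn picks the first row whose threshold the value meets, so roughly:

    >>> bytecount(2300000)
    '2.19 MB'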
 
 
@@ -2771,18 +2779,18 @@
 
 
 def tolf(s):
-    return _eolre.sub('\n', s)
+    return _eolre.sub(b'\n', s)
 
 
 def tocrlf(s):
-    return _eolre.sub('\r\n', s)
+    return _eolre.sub(b'\r\n', s)
 
 
 def _crlfwriter(fp):
     return transformingwriter(fp, tocrlf)
 
 
-if pycompat.oslinesep == '\r\n':
+if pycompat.oslinesep == b'\r\n':
     tonativeeol = tocrlf
     fromnativeeol = tolf
     nativeeolwriter = _crlfwriter
@@ -2791,7 +2799,7 @@
     fromnativeeol = pycompat.identity
     nativeeolwriter = pycompat.identity
 
-if pyplatform.python_implementation() == 'CPython' and sys.version_info < (
+if pyplatform.python_implementation() == b'CPython' and sys.version_info < (
     3,
     0,
 ):
@@ -2822,14 +2830,14 @@
     if sys.version_info >= (2, 7, 4):
         # fp.readline deals with EINTR correctly, use it as a workaround.
         def _safeiterfile(fp):
-            return iter(fp.readline, '')
+            return iter(fp.readline, b'')
 
     else:
         # fp.read* are broken too, manually deal with EINTR in a stupid way.
         # note: this may block longer than necessary because of bufsize.
         def _safeiterfile(fp, bufsize=4096):
             fd = fp.fileno()
-            line = ''
+            line = b''
             while True:
                 try:
                     buf = os.read(fd, bufsize)
@@ -2840,11 +2848,11 @@
                     else:
                         raise
                 line += buf
-                if '\n' in buf:
+                if b'\n' in buf:
                     splitted = line.splitlines(True)
-                    line = ''
+                    line = b''
                     for l in splitted:
-                        if l[-1] == '\n':
+                        if l[-1] == b'\n':
                             yield l
                         else:
                             line = l
@@ -2893,9 +2901,9 @@
     its escaping.
     """
     fn = fn or (lambda s: s)
-    patterns = '|'.join(mapping.keys())
+    patterns = b'|'.join(mapping.keys())
     if escape_prefix:
-        patterns += '|' + prefix
+        patterns += b'|' + prefix
         if len(prefix) > 1:
             prefix_char = prefix[1:]
         else:
@@ -2921,7 +2929,7 @@
         return socket.getservbyname(pycompat.sysstr(port))
     except socket.error:
         raise error.Abort(
-            _("no port number associated with service '%s'") % port
+            _(b"no port number associated with service '%s'") % port
         )
 
 
@@ -2999,38 +3007,38 @@
     <url scheme: 'http'>
     """
 
-    _safechars = "!~*'()+"
-    _safepchars = "/!~*'()+:\\"
-    _matchscheme = remod.compile('^[a-zA-Z0-9+.\\-]+:').match
+    _safechars = b"!~*'()+"
+    _safepchars = b"/!~*'()+:\\"
+    _matchscheme = remod.compile(b'^[a-zA-Z0-9+.\\-]+:').match
 
     def __init__(self, path, parsequery=True, parsefragment=True):
         # We slowly chomp away at path until we have only the path left
         self.scheme = self.user = self.passwd = self.host = None
         self.port = self.path = self.query = self.fragment = None
         self._localpath = True
-        self._hostport = ''
+        self._hostport = b''
         self._origpath = path
 
-        if parsefragment and '#' in path:
-            path, self.fragment = path.split('#', 1)
+        if parsefragment and b'#' in path:
+            path, self.fragment = path.split(b'#', 1)
 
         # special case for Windows drive letters and UNC paths
-        if hasdriveletter(path) or path.startswith('\\\\'):
+        if hasdriveletter(path) or path.startswith(b'\\\\'):
             self.path = path
             return
 
         # For compatibility reasons, we can't handle bundle paths as
         # normal URLs
-        if path.startswith('bundle:'):
-            self.scheme = 'bundle'
+        if path.startswith(b'bundle:'):
+            self.scheme = b'bundle'
             path = path[7:]
-            if path.startswith('//'):
+            if path.startswith(b'//'):
                 path = path[2:]
             self.path = path
             return
 
         if self._matchscheme(path):
-            parts = path.split(':', 1)
+            parts = path.split(b':', 1)
             if parts[0]:
                 self.scheme, path = parts
                 self._localpath = False
@@ -3038,23 +3046,23 @@
         if not path:
             path = None
             if self._localpath:
-                self.path = ''
+                self.path = b''
                 return
         else:
             if self._localpath:
                 self.path = path
                 return
 
-            if parsequery and '?' in path:
-                path, self.query = path.split('?', 1)
+            if parsequery and b'?' in path:
+                path, self.query = path.split(b'?', 1)
                 if not path:
                     path = None
                 if not self.query:
                     self.query = None
 
             # // is required to specify a host/authority
-            if path and path.startswith('//'):
-                parts = path[2:].split('/', 1)
+            if path and path.startswith(b'//'):
+                parts = path[2:].split(b'/', 1)
                 if len(parts) > 1:
                     self.host, path = parts
                 else:
@@ -3065,37 +3073,41 @@
                     # path of file:///d is /d
                     # path of file:///d:/ is d:/, not /d:/
                     if path and not hasdriveletter(path):
-                        path = '/' + path
-
-            if self.host and '@' in self.host:
-                self.user, self.host = self.host.rsplit('@', 1)
-                if ':' in self.user:
-                    self.user, self.passwd = self.user.split(':', 1)
+                        path = b'/' + path
+
+            if self.host and b'@' in self.host:
+                self.user, self.host = self.host.rsplit(b'@', 1)
+                if b':' in self.user:
+                    self.user, self.passwd = self.user.split(b':', 1)
                 if not self.host:
                     self.host = None
 
             # Don't split on colons in IPv6 addresses without ports
             if (
                 self.host
-                and ':' in self.host
-                and not (self.host.startswith('[') and self.host.endswith(']'))
+                and b':' in self.host
+                and not (
+                    self.host.startswith(b'[') and self.host.endswith(b']')
+                )
             ):
                 self._hostport = self.host
-                self.host, self.port = self.host.rsplit(':', 1)
+                self.host, self.port = self.host.rsplit(b':', 1)
                 if not self.host:
                     self.host = None
 
             if (
                 self.host
-                and self.scheme == 'file'
-                and self.host not in ('localhost', '127.0.0.1', '[::1]')
+                and self.scheme == b'file'
+                and self.host not in (b'localhost', b'127.0.0.1', b'[::1]')
             ):
-                raise error.Abort(_('file:// URLs can only refer to localhost'))
+                raise error.Abort(
+                    _(b'file:// URLs can only refer to localhost')
+                )
 
         self.path = path
 
         # leave the query string escaped
-        for a in ('user', 'passwd', 'host', 'port', 'path', 'fragment'):
+        for a in (b'user', b'passwd', b'host', b'port', b'path', b'fragment'):
             v = getattr(self, a)
             if v is not None:
                 setattr(self, a, urlreq.unquote(v))
@@ -3104,19 +3116,19 @@
     def __repr__(self):
         attrs = []
         for a in (
-            'scheme',
-            'user',
-            'passwd',
-            'host',
-            'port',
-            'path',
-            'query',
-            'fragment',
+            b'scheme',
+            b'user',
+            b'passwd',
+            b'host',
+            b'port',
+            b'path',
+            b'query',
+            b'fragment',
         ):
             v = getattr(self, a)
             if v is not None:
-                attrs.append('%s: %r' % (a, pycompat.bytestr(v)))
-        return '<url %s>' % ', '.join(attrs)
+                attrs.append(b'%s: %r' % (a, pycompat.bytestr(v)))
+        return b'<url %s>' % b', '.join(attrs)
 
     def __bytes__(self):
         r"""Join the URL's components back into a URL string.
@@ -3154,38 +3166,38 @@
         """
         if self._localpath:
             s = self.path
-            if self.scheme == 'bundle':
-                s = 'bundle:' + s
+            if self.scheme == b'bundle':
+                s = b'bundle:' + s
             if self.fragment:
-                s += '#' + self.fragment
+                s += b'#' + self.fragment
             return s
 
-        s = self.scheme + ':'
+        s = self.scheme + b':'
         if self.user or self.passwd or self.host:
-            s += '//'
+            s += b'//'
         elif self.scheme and (
             not self.path
-            or self.path.startswith('/')
+            or self.path.startswith(b'/')
             or hasdriveletter(self.path)
         ):
-            s += '//'
+            s += b'//'
             if hasdriveletter(self.path):
-                s += '/'
+                s += b'/'
         if self.user:
             s += urlreq.quote(self.user, safe=self._safechars)
         if self.passwd:
-            s += ':' + urlreq.quote(self.passwd, safe=self._safechars)
+            s += b':' + urlreq.quote(self.passwd, safe=self._safechars)
         if self.user or self.passwd:
-            s += '@'
+            s += b'@'
         if self.host:
-            if not (self.host.startswith('[') and self.host.endswith(']')):
+            if not (self.host.startswith(b'[') and self.host.endswith(b']')):
                 s += urlreq.quote(self.host)
             else:
                 s += self.host
         if self.port:
-            s += ':' + urlreq.quote(self.port)
+            s += b':' + urlreq.quote(self.port)
         if self.host:
-            s += '/'
+            s += b'/'
         if self.path:
             # TODO: similar to the query string, we should not unescape the
             # path when we store it, the path might contain '%2f' = '/',
@@ -3193,9 +3205,9 @@
             s += urlreq.quote(self.path, safe=self._safepchars)
         if self.query:
             # we store the query in escaped form.
-            s += '?' + self.query
+            s += b'?' + self.query
         if self.fragment is not None:
-            s += '#' + urlreq.quote(self.fragment, safe=self._safepchars)
+            s += b'#' + urlreq.quote(self.fragment, safe=self._safepchars)
         return s
 
     __str__ = encoding.strmethod(__bytes__)
@@ -3213,37 +3225,39 @@
         # URIs must not contain credentials. The host is passed in the
         # URIs list because Python < 2.4.3 uses only that to search for
         # a password.
-        return (s, (None, (s, self.host), self.user, self.passwd or ''))
+        return (s, (None, (s, self.host), self.user, self.passwd or b''))
 
     def isabs(self):
-        if self.scheme and self.scheme != 'file':
+        if self.scheme and self.scheme != b'file':
             return True  # remote URL
         if hasdriveletter(self.path):
             return True  # absolute for our purposes - can't be joined()
         if self.path.startswith(br'\\'):
             return True  # Windows UNC path
-        if self.path.startswith('/'):
+        if self.path.startswith(b'/'):
             return True  # POSIX-style
         return False
 
     def localpath(self):
-        if self.scheme == 'file' or self.scheme == 'bundle':
-            path = self.path or '/'
+        if self.scheme == b'file' or self.scheme == b'bundle':
+            path = self.path or b'/'
             # For Windows, we need to promote hosts containing drive
             # letters to paths with drive letters.
             if hasdriveletter(self._hostport):
-                path = self._hostport + '/' + self.path
+                path = self._hostport + b'/' + self.path
             elif (
                 self.host is not None and self.path and not hasdriveletter(path)
             ):
-                path = '/' + path
+                path = b'/' + path
             return path
         return self._origpath
 
     def islocal(self):
         '''whether localpath will return something that posixfile can open'''
         return (
-            not self.scheme or self.scheme == 'file' or self.scheme == 'bundle'
+            not self.scheme
+            or self.scheme == b'file'
+            or self.scheme == b'bundle'
         )
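
Taken together, the byteified url class parses and reassembles URLs entirely
in bytes; following the repr style of its doctests:

    >>> url(b'http://user:pw@example.com:8080/repo')
    <url scheme: 'http', user: 'user', passwd: 'pw', host: 'example.com', port: '8080', path: 'repo'>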
 
 
@@ -3252,7 +3266,7 @@
 
 
 def hasdriveletter(path):
-    return path and path[1:2] == ':' and path[0:1].isalpha()
+    return path and path[1:2] == b':' and path[0:1].isalpha()
 
 
 def urllocalpath(path):
@@ -3270,9 +3284,9 @@
     Raises an error.Abort when the url is unsafe.
     """
     path = urlreq.unquote(path)
-    if path.startswith('ssh://-') or path.startswith('svn+ssh://-'):
+    if path.startswith(b'ssh://-') or path.startswith(b'svn+ssh://-'):
         raise error.Abort(
-            _('potentially unsafe url: %r') % (pycompat.bytestr(path),)
+            _(b'potentially unsafe url: %r') % (pycompat.bytestr(path),)
         )
 
 
@@ -3280,7 +3294,7 @@
     '''hide user credential in a url string'''
     u = url(u)
     if u.passwd:
-        u.passwd = '***'
+        u.passwd = b'***'
     return bytes(u)
 
 
@@ -3292,19 +3306,19 @@
 
 
 timecount = unitcountfn(
-    (1, 1e3, _('%.0f s')),
-    (100, 1, _('%.1f s')),
-    (10, 1, _('%.2f s')),
-    (1, 1, _('%.3f s')),
-    (100, 0.001, _('%.1f ms')),
-    (10, 0.001, _('%.2f ms')),
-    (1, 0.001, _('%.3f ms')),
-    (100, 0.000001, _('%.1f us')),
-    (10, 0.000001, _('%.2f us')),
-    (1, 0.000001, _('%.3f us')),
-    (100, 0.000000001, _('%.1f ns')),
-    (10, 0.000000001, _('%.2f ns')),
-    (1, 0.000000001, _('%.3f ns')),
+    (1, 1e3, _(b'%.0f s')),
+    (100, 1, _(b'%.1f s')),
+    (10, 1, _(b'%.2f s')),
+    (1, 1, _(b'%.3f s')),
+    (100, 0.001, _(b'%.1f ms')),
+    (10, 0.001, _(b'%.2f ms')),
+    (1, 0.001, _(b'%.3f ms')),
+    (100, 0.000001, _(b'%.1f us')),
+    (10, 0.000001, _(b'%.2f us')),
+    (1, 0.000001, _(b'%.3f us')),
+    (100, 0.000000001, _(b'%.1f ns')),
+    (10, 0.000000001, _(b'%.2f ns')),
+    (1, 0.000000001, _(b'%.3f ns')),
 )
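
As with bytecount, the first matching row wins:

    >>> timecount(0.0051)
    '5.100 ms'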
 
 
@@ -3322,7 +3336,7 @@
     level = attr.ib(default=1)
 
     def __bytes__(self):
-        return timecount(self.elapsed) if self.elapsed else '<unknown>'
+        return timecount(self.elapsed) if self.elapsed else b'<unknown>'
 
     __str__ = encoding.strmethod(__bytes__)
 
@@ -3366,9 +3380,9 @@
             result = func(*args, **kwargs)
         stderr = procutil.stderr
         stderr.write(
-            '%s%s: %s\n'
+            b'%s%s: %s\n'
             % (
-                ' ' * time_stats.level * 2,
+                b' ' * time_stats.level * 2,
                 pycompat.bytestr(func.__name__),
                 time_stats,
             )
@@ -3379,13 +3393,13 @@
 
 
 _sizeunits = (
-    ('m', 2 ** 20),
-    ('k', 2 ** 10),
-    ('g', 2 ** 30),
-    ('kb', 2 ** 10),
-    ('mb', 2 ** 20),
-    ('gb', 2 ** 30),
-    ('b', 1),
+    (b'm', 2 ** 20),
+    (b'k', 2 ** 10),
+    (b'g', 2 ** 30),
+    (b'kb', 2 ** 10),
+    (b'mb', 2 ** 20),
+    (b'gb', 2 ** 30),
+    (b'b', 1),
 )
 
 
@@ -3406,7 +3420,7 @@
                 return int(float(t[: -len(k)]) * u)
         return int(t)
     except ValueError:
-        raise error.ParseError(_("couldn't parse size: %s") % s)
+        raise error.ParseError(_(b"couldn't parse size: %s") % s)
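
The helper just above (sizetoint in this file) resolves these suffixes
case-insensitively, e.g.:

    >>> sizetoint(b'2.2kb')
    2252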
 
 
 class hooks(object):
@@ -3428,7 +3442,7 @@
         return results
 
 
-def getstackframes(skip=0, line=' %-*s in %s\n', fileline='%s:%d', depth=0):
+def getstackframes(skip=0, line=b' %-*s in %s\n', fileline=b'%s:%d', depth=0):
     '''Yields lines for a nicely formatted stacktrace.
     Skips the 'skip' last entries, then returns the last 'depth' entries.
     Each file+linenumber is formatted according to fileline.
@@ -3454,7 +3468,11 @@
 
 
 def debugstacktrace(
-    msg='stacktrace', skip=0, f=procutil.stderr, otherf=procutil.stdout, depth=0
+    msg=b'stacktrace',
+    skip=0,
+    f=procutil.stderr,
+    otherf=procutil.stdout,
+    depth=0,
 ):
     '''Writes a message to f (stderr) with a nicely formatted stacktrace.
     Skips the 'skip' entries closest to the call, then shows 'depth' entries.
@@ -3464,7 +3482,7 @@
     '''
     if otherf:
         otherf.flush()
-    f.write('%s at:\n' % msg.rstrip())
+    f.write(b'%s at:\n' % msg.rstrip())
     for line in getstackframes(skip + 1, depth=depth):
         f.write(line)
     f.flush()
@@ -3482,7 +3500,7 @@
                     addpath(f)
         elif skip is not None:
             raise error.ProgrammingError(
-                "skip character is only supported " "with a dict source"
+                b"skip character is only supported " b"with a dict source"
             )
         else:
             for f in map:
@@ -3519,11 +3537,11 @@
 
 
 def finddirs(path):
-    pos = path.rfind('/')
+    pos = path.rfind(b'/')
     while pos != -1:
         yield path[:pos]
-        pos = path.rfind('/', 0, pos)
-    yield ''
+        pos = path.rfind(b'/', 0, pos)
+    yield b''
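
finddirs yields each ancestor directory and finally the empty root
(Python 2 repr shown):

    >>> list(finddirs(b'a/b/c'))
    ['a/b', 'a', '']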
 
 
 # convenient shortcut
@@ -3545,11 +3563,11 @@
     if others is None:
         others = set()
 
-    fn = '%s~%s' % (f, tag)
+    fn = b'%s~%s' % (f, tag)
     if fn not in ctx and fn not in others:
         return fn
     for n in itertools.count(1):
-        fn = '%s~%s~%s' % (f, tag, n)
+        fn = b'%s~%s~%s' % (f, tag, n)
         if fn not in ctx and fn not in others:
             return fn
 
@@ -3559,7 +3577,7 @@
     s = stream.read(n)
     if len(s) < n:
         raise error.Abort(
-            _("stream ended unexpectedly" " (got %d bytes, expected %d)")
+            _(b"stream ended unexpectedly" b" (got %d bytes, expected %d)")
             % (len(s), n)
         )
     return s
@@ -3589,7 +3607,7 @@
     ProgrammingError: negative value for uvarint: -1
     """
     if value < 0:
-        raise error.ProgrammingError('negative value for uvarint: %d' % value)
+        raise error.ProgrammingError(b'negative value for uvarint: %d' % value)
     bits = value & 0x7F
     value >>= 7
     bytes = []
@@ -3599,7 +3617,7 @@
         value >>= 7
     bytes.append(pycompat.bytechr(bits))
 
-    return ''.join(bytes)
+    return b''.join(bytes)
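
uvarintencode, just above, emits little-endian 7-bit groups with a
continuation bit; its own doctest shows the encoding:

    >>> uvarintencode(1337)
    '\xb9\n'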
 
 
 def uvarintdecodestream(fh):
--- a/mercurial/utils/cborutil.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/utils/cborutil.py	Sun Oct 06 09:48:39 2019 -0400
@@ -125,7 +125,7 @@
 
 def streamencodeint(v):
     if v >= 18446744073709551616 or v < -18446744073709551616:
-        raise ValueError('big integers not supported')
+        raise ValueError(b'big integers not supported')
 
     if v >= 0:
         yield encodelength(MAJOR_TYPE_UINT, v)
@@ -241,7 +241,7 @@
             break
 
     if not fn:
-        raise ValueError('do not know how to encode %s' % type(v))
+        raise ValueError(b'do not know how to encode %s' % type(v))
 
     return fn(v)
 
@@ -263,9 +263,9 @@
 
 
 STRUCT_BIG_UBYTE = struct.Struct(r'>B')
-STRUCT_BIG_USHORT = struct.Struct('>H')
-STRUCT_BIG_ULONG = struct.Struct('>L')
-STRUCT_BIG_ULONGLONG = struct.Struct('>Q')
+STRUCT_BIG_USHORT = struct.Struct(b'>H')
+STRUCT_BIG_ULONG = struct.Struct(b'>L')
+STRUCT_BIG_ULONGLONG = struct.Struct(b'>Q')
 
 SPECIAL_NONE = 0
 SPECIAL_START_INDEFINITE_BYTESTRING = 1
@@ -355,7 +355,7 @@
             return True, None, 1, SPECIAL_START_INDEFINITE_BYTESTRING
 
     elif majortype == MAJOR_TYPE_STRING:
-        raise CBORDecodeError('string major type not supported')
+        raise CBORDecodeError(b'string major type not supported')
 
     elif majortype == MAJOR_TYPE_ARRAY:
         # Beginning of arrays are treated as uints in order to decode their
@@ -404,13 +404,13 @@
 
             if special != SPECIAL_START_ARRAY:
                 raise CBORDecodeError(
-                    'expected array after finite set ' 'semantic tag'
+                    b'expected array after finite set ' b'semantic tag'
                 )
 
             return True, size, readcount + readcount2 + 1, SPECIAL_START_SET
 
         else:
-            raise CBORDecodeError('semantic tag %d not allowed' % tagvalue)
+            raise CBORDecodeError(b'semantic tag %d not allowed' % tagvalue)
 
     elif majortype == MAJOR_TYPE_SPECIAL:
         # Only specific values for the information field are allowed.
@@ -424,7 +424,7 @@
             return True, None, 1, SPECIAL_INDEFINITE_BREAK
         # If value is 24, subtype is in next byte.
         else:
-            raise CBORDecodeError('special type %d not allowed' % subtype)
+            raise CBORDecodeError(b'special type %d not allowed' % subtype)
     else:
         assert False
 
@@ -457,10 +457,10 @@
         if allowindefinite:
             return True, None, 0
         else:
-            raise CBORDecodeError('indefinite length uint not allowed here')
+            raise CBORDecodeError(b'indefinite length uint not allowed here')
     elif subtype >= 28:
         raise CBORDecodeError(
-            'unsupported subtype on integer type: %d' % subtype
+            b'unsupported subtype on integer type: %d' % subtype
         )
 
     if subtype == 24:
@@ -472,7 +472,7 @@
     elif subtype == 27:
         s = STRUCT_BIG_ULONGLONG
     else:
-        raise CBORDecodeError('bounds condition checking violation')
+        raise CBORDecodeError(b'bounds condition checking violation')
 
     if len(b) - offset >= s.size:
         return True, s.unpack_from(b, offset)[0], s.size
@@ -641,19 +641,19 @@
 
                 elif special == SPECIAL_START_ARRAY:
                     self._collectionstack.append(
-                        {'remaining': value, 'v': [],}
+                        {b'remaining': value, b'v': [],}
                     )
                     self._state = self._STATE_WANT_ARRAY_VALUE
 
                 elif special == SPECIAL_START_MAP:
                     self._collectionstack.append(
-                        {'remaining': value, 'v': {},}
+                        {b'remaining': value, b'v': {},}
                     )
                     self._state = self._STATE_WANT_MAP_KEY
 
                 elif special == SPECIAL_START_SET:
                     self._collectionstack.append(
-                        {'remaining': value, 'v': set(),}
+                        {b'remaining': value, b'v': set(),}
                     )
                     self._state = self._STATE_WANT_SET_VALUE
 
@@ -662,7 +662,7 @@
 
                 else:
                     raise CBORDecodeError(
-                        'unhandled special state: %d' % special
+                        b'unhandled special state: %d' % special
                     )
 
             # This value becomes an element of the current array.
@@ -670,8 +670,8 @@
                 # Simple values get appended.
                 if special == SPECIAL_NONE:
                     c = self._collectionstack[-1]
-                    c['v'].append(value)
-                    c['remaining'] -= 1
+                    c[b'v'].append(value)
+                    c[b'remaining'] -= 1
 
                     # self._state doesn't need to be changed.
 
@@ -680,11 +680,11 @@
                     lastc = self._collectionstack[-1]
                     newvalue = []
 
-                    lastc['v'].append(newvalue)
-                    lastc['remaining'] -= 1
+                    lastc[b'v'].append(newvalue)
+                    lastc[b'remaining'] -= 1
 
                     self._collectionstack.append(
-                        {'remaining': value, 'v': newvalue,}
+                        {b'remaining': value, b'v': newvalue,}
                     )
 
                     # self._state doesn't need to be changed.
@@ -694,11 +694,11 @@
                     lastc = self._collectionstack[-1]
                     newvalue = {}
 
-                    lastc['v'].append(newvalue)
-                    lastc['remaining'] -= 1
+                    lastc[b'v'].append(newvalue)
+                    lastc[b'remaining'] -= 1
 
                     self._collectionstack.append(
-                        {'remaining': value, 'v': newvalue}
+                        {b'remaining': value, b'v': newvalue}
                     )
 
                     self._state = self._STATE_WANT_MAP_KEY
@@ -707,25 +707,25 @@
                     lastc = self._collectionstack[-1]
                     newvalue = set()
 
-                    lastc['v'].append(newvalue)
-                    lastc['remaining'] -= 1
+                    lastc[b'v'].append(newvalue)
+                    lastc[b'remaining'] -= 1
 
                     self._collectionstack.append(
-                        {'remaining': value, 'v': newvalue,}
+                        {b'remaining': value, b'v': newvalue,}
                     )
 
                     self._state = self._STATE_WANT_SET_VALUE
 
                 elif special == SPECIAL_START_INDEFINITE_BYTESTRING:
                     raise CBORDecodeError(
-                        'indefinite length bytestrings '
-                        'not allowed as array values'
+                        b'indefinite length bytestrings '
+                        b'not allowed as array values'
                     )
 
                 else:
                     raise CBORDecodeError(
-                        'unhandled special item when '
-                        'expecting array value: %d' % special
+                        b'unhandled special item when '
+                        b'expecting array value: %d' % special
                     )
 
             # This value becomes the key of the current map instance.
@@ -736,8 +736,8 @@
 
                 elif special == SPECIAL_START_INDEFINITE_BYTESTRING:
                     raise CBORDecodeError(
-                        'indefinite length bytestrings '
-                        'not allowed as map keys'
+                        b'indefinite length bytestrings '
+                        b'not allowed as map keys'
                     )
 
                 elif special in (
@@ -746,14 +746,14 @@
                     SPECIAL_START_SET,
                 ):
                     raise CBORDecodeError(
-                        'collections not supported as map ' 'keys'
+                        b'collections not supported as map ' b'keys'
                     )
 
                 # We do not allow special values to be used as map keys.
                 else:
                     raise CBORDecodeError(
-                        'unhandled special item when '
-                        'expecting map key: %d' % special
+                        b'unhandled special item when '
+                        b'expecting map key: %d' % special
                     )
 
             # This value becomes the value of the current map key.
@@ -761,8 +761,8 @@
                 # Simple values simply get inserted into the map.
                 if special == SPECIAL_NONE:
                     lastc = self._collectionstack[-1]
-                    lastc['v'][self._currentmapkey] = value
-                    lastc['remaining'] -= 1
+                    lastc[b'v'][self._currentmapkey] = value
+                    lastc[b'remaining'] -= 1
 
                     self._state = self._STATE_WANT_MAP_KEY
 
@@ -771,11 +771,11 @@
                     lastc = self._collectionstack[-1]
                     newvalue = []
 
-                    lastc['v'][self._currentmapkey] = newvalue
-                    lastc['remaining'] -= 1
+                    lastc[b'v'][self._currentmapkey] = newvalue
+                    lastc[b'remaining'] -= 1
 
                     self._collectionstack.append(
-                        {'remaining': value, 'v': newvalue,}
+                        {b'remaining': value, b'v': newvalue,}
                     )
 
                     self._state = self._STATE_WANT_ARRAY_VALUE
@@ -785,11 +785,11 @@
                     lastc = self._collectionstack[-1]
                     newvalue = {}
 
-                    lastc['v'][self._currentmapkey] = newvalue
-                    lastc['remaining'] -= 1
+                    lastc[b'v'][self._currentmapkey] = newvalue
+                    lastc[b'remaining'] -= 1
 
                     self._collectionstack.append(
-                        {'remaining': value, 'v': newvalue,}
+                        {b'remaining': value, b'v': newvalue,}
                     )
 
                     self._state = self._STATE_WANT_MAP_KEY
@@ -799,25 +799,25 @@
                     lastc = self._collectionstack[-1]
                     newvalue = set()
 
-                    lastc['v'][self._currentmapkey] = newvalue
-                    lastc['remaining'] -= 1
+                    lastc[b'v'][self._currentmapkey] = newvalue
+                    lastc[b'remaining'] -= 1
 
                     self._collectionstack.append(
-                        {'remaining': value, 'v': newvalue,}
+                        {b'remaining': value, b'v': newvalue,}
                     )
 
                     self._state = self._STATE_WANT_SET_VALUE
 
                 elif special == SPECIAL_START_INDEFINITE_BYTESTRING:
                     raise CBORDecodeError(
-                        'indefinite length bytestrings not '
-                        'allowed as map values'
+                        b'indefinite length bytestrings not '
+                        b'allowed as map values'
                     )
 
                 else:
                     raise CBORDecodeError(
-                        'unhandled special item when '
-                        'expecting map value: %d' % special
+                        b'unhandled special item when '
+                        b'expecting map value: %d' % special
                     )
 
                 self._currentmapkey = None
@@ -826,13 +826,13 @@
             elif self._state == self._STATE_WANT_SET_VALUE:
                 if special == SPECIAL_NONE:
                     lastc = self._collectionstack[-1]
-                    lastc['v'].add(value)
-                    lastc['remaining'] -= 1
+                    lastc[b'v'].add(value)
+                    lastc[b'remaining'] -= 1
 
                 elif special == SPECIAL_START_INDEFINITE_BYTESTRING:
                     raise CBORDecodeError(
-                        'indefinite length bytestrings not '
-                        'allowed as set values'
+                        b'indefinite length bytestrings not '
+                        b'allowed as set values'
                     )
 
                 elif special in (
@@ -841,14 +841,14 @@
                     SPECIAL_START_SET,
                 ):
                     raise CBORDecodeError(
-                        'collections not allowed as set ' 'values'
+                        b'collections not allowed as set ' b'values'
                     )
 
                 # We don't allow non-trivial types to exist as set values.
                 else:
                     raise CBORDecodeError(
-                        'unhandled special item when '
-                        'expecting set value: %d' % special
+                        b'unhandled special item when '
+                        b'expecting set value: %d' % special
                     )
 
             # This value represents the first chunk in an indefinite length
@@ -879,8 +879,8 @@
 
                 else:
                     raise CBORDecodeError(
-                        'unexpected special value when '
-                        'expecting bytestring chunk: %d' % special
+                        b'unexpected special value when '
+                        b'expecting bytestring chunk: %d' % special
                     )
 
             # This value represents the non-initial chunk in an indefinite
@@ -901,13 +901,13 @@
 
                 else:
                     raise CBORDecodeError(
-                        'unexpected special value when '
-                        'expecting bytestring chunk: %d' % special
+                        b'unexpected special value when '
+                        b'expecting bytestring chunk: %d' % special
                     )
 
             else:
                 raise CBORDecodeError(
-                    'unhandled decoder state: %d' % self._state
+                    b'unhandled decoder state: %d' % self._state
                 )
 
             # We could have just added the final value in a collection. End
@@ -924,7 +924,7 @@
                 # Or we are expecting more items for this collection.
                 lastc = self._collectionstack[-1]
 
-                if lastc['remaining']:
+                if lastc[b'remaining']:
                     break
 
                 # The collection at the top of the stack is complete.
@@ -941,11 +941,11 @@
                         list: self._STATE_WANT_ARRAY_VALUE,
                         dict: self._STATE_WANT_MAP_KEY,
                         set: self._STATE_WANT_SET_VALUE,
-                    }[type(self._collectionstack[-1]['v'])]
+                    }[type(self._collectionstack[-1][b'v'])]
 
                 # If this is the root collection, emit it.
                 else:
-                    self._decodedvalues.append(lastc['v'])
+                    self._decodedvalues.append(lastc[b'v'])
                     self._state = self._STATE_NONE
 
         return (
@@ -1053,9 +1053,9 @@
     havevalues, readcount, wantbytes = decoder.decode(b)
 
     if readcount != len(b):
-        raise CBORDecodeError('input data not fully consumed')
+        raise CBORDecodeError(b'input data not fully consumed')
 
     if decoder.inprogress:
-        raise CBORDecodeError('input data not complete')
+        raise CBORDecodeError(b'input data not complete')
 
     return decoder.getavailable()
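
For orientation, a minimal usage sketch of the byteified cborutil API (a sketch assuming the decodeall() and streamencode() helpers this hunk belongs to); the b'... %d' messages above lean on bytes %-formatting, i.e. PEP 461, available from Python 3.5:

    from mercurial.utils import cborutil

    # CBOR for the array [1, 2] is b'\x82\x01\x02'; streamencode() yields chunks.
    encoded = b''.join(cborutil.streamencode([1, 2]))
    # decodeall() raises CBORDecodeError on trailing or truncated input and
    # otherwise returns the list of decoded top-level values.
    assert cborutil.decodeall(encoded) == [[1, 2]]
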
--- a/mercurial/utils/compression.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/utils/compression.py	Sun Oct 06 09:48:39 2019 -0400
@@ -24,8 +24,8 @@
 
 # compression code
 
-SERVERROLE = 'server'
-CLIENTROLE = 'client'
+SERVERROLE = b'server'
+CLIENTROLE = b'client'
 
 compewireprotosupport = collections.namedtuple(
     r'compenginewireprotosupport',
@@ -85,13 +85,13 @@
         The argument must be a ``compressionengine`` instance.
         """
         if not isinstance(engine, compressionengine):
-            raise ValueError(_('argument must be a compressionengine'))
+            raise ValueError(_(b'argument must be a compressionengine'))
 
         name = engine.name()
 
         if name in self._engines:
             raise error.Abort(
-                _('compression engine %s already registered') % name
+                _(b'compression engine %s already registered') % name
             )
 
         bundleinfo = engine.bundletype()
@@ -100,11 +100,11 @@
 
             if bundlename in self._bundlenames:
                 raise error.Abort(
-                    _('bundle name %s already registered') % bundlename
+                    _(b'bundle name %s already registered') % bundlename
                 )
             if bundletype in self._bundletypes:
                 raise error.Abort(
-                    _('bundle type %s already registered by %s')
+                    _(b'bundle type %s already registered by %s')
                     % (bundletype, self._bundletypes[bundletype])
                 )
 
@@ -120,8 +120,8 @@
             if wiretype in self._wiretypes:
                 raise error.Abort(
                     _(
-                        'wire protocol compression %s already '
-                        'registered by %s'
+                        b'wire protocol compression %s already '
+                        b'registered by %s'
                     )
                     % (wiretype, self._wiretypes[wiretype])
                 )
@@ -131,7 +131,7 @@
         revlogheader = engine.revlogheader()
         if revlogheader and revlogheader in self._revlogheaders:
             raise error.Abort(
-                _('revlog header %s already registered by %s')
+                _(b'revlog header %s already registered by %s')
                 % (revlogheader, self._revlogheaders[revlogheader])
             )
 
@@ -158,7 +158,7 @@
         engine = self._engines[self._bundlenames[bundlename]]
         if not engine.available():
             raise error.Abort(
-                _('compression engine %s could not be loaded') % engine.name()
+                _(b'compression engine %s could not be loaded') % engine.name()
             )
         return engine
 
@@ -172,7 +172,7 @@
         engine = self._engines[self._bundletypes[bundletype]]
         if not engine.available():
             raise error.Abort(
-                _('compression engine %s could not be loaded') % engine.name()
+                _(b'compression engine %s could not be loaded') % engine.name()
             )
         return engine
 
@@ -186,7 +186,7 @@
         """
         assert role in (SERVERROLE, CLIENTROLE)
 
-        attr = 'serverpriority' if role == SERVERROLE else 'clientpriority'
+        attr = b'serverpriority' if role == SERVERROLE else b'clientpriority'
 
         engines = [self._engines[e] for e in self._wiretypes.values()]
         if onlyavailable:
@@ -205,7 +205,7 @@
         engine = self._engines[self._wiretypes[wiretype]]
         if not engine.available():
             raise error.Abort(
-                _('compression engine %s could not be loaded') % engine.name()
+                _(b'compression engine %s could not be loaded') % engine.name()
             )
         return engine
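
A hedged sketch of how the byteified priority attributes are consumed when picking wire-protocol engines (engine names as registered later in this module):

    from mercurial.utils import compression

    # Engines usable on the wire, sorted by the role's priority attribute;
    # b'zstd' (50) sorts ahead of b'zlib' (20) and the zero-priority engines.
    engines = compression.compengines.supportedwireengines(compression.SERVERROLE)
    names = [e.wireprotosupport().name for e in engines]
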
 
@@ -360,7 +360,7 @@
                     newbuf = self._pending[0]
                     buf.append(newbuf[self._pos : self._pos + l])
                     self._pos += l
-                    return ''.join(buf)
+                    return b''.join(buf)
 
                 newbuf = self._pending.pop(0)
                 if self._pos:
@@ -372,12 +372,12 @@
                 self._pos = 0
 
             if self._eof:
-                return ''.join(buf)
+                return b''.join(buf)
             chunk = self._reader(65536)
             self._decompress(chunk)
             if not chunk and not self._pending and not self._eof:
                 # No progress and no new data, bail out
-                return ''.join(buf)
+                return b''.join(buf)
 
 
 class _GzipCompressedStreamReader(_CompressedStreamReader):
@@ -391,9 +391,9 @@
             self._pending.append(newbuf)
         d = self._decompobj.copy()
         try:
-            d.decompress('x')
+            d.decompress(b'x')
             d.flush()
-            if d.unused_data == 'x':
+            if d.unused_data == b'x':
                 self._eof = True
         except zlib.error:
             pass
@@ -410,7 +410,7 @@
             self._pending.append(newbuf)
         try:
             while True:
-                newbuf = self._decompobj.decompress('')
+                newbuf = self._decompobj.decompress(b'')
                 if newbuf:
                     self._pending.append(newbuf)
                 else:
@@ -422,7 +422,7 @@
 class _TruncatedBZ2CompressedStreamReader(_BZ2CompressedStreamReader):
     def __init__(self, fh):
         super(_TruncatedBZ2CompressedStreamReader, self).__init__(fh)
-        newbuf = self._decompobj.decompress('BZ')
+        newbuf = self._decompobj.decompress(b'BZ')
         if newbuf:
             self._pending.append(newbuf)
 
@@ -439,7 +439,7 @@
             self._pending.append(newbuf)
         try:
             while True:
-                newbuf = self._decompobj.decompress('')
+                newbuf = self._decompobj.decompress(b'')
                 if newbuf:
                     self._pending.append(newbuf)
                 else:
@@ -450,7 +450,7 @@
 
 class _zlibengine(compressionengine):
     def name(self):
-        return 'zlib'
+        return b'zlib'
 
     def bundletype(self):
         """zlib compression using the DEFLATE algorithm.
@@ -459,18 +459,18 @@
         algorithm strikes a reasonable balance between compression ratio
         and size.
         """
-        return 'gzip', 'GZ'
+        return b'gzip', b'GZ'
 
     def wireprotosupport(self):
-        return compewireprotosupport('zlib', 20, 20)
+        return compewireprotosupport(b'zlib', 20, 20)
 
     def revlogheader(self):
-        return 'x'
+        return b'x'
 
     def compressstream(self, it, opts=None):
         opts = opts or {}
 
-        z = zlib.compressobj(opts.get('level', -1))
+        z = zlib.compressobj(opts.get(b'level', -1))
         for chunk in it:
             data = z.compress(chunk)
             # Not all calls to compress emit data. It is cheaper to inspect
@@ -521,7 +521,7 @@
                 parts.append(z.flush())
 
                 if sum(map(len, parts)) < insize:
-                    return ''.join(parts)
+                    return b''.join(parts)
                 return None
 
         def decompress(self, data):
@@ -529,14 +529,14 @@
                 return zlib.decompress(data)
             except zlib.error as e:
                 raise error.StorageError(
-                    _('revlog decompress error: %s')
+                    _(b'revlog decompress error: %s')
                     % stringutil.forcebytestr(e)
                 )
 
     def revlogcompressor(self, opts=None):
         level = None
         if opts is not None:
-            level = opts.get('zlib.level')
+            level = opts.get(b'zlib.level')
         return self.zlibrevlogcompressor(level)
 
 
@@ -545,7 +545,7 @@
 
 class _bz2engine(compressionengine):
     def name(self):
-        return 'bz2'
+        return b'bz2'
 
     def bundletype(self):
         """An algorithm that produces smaller bundles than ``gzip``.
@@ -559,16 +559,16 @@
         If available, the ``zstd`` engine can yield similar or better
         compression at much higher speeds.
         """
-        return 'bzip2', 'BZ'
+        return b'bzip2', b'BZ'
 
     # We declare a protocol name but don't advertise by default because
     # it is slow.
     def wireprotosupport(self):
-        return compewireprotosupport('bzip2', 0, 0)
+        return compewireprotosupport(b'bzip2', 0, 0)
 
     def compressstream(self, it, opts=None):
         opts = opts or {}
-        z = bz2.BZ2Compressor(opts.get('level', 9))
+        z = bz2.BZ2Compressor(opts.get(b'level', 9))
         for chunk in it:
             data = z.compress(chunk)
             if data:
@@ -585,10 +585,10 @@
 
 class _truncatedbz2engine(compressionengine):
     def name(self):
-        return 'bz2truncated'
+        return b'bz2truncated'
 
     def bundletype(self):
-        return None, '_truncatedBZ'
+        return None, b'_truncatedBZ'
 
     # We don't implement compressstream because it is hackily handled elsewhere.
 
@@ -601,20 +601,20 @@
 
 class _noopengine(compressionengine):
     def name(self):
-        return 'none'
+        return b'none'
 
     def bundletype(self):
         """No compression is performed.
 
         Use this compression engine to explicitly disable compression.
         """
-        return 'none', 'UN'
+        return b'none', b'UN'
 
     # Clients always support uncompressed payloads. Servers don't, because
     # unless you are on a fast network, uncompressed payloads can easily
     # saturate your network pipe.
     def wireprotosupport(self):
-        return compewireprotosupport('none', 0, 10)
+        return compewireprotosupport(b'none', 0, 10)
 
     # We don't implement revlogheader because it is handled specially
     # in the revlog class.
@@ -638,7 +638,7 @@
 
 class _zstdengine(compressionengine):
     def name(self):
-        return 'zstd'
+        return b'zstd'
 
     @propertycache
     def _module(self):
@@ -668,20 +668,20 @@
         If this engine is available and backwards compatibility is not a
         concern, it is likely the best available engine.
         """
-        return 'zstd', 'ZS'
+        return b'zstd', b'ZS'
 
     def wireprotosupport(self):
-        return compewireprotosupport('zstd', 50, 50)
+        return compewireprotosupport(b'zstd', 50, 50)
 
     def revlogheader(self):
-        return '\x28'
+        return b'\x28'
 
     def compressstream(self, it, opts=None):
         opts = opts or {}
         # zstd level 3 is almost always significantly faster than zlib
         # while providing no worse compression. It strikes a good balance
         # between speed and compression.
-        level = opts.get('level', 3)
+        level = opts.get(b'level', 3)
 
         zstd = self._module
         z = zstd.ZstdCompressor(level=level).compressobj()
@@ -732,7 +732,7 @@
                 chunks.append(z.flush())
 
                 if sum(map(len, chunks)) < insize:
-                    return ''.join(chunks)
+                    return b''.join(chunks)
                 return None
 
         def decompress(self, data):
@@ -752,18 +752,18 @@
                     pos = pos2
                 # Frame should be exhausted, so no finish() API.
 
-                return ''.join(chunks)
+                return b''.join(chunks)
             except Exception as e:
                 raise error.StorageError(
-                    _('revlog decompress error: %s')
+                    _(b'revlog decompress error: %s')
                     % stringutil.forcebytestr(e)
                 )
 
     def revlogcompressor(self, opts=None):
         opts = opts or {}
-        level = opts.get('zstd.level')
+        level = opts.get(b'zstd.level')
         if level is None:
-            level = opts.get('level')
+            level = opts.get(b'level')
         if level is None:
             level = 3
         return self.zstdrevlogcompressor(self._module, level=level)
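
Bundle compression goes through the same registry; a small sketch using names taken from the hunks above (b'GZ' is the zlib engine's bundle type):

    from mercurial.utils import compression

    engine = compression.compengines.forbundletype(b'GZ')
    # compressstream() consumes an iterator of byte chunks and yields
    # compressed chunks, so a whole-payload round trip is just a join.
    compressed = b''.join(engine.compressstream(iter([b'payload'])))
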
--- a/mercurial/utils/dateutil.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/utils/dateutil.py	Sun Oct 06 09:48:39 2019 -0400
@@ -20,40 +20,40 @@
 
 # used by parsedate
 defaultdateformats = (
-    '%Y-%m-%dT%H:%M:%S',  # the 'real' ISO8601
-    '%Y-%m-%dT%H:%M',  #   without seconds
-    '%Y-%m-%dT%H%M%S',  # another awful but legal variant without :
-    '%Y-%m-%dT%H%M',  #   without seconds
-    '%Y-%m-%d %H:%M:%S',  # our common legal variant
-    '%Y-%m-%d %H:%M',  #   without seconds
-    '%Y-%m-%d %H%M%S',  # without :
-    '%Y-%m-%d %H%M',  #   without seconds
-    '%Y-%m-%d %I:%M:%S%p',
-    '%Y-%m-%d %H:%M',
-    '%Y-%m-%d %I:%M%p',
-    '%Y-%m-%d',
-    '%m-%d',
-    '%m/%d',
-    '%m/%d/%y',
-    '%m/%d/%Y',
-    '%a %b %d %H:%M:%S %Y',
-    '%a %b %d %I:%M:%S%p %Y',
-    '%a, %d %b %Y %H:%M:%S',  #  GNU coreutils "/bin/date --rfc-2822"
-    '%b %d %H:%M:%S %Y',
-    '%b %d %I:%M:%S%p %Y',
-    '%b %d %H:%M:%S',
-    '%b %d %I:%M:%S%p',
-    '%b %d %H:%M',
-    '%b %d %I:%M%p',
-    '%b %d %Y',
-    '%b %d',
-    '%H:%M:%S',
-    '%I:%M:%S%p',
-    '%H:%M',
-    '%I:%M%p',
+    b'%Y-%m-%dT%H:%M:%S',  # the 'real' ISO8601
+    b'%Y-%m-%dT%H:%M',  #   without seconds
+    b'%Y-%m-%dT%H%M%S',  # another awful but legal variant without :
+    b'%Y-%m-%dT%H%M',  #   without seconds
+    b'%Y-%m-%d %H:%M:%S',  # our common legal variant
+    b'%Y-%m-%d %H:%M',  #   without seconds
+    b'%Y-%m-%d %H%M%S',  # without :
+    b'%Y-%m-%d %H%M',  #   without seconds
+    b'%Y-%m-%d %I:%M:%S%p',
+    b'%Y-%m-%d %H:%M',
+    b'%Y-%m-%d %I:%M%p',
+    b'%Y-%m-%d',
+    b'%m-%d',
+    b'%m/%d',
+    b'%m/%d/%y',
+    b'%m/%d/%Y',
+    b'%a %b %d %H:%M:%S %Y',
+    b'%a %b %d %I:%M:%S%p %Y',
+    b'%a, %d %b %Y %H:%M:%S',  #  GNU coreutils "/bin/date --rfc-2822"
+    b'%b %d %H:%M:%S %Y',
+    b'%b %d %I:%M:%S%p %Y',
+    b'%b %d %H:%M:%S',
+    b'%b %d %I:%M:%S%p',
+    b'%b %d %H:%M',
+    b'%b %d %I:%M%p',
+    b'%b %d %Y',
+    b'%b %d',
+    b'%H:%M:%S',
+    b'%I:%M:%S%p',
+    b'%H:%M',
+    b'%I:%M%p',
 )
 
-extendeddateformats = defaultdateformats + ("%Y", "%Y-%m", "%b", "%b %Y",)
+extendeddateformats = defaultdateformats + (b"%Y", b"%Y-%m", b"%b", b"%b %Y",)
 
 
 def makedate(timestamp=None):
@@ -62,8 +62,8 @@
     if timestamp is None:
         timestamp = time.time()
     if timestamp < 0:
-        hint = _("check your clock")
-        raise error.Abort(_("negative timestamp: %d") % timestamp, hint=hint)
+        hint = _(b"check your clock")
+        raise error.Abort(_(b"negative timestamp: %d") % timestamp, hint=hint)
     delta = datetime.datetime.utcfromtimestamp(
         timestamp
     ) - datetime.datetime.fromtimestamp(timestamp)
@@ -71,7 +71,7 @@
     return timestamp, tz
 
 
-def datestr(date=None, format='%a %b %d %H:%M:%S %Y %1%2'):
+def datestr(date=None, format=b'%a %b %d %H:%M:%S %Y %1%2'):
     """represent a (unixtime, offset) tuple as a localized time.
     unixtime is seconds since the epoch, and offset is the time zone's
     number of seconds away from UTC.
@@ -88,13 +88,13 @@
     'Fri Dec 13 20:45:52 1901 +0000'
     """
     t, tz = date or makedate()
-    if "%1" in format or "%2" in format or "%z" in format:
-        sign = (tz > 0) and "-" or "+"
+    if b"%1" in format or b"%2" in format or b"%z" in format:
+        sign = (tz > 0) and b"-" or b"+"
         minutes = abs(tz) // 60
         q, r = divmod(minutes, 60)
-        format = format.replace("%z", "%1%2")
-        format = format.replace("%1", "%c%02d" % (sign, q))
-        format = format.replace("%2", "%02d" % r)
+        format = format.replace(b"%z", b"%1%2")
+        format = format.replace(b"%1", b"%c%02d" % (sign, q))
+        format = format.replace(b"%2", b"%02d" % r)
     d = t - tz
     if d > 0x7FFFFFFF:
         d = 0x7FFFFFFF
@@ -110,7 +110,7 @@
 
 def shortdate(date=None):
     """turn (timestamp, tzoff) tuple into iso 8631 date."""
-    return datestr(date, format='%Y-%m-%d')
+    return datestr(date, format=b'%Y-%m-%d')
 
 
 def parsetimezone(s):
@@ -118,29 +118,29 @@
        (offset, remainder) pair"""
     s = pycompat.bytestr(s)
 
-    if s.endswith("GMT") or s.endswith("UTC"):
+    if s.endswith(b"GMT") or s.endswith(b"UTC"):
         return 0, s[:-3].rstrip()
 
     # Unix-style timezones [+-]hhmm
-    if len(s) >= 5 and s[-5] in "+-" and s[-4:].isdigit():
-        sign = (s[-5] == "+") and 1 or -1
+    if len(s) >= 5 and s[-5] in b"+-" and s[-4:].isdigit():
+        sign = (s[-5] == b"+") and 1 or -1
         hours = int(s[-4:-2])
         minutes = int(s[-2:])
         return -sign * (hours * 60 + minutes) * 60, s[:-5].rstrip()
 
     # ISO8601 trailing Z
-    if s.endswith("Z") and s[-2:-1].isdigit():
+    if s.endswith(b"Z") and s[-2:-1].isdigit():
         return 0, s[:-1]
 
     # ISO8601-style [+-]hh:mm
     if (
         len(s) >= 6
-        and s[-6] in "+-"
-        and s[-3] == ":"
+        and s[-6] in b"+-"
+        and s[-3] == b":"
         and s[-5:-3].isdigit()
         and s[-2:].isdigit()
     ):
-        sign = (s[-6] == "+") and 1 or -1
+        sign = (s[-6] == b"+") and 1 or -1
         hours = int(s[-5:-3])
         minutes = int(s[-2:])
         return -sign * (hours * 60 + minutes) * 60, s[:-6]
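
A worked example for the Unix-style branch above, under the byteified API: +0530 is 19800 seconds east of UTC, and offsets are stored negated (seconds west):

    from mercurial.utils import dateutil

    offset, rest = dateutil.parsetimezone(b'2019-10-06 09:48:39 +0530')
    assert (offset, rest) == (-19800, b'2019-10-06 09:48:39')
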
@@ -159,12 +159,19 @@
 
     # add missing elements from defaults
     usenow = False  # default to using biased defaults
-    for part in ("S", "M", "HI", "d", "mb", "yY"):  # decreasing specificity
+    for part in (
+        b"S",
+        b"M",
+        b"HI",
+        b"d",
+        b"mb",
+        b"yY",
+    ):  # decreasing specificity
         part = pycompat.bytestr(part)
-        found = [True for p in part if ("%" + p) in format]
+        found = [True for p in part if (b"%" + p) in format]
         if not found:
-            date += "@" + defaults[part][usenow]
-            format += "@%" + part[0]
+            date += b"@" + defaults[part][usenow]
+            format += b"@%" + part[0]
         else:
             # We've found a specific time element, less specific time
             # elements are relative to today
@@ -213,34 +220,34 @@
         formats = defaultdateformats
     date = date.strip()
 
-    if date == 'now' or date == _('now'):
+    if date == b'now' or date == _(b'now'):
         return makedate()
-    if date == 'today' or date == _('today'):
+    if date == b'today' or date == _(b'today'):
         date = datetime.date.today().strftime(r'%b %d')
         date = encoding.strtolocal(date)
-    elif date == 'yesterday' or date == _('yesterday'):
+    elif date == b'yesterday' or date == _(b'yesterday'):
         date = (datetime.date.today() - datetime.timedelta(days=1)).strftime(
             r'%b %d'
         )
         date = encoding.strtolocal(date)
 
     try:
-        when, offset = map(int, date.split(' '))
+        when, offset = map(int, date.split(b' '))
     except ValueError:
         # fill out defaults
         now = makedate()
         defaults = {}
-        for part in ("d", "mb", "yY", "HI", "M", "S"):
+        for part in (b"d", b"mb", b"yY", b"HI", b"M", b"S"):
             # this piece is for rounding the specific end of unknowns
             b = bias.get(part)
             if b is None:
-                if part[0:1] in "HMS":
-                    b = "00"
+                if part[0:1] in b"HMS":
+                    b = b"00"
                 else:
-                    b = "0"
+                    b = b"0"
 
             # this piece is for matching the generic end to today's date
-            n = datestr(now, "%" + part[0:1])
+            n = datestr(now, b"%" + part[0:1])
 
             defaults[part] = (b, n)
 
@@ -253,16 +260,16 @@
                 break
         else:
             raise error.ParseError(
-                _('invalid date: %r') % pycompat.bytestr(date)
+                _(b'invalid date: %r') % pycompat.bytestr(date)
             )
     # validate explicit (probably user-specified) date and
     # time zone offset. values must fit in signed 32 bits for
     # current 32-bit linux runtimes. timezones go from UTC-12
     # to UTC+14
     if when < -0x80000000 or when > 0x7FFFFFFF:
-        raise error.ParseError(_('date exceeds 32 bits: %d') % when)
+        raise error.ParseError(_(b'date exceeds 32 bits: %d') % when)
     if offset < -50400 or offset > 43200:
-        raise error.ParseError(_('impossible time zone offset: %d') % offset)
+        raise error.ParseError(_(b'impossible time zone offset: %d') % offset)
     return when, offset
 
 
@@ -296,42 +303,42 @@
     """
 
     def lower(date):
-        d = {'mb': "1", 'd': "1"}
+        d = {b'mb': b"1", b'd': b"1"}
         return parsedate(date, extendeddateformats, d)[0]
 
     def upper(date):
-        d = {'mb': "12", 'HI': "23", 'M': "59", 'S': "59"}
-        for days in ("31", "30", "29"):
+        d = {b'mb': b"12", b'HI': b"23", b'M': b"59", b'S': b"59"}
+        for days in (b"31", b"30", b"29"):
             try:
-                d["d"] = days
+                d[b"d"] = days
                 return parsedate(date, extendeddateformats, d)[0]
             except error.ParseError:
                 pass
-        d["d"] = "28"
+        d[b"d"] = b"28"
         return parsedate(date, extendeddateformats, d)[0]
 
     date = date.strip()
 
     if not date:
-        raise error.Abort(_("dates cannot consist entirely of whitespace"))
+        raise error.Abort(_(b"dates cannot consist entirely of whitespace"))
     elif date[0:1] == b"<":
         if not date[1:]:
-            raise error.Abort(_("invalid day spec, use '<DATE'"))
+            raise error.Abort(_(b"invalid day spec, use '<DATE'"))
         when = upper(date[1:])
         return lambda x: x <= when
     elif date[0:1] == b">":
         if not date[1:]:
-            raise error.Abort(_("invalid day spec, use '>DATE'"))
+            raise error.Abort(_(b"invalid day spec, use '>DATE'"))
         when = lower(date[1:])
         return lambda x: x >= when
     elif date[0:1] == b"-":
         try:
             days = int(date[1:])
         except ValueError:
-            raise error.Abort(_("invalid day spec: %s") % date[1:])
+            raise error.Abort(_(b"invalid day spec: %s") % date[1:])
         if days < 0:
             raise error.Abort(
-                _("%s must be nonnegative (see 'hg help dates')") % date[1:]
+                _(b"%s must be nonnegative (see 'hg help dates')") % date[1:]
             )
         when = makedate()[0] - days * 3600 * 24
         return lambda x: x >= when
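
Putting the byteified pieces together, a minimal parse/format round trip (format strings, including the %1/%2 offset placeholders, are bytes throughout):

    from mercurial.utils import dateutil

    when, tz = dateutil.parsedate(b'2006-02-06 23:04:07 +0800')
    # %1/%2 expand to the signed hour and minute halves of the offset.
    assert dateutil.datestr(
        (when, tz), format=b'%Y-%m-%d %H:%M:%S %1%2'
    ) == b'2006-02-06 23:04:07 +0800'
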
--- a/mercurial/utils/procutil.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/utils/procutil.py	Sun Oct 06 09:48:39 2019 -0400
@@ -91,8 +91,8 @@
     """return a message describing a subprocess status
     (codes from kill are negative - not os.system/wait encoding)"""
     if code >= 0:
-        return _("exited with status %d") % code
-    return _("killed by signal %d") % -code
+        return _(b"exited with status %d") % code
+    return _(b"killed by signal %d") % -code
 
 
 class _pfile(object):
@@ -120,12 +120,12 @@
         self.close()
 
 
-def popen(cmd, mode='rb', bufsize=-1):
-    if mode == 'rb':
+def popen(cmd, mode=b'rb', bufsize=-1):
+    if mode == b'rb':
         return _popenreader(cmd, bufsize)
-    elif mode == 'wb':
+    elif mode == b'wb':
         return _popenwriter(cmd, bufsize)
-    raise error.ProgrammingError('unsupported mode: %r' % mode)
+    raise error.ProgrammingError(b'unsupported mode: %r' % mode)
 
 
 def _popenreader(cmd, bufsize):
@@ -205,22 +205,22 @@
     the temporary files generated.'''
     inname, outname = None, None
     try:
-        infd, inname = pycompat.mkstemp(prefix='hg-filter-in-')
+        infd, inname = pycompat.mkstemp(prefix=b'hg-filter-in-')
         fp = os.fdopen(infd, r'wb')
         fp.write(s)
         fp.close()
-        outfd, outname = pycompat.mkstemp(prefix='hg-filter-out-')
+        outfd, outname = pycompat.mkstemp(prefix=b'hg-filter-out-')
         os.close(outfd)
-        cmd = cmd.replace('INFILE', inname)
-        cmd = cmd.replace('OUTFILE', outname)
+        cmd = cmd.replace(b'INFILE', inname)
+        cmd = cmd.replace(b'OUTFILE', outname)
         code = system(cmd)
-        if pycompat.sysplatform == 'OpenVMS' and code & 1:
+        if pycompat.sysplatform == b'OpenVMS' and code & 1:
             code = 0
         if code:
             raise error.Abort(
-                _("command '%s' failed: %s") % (cmd, explainexit(code))
+                _(b"command '%s' failed: %s") % (cmd, explainexit(code))
             )
-        with open(outname, 'rb') as fp:
+        with open(outname, b'rb') as fp:
             return fp.read()
     finally:
         try:
@@ -236,13 +236,13 @@
 
 
 _filtertable = {
-    'tempfile:': tempfilter,
-    'pipe:': pipefilter,
+    b'tempfile:': tempfilter,
+    b'pipe:': pipefilter,
 }
 
 
 def filter(s, cmd):
-    "filter a string through a command that transforms its input to its output"
+    b"filter a string through a command that transforms its input to its output"
     for name, fn in _filtertable.iteritems():
         if cmd.startswith(name):
             return fn(s, cmd[len(name) :].lstrip())
@@ -256,8 +256,8 @@
     (portable, not much used).
     """
     return (
-        pycompat.safehasattr(sys, "frozen")
-        or pycompat.safehasattr(sys, "importers")  # new py2exe
+        pycompat.safehasattr(sys, b"frozen")
+        or pycompat.safehasattr(sys, b"importers")  # new py2exe
         or imp.is_frozen(r"__main__")  # old py2exe
     )  # tools/freeze
 
@@ -271,27 +271,27 @@
     Defaults to $HG or 'hg' in the search path.
     """
     if _hgexecutable is None:
-        hg = encoding.environ.get('HG')
+        hg = encoding.environ.get(b'HG')
         mainmod = sys.modules[r'__main__']
         if hg:
             _sethgexecutable(hg)
         elif mainfrozen():
-            if getattr(sys, 'frozen', None) == 'macosx_app':
+            if getattr(sys, 'frozen', None) == b'macosx_app':
                 # Env variable set by py2app
-                _sethgexecutable(encoding.environ['EXECUTABLEPATH'])
+                _sethgexecutable(encoding.environ[b'EXECUTABLEPATH'])
             else:
                 _sethgexecutable(pycompat.sysexecutable)
         elif (
             not pycompat.iswindows
             and os.path.basename(
-                pycompat.fsencode(getattr(mainmod, '__file__', ''))
+                pycompat.fsencode(getattr(mainmod, '__file__', b''))
             )
-            == 'hg'
+            == b'hg'
         ):
             _sethgexecutable(pycompat.fsencode(mainmod.__file__))
         else:
             _sethgexecutable(
-                findexe('hg') or os.path.basename(pycompat.sysargv[0])
+                findexe(b'hg') or os.path.basename(pycompat.sysargv[0])
             )
     return _hgexecutable
 
@@ -356,17 +356,17 @@
     """return environ with optional override, useful for shelling out"""
 
     def py2shell(val):
         'convert python object into string that is useful to shell'
         if val is None or val is False:
-            return '0'
+            return b'0'
         if val is True:
-            return '1'
+            return b'1'
         return pycompat.bytestr(val)
 
     env = dict(encoding.environ)
     if environ:
         env.update((k, py2shell(v)) for k, v in environ.iteritems())
-    env['HG'] = hgexecutable()
+    env[b'HG'] = hgexecutable()
     return env
 
 
@@ -420,11 +420,11 @@
             stdout=subprocess.PIPE,
             stderr=subprocess.STDOUT,
         )
-        for line in iter(proc.stdout.readline, ''):
+        for line in iter(proc.stdout.readline, b''):
             out.write(line)
         proc.wait()
         rc = proc.returncode
-    if pycompat.sysplatform == 'OpenVMS' and rc & 1:
+    if pycompat.sysplatform == b'OpenVMS' and rc & 1:
         rc = 0
     return rc
 
@@ -432,7 +432,7 @@
 def gui():
     '''Are we running in a GUI?'''
     if pycompat.isdarwin:
-        if 'SSH_CONNECTION' in encoding.environ:
+        if b'SSH_CONNECTION' in encoding.environ:
             # handle SSH access to a box where the user is logged in
             return False
         elif getattr(osutil, 'isgui', None):
@@ -442,7 +442,7 @@
             # pure build; use a safe default
             return True
     else:
-        return pycompat.iswindows or encoding.environ.get("DISPLAY")
+        return pycompat.iswindows or encoding.environ.get(b"DISPLAY")
 
 
 def hgcmd():
@@ -453,9 +453,9 @@
     get either the python call or current executable.
     """
     if mainfrozen():
-        if getattr(sys, 'frozen', None) == 'macosx_app':
+        if getattr(sys, 'frozen', None) == b'macosx_app':
             # Env variable set by py2app
-            return [encoding.environ['EXECUTABLEPATH']]
+            return [encoding.environ[b'EXECUTABLEPATH']]
         else:
             return [pycompat.sysexecutable]
     return _gethgcmd()
@@ -589,7 +589,7 @@
                     returncode = errno.EINVAL
                 raise OSError(
                     returncode,
-                    'error running %r: %s' % (cmd, os.strerror(returncode)),
+                    b'error running %r: %s' % (cmd, os.strerror(returncode)),
                 )
             return
 
@@ -598,11 +598,11 @@
             # Start a new session
             os.setsid()
 
-            stdin = open(os.devnull, 'r')
+            stdin = open(os.devnull, b'r')
             if stdout is None:
-                stdout = open(os.devnull, 'w')
+                stdout = open(os.devnull, b'w')
             if stderr is None:
-                stderr = open(os.devnull, 'w')
+                stderr = open(os.devnull, b'w')
 
             # connect stdin to devnull to make sure the subprocess can't
             # muck up that stream for mercurial.
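
A small sketch of the byteified procutil surface (the literal results assume no translation catalog is active):

    from mercurial.utils import procutil

    assert procutil.explainexit(0) == b'exited with status 0'
    assert procutil.explainexit(-9) == b'killed by signal 9'
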
--- a/mercurial/utils/repoviewutil.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/utils/repoviewutil.py	Sun Oct 06 09:48:39 2019 -0400
@@ -15,10 +15,10 @@
 # This creates an ordering used for branchmap purposes.
 # The ordering may be partial.
 subsettable = {
-    None: 'visible',
-    'visible-hidden': 'visible',
-    'visible': 'served',
-    'served.hidden': 'served',
-    'served': 'immutable',
-    'immutable': 'base',
+    None: b'visible',
+    b'visible-hidden': b'visible',
+    b'visible': b'served',
+    b'served.hidden': b'served',
+    b'served': b'immutable',
+    b'immutable': b'base',
 }
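
Each key maps to a smaller view whose cached data it can reuse; a hedged sketch walking the ordering above down to b'base':

    from mercurial.utils.repoviewutil import subsettable

    name = b'visible'
    while name in subsettable:
        name = subsettable[name]  # b'visible' -> b'served' -> b'immutable' -> b'base'
    assert name == b'base'
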
--- a/mercurial/utils/storageutil.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/utils/storageutil.py	Sun Oct 06 09:48:39 2019 -0400
@@ -195,7 +195,7 @@
             return store.node(fileid)
         except IndexError:
             raise error.LookupError(
-                '%d' % fileid, identifier, _('no match found')
+                b'%d' % fileid, identifier, _(b'no match found')
             )
 
     if len(fileid) == 20:
@@ -226,7 +226,7 @@
     except (ValueError, OverflowError):
         pass
 
-    raise error.LookupError(fileid, identifier, _('no match found'))
+    raise error.LookupError(fileid, identifier, _(b'no match found'))
 
 
 def resolvestripinfo(minlinkrev, tiprev, headrevs, linkrevfn, parentrevsfn):
@@ -361,9 +361,9 @@
     fnode = store.node
     frev = store.rev
 
-    if nodesorder == 'nodes':
+    if nodesorder == b'nodes':
         revs = [frev(n) for n in nodes]
-    elif nodesorder == 'linear':
+    elif nodesorder == b'linear':
         revs = set(frev(n) for n in nodes)
         revs = dagop.linearize(revs, store.parentrevs)
     else:  # storage and default
@@ -498,7 +498,7 @@
     # "\1\ncensored:". A delta producing such a censored revision must be a
     # full-replacement delta, so we inspect the first and only patch in the
     # delta for this prefix.
-    hlen = struct.calcsize(">lll")
+    hlen = struct.calcsize(b">lll")
     if len(delta) <= hlen:
         return False
 
@@ -507,6 +507,6 @@
     if delta[:hlen] != mdiff.replacediffheader(oldlen, newlen):
         return False
 
-    add = "\1\ncensored:"
+    add = b"\1\ncensored:"
     addlen = len(add)
     return newlen >= addlen and delta[hlen : hlen + addlen] == add
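
For reference, a hedged sketch of the b'>lll' patch-hunk header inspected above (field meanings per mdiff's replace-style delta: start and end of the replaced range, then the length of the new data); struct accepts bytes format strings on both Python 2 and 3:

    import struct

    header = struct.pack(b'>lll', 0, 42, 12)  # start, end, new-data length
    assert len(header) == struct.calcsize(b'>lll') == 12
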
--- a/mercurial/utils/stringutil.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/utils/stringutil.py	Sun Oct 06 09:48:39 2019 -0400
@@ -66,24 +66,24 @@
 
     if isinstance(o, bytes):
         if bprefix:
-            yield "b'%s'" % escapestr(o)
+            yield b"b'%s'" % escapestr(o)
         else:
-            yield "'%s'" % escapestr(o)
+            yield b"'%s'" % escapestr(o)
     elif isinstance(o, bytearray):
         # codecs.escape_encode() can't handle bytearray, so escapestr fails
         # without coercion.
-        yield "bytearray['%s']" % escapestr(bytes(o))
+        yield b"bytearray['%s']" % escapestr(bytes(o))
     elif isinstance(o, list):
         if not o:
-            yield '[]'
+            yield b'[]'
             return
 
-        yield '['
+        yield b'['
 
         if indent:
             level += 1
-            yield '\n'
-            yield ' ' * (level * indent)
+            yield b'\n'
+            yield b' ' * (level * indent)
 
         for i, a in enumerate(o):
             for chunk in pprintgen(
@@ -93,28 +93,28 @@
 
             if i + 1 < len(o):
                 if indent:
-                    yield ',\n'
-                    yield ' ' * (level * indent)
+                    yield b',\n'
+                    yield b' ' * (level * indent)
                 else:
-                    yield ', '
+                    yield b', '
 
         if indent:
             level -= 1
-            yield '\n'
-            yield ' ' * (level * indent)
+            yield b'\n'
+            yield b' ' * (level * indent)
 
-        yield ']'
+        yield b']'
     elif isinstance(o, dict):
         if not o:
-            yield '{}'
+            yield b'{}'
             return
 
-        yield '{'
+        yield b'{'
 
         if indent:
             level += 1
-            yield '\n'
-            yield ' ' * (level * indent)
+            yield b'\n'
+            yield b' ' * (level * indent)
 
         for i, (k, v) in enumerate(sorted(o.items())):
             for chunk in pprintgen(
@@ -122,7 +122,7 @@
             ):
                 yield chunk
 
-            yield ': '
+            yield b': '
 
             for chunk in pprintgen(
                 v, bprefix=bprefix, indent=indent, level=level
@@ -131,28 +131,28 @@
 
             if i + 1 < len(o):
                 if indent:
-                    yield ',\n'
-                    yield ' ' * (level * indent)
+                    yield b',\n'
+                    yield b' ' * (level * indent)
                 else:
-                    yield ', '
+                    yield b', '
 
         if indent:
             level -= 1
-            yield '\n'
-            yield ' ' * (level * indent)
+            yield b'\n'
+            yield b' ' * (level * indent)
 
-        yield '}'
+        yield b'}'
     elif isinstance(o, set):
         if not o:
-            yield 'set([])'
+            yield b'set([])'
             return
 
-        yield 'set(['
+        yield b'set(['
 
         if indent:
             level += 1
-            yield '\n'
-            yield ' ' * (level * indent)
+            yield b'\n'
+            yield b' ' * (level * indent)
 
         for i, k in enumerate(sorted(o)):
             for chunk in pprintgen(
@@ -162,28 +162,28 @@
 
             if i + 1 < len(o):
                 if indent:
-                    yield ',\n'
-                    yield ' ' * (level * indent)
+                    yield b',\n'
+                    yield b' ' * (level * indent)
                 else:
-                    yield ', '
+                    yield b', '
 
         if indent:
             level -= 1
-            yield '\n'
-            yield ' ' * (level * indent)
+            yield b'\n'
+            yield b' ' * (level * indent)
 
-        yield '])'
+        yield b'])'
     elif isinstance(o, tuple):
         if not o:
-            yield '()'
+            yield b'()'
             return
 
-        yield '('
+        yield b'('
 
         if indent:
             level += 1
-            yield '\n'
-            yield ' ' * (level * indent)
+            yield b'\n'
+            yield b' ' * (level * indent)
 
         for i, a in enumerate(o):
             for chunk in pprintgen(
@@ -193,31 +193,31 @@
 
             if i + 1 < len(o):
                 if indent:
-                    yield ',\n'
-                    yield ' ' * (level * indent)
+                    yield b',\n'
+                    yield b' ' * (level * indent)
                 else:
-                    yield ', '
+                    yield b', '
 
         if indent:
             level -= 1
-            yield '\n'
-            yield ' ' * (level * indent)
+            yield b'\n'
+            yield b' ' * (level * indent)
 
-        yield ')'
+        yield b')'
     elif isinstance(o, types.GeneratorType):
         # Special case of empty generator.
         try:
             nextitem = next(o)
         except StopIteration:
-            yield 'gen[]'
+            yield b'gen[]'
             return
 
-        yield 'gen['
+        yield b'gen['
 
         if indent:
             level += 1
-            yield '\n'
-            yield ' ' * (level * indent)
+            yield b'\n'
+            yield b' ' * (level * indent)
 
         last = False
 
@@ -236,17 +236,17 @@
 
             if not last:
                 if indent:
-                    yield ',\n'
-                    yield ' ' * (level * indent)
+                    yield b',\n'
+                    yield b' ' * (level * indent)
                 else:
-                    yield ', '
+                    yield b', '
 
         if indent:
             level -= 1
-            yield '\n'
-            yield ' ' * (level * indent)
+            yield b'\n'
+            yield b' ' * (level * indent)
 
-        yield ']'
+        yield b']'
     else:
         yield pycompat.byterepr(o)
 
@@ -261,21 +261,21 @@
         #      ~~~~~~~~~~~~~~~~
         #      p0    p1        q0    q1
         q0 = -1
-        q1 = rs.find('<', p1 + 1)
+        q1 = rs.find(b'<', p1 + 1)
         if q1 < 0:
             q1 = len(rs)
-        elif q1 > p1 + 1 and rs.startswith('=', q1 - 1):
+        elif q1 > p1 + 1 and rs.startswith(b'=', q1 - 1):
             # backtrack for ' field=<'
-            q0 = rs.rfind(' ', p1 + 1, q1 - 1)
+            q0 = rs.rfind(b' ', p1 + 1, q1 - 1)
         if q0 < 0:
             q0 = q1
         else:
             q0 += 1  # skip ' '
-        l = rs.count('<', 0, p0) - rs.count('>', 0, p0)
+        l = rs.count(b'<', 0, p0) - rs.count(b'>', 0, p0)
         assert l >= 0
         lines.append((l, rs[p0:q0].rstrip()))
         p0, p1 = q0, q1
-    return '\n'.join('  ' * l + s for l, s in lines)
+    return b'\n'.join(b'  ' * l + s for l, s in lines)
 
 
 def buildrepr(r):
@@ -291,7 +291,7 @@
     ========  =================================
     """
     if r is None:
-        return ''
+        return b''
     elif isinstance(r, tuple):
         return r[0] % pycompat.rapply(pycompat.maybebytestr, r[1:])
     elif isinstance(r, bytes):
@@ -304,7 +304,7 @@
 
 def binary(s):
     """return true if a string is binary data"""
-    return bool(s and '\0' in s)
+    return bool(s and b'\0' in s)
 
 
 def stringmatcher(pattern, casesensitive=True):
@@ -345,7 +345,7 @@
     >>> itest(b'ABCDEFG', b'abc', b'def', b'abcdefg')
     ('literal', 'ABCDEFG', [False, False, True])
     """
-    if pattern.startswith('re:'):
+    if pattern.startswith(b're:'):
         pattern = pattern[3:]
         try:
             flags = 0
@@ -353,9 +353,9 @@
                 flags = remod.I
             regex = remod.compile(pattern, flags)
         except remod.error as e:
-            raise error.ParseError(_('invalid regular expression: %s') % e)
-        return 're', pattern, regex.search
-    elif pattern.startswith('literal:'):
+            raise error.ParseError(_(b'invalid regular expression: %s') % e)
+        return b're', pattern, regex.search
+    elif pattern.startswith(b'literal:'):
         pattern = pattern[8:]
 
     match = pattern.__eq__
@@ -363,21 +363,21 @@
     if not casesensitive:
         ipat = encoding.lower(pattern)
         match = lambda s: ipat == encoding.lower(s)
-    return 'literal', pattern, match
+    return b'literal', pattern, match
 
 
 def shortuser(user):
     """Return a short representation of a user name or email address."""
-    f = user.find('@')
+    f = user.find(b'@')
     if f >= 0:
         user = user[:f]
-    f = user.find('<')
+    f = user.find(b'<')
     if f >= 0:
         user = user[f + 1 :]
-    f = user.find(' ')
+    f = user.find(b' ')
     if f >= 0:
         user = user[:f]
-    f = user.find('.')
+    f = user.find(b'.')
     if f >= 0:
         user = user[:f]
     return user
@@ -385,10 +385,10 @@
 
 def emailuser(user):
     """Return the user portion of an email address."""
-    f = user.find('@')
+    f = user.find(b'@')
     if f >= 0:
         user = user[:f]
-    f = user.find('<')
+    f = user.find(b'<')
     if f >= 0:
         user = user[f + 1 :]
     return user
@@ -396,10 +396,10 @@
 
 def email(author):
     '''get email of author.'''
-    r = author.find('>')
+    r = author.find(b'>')
     if r == -1:
         r = None
-    return author[author.find('<') + 1 : r]
+    return author[author.find(b'<') + 1 : r]
 
 
 def person(author):
@@ -421,13 +421,13 @@
     >>> person(b'"Foo Bar <foo@bar>')
     'Foo Bar'
     """
-    if '@' not in author:
+    if b'@' not in author:
         return author
-    f = author.find('<')
+    f = author.find(b'<')
     if f != -1:
-        return author[:f].strip(' "').replace('\\"', '"')
-    f = author.find('@')
-    return author[:f].replace('.', ' ')
+        return author[:f].strip(b' "').replace(b'\\"', b'"')
+    f = author.find(b'@')
+    return author[:f].replace(b'.', b' ')
 
 
 @attr.s(hash=True)
@@ -497,7 +497,7 @@
 
         # Don't bother checking the line if it is a comment or
         # is an improperly formed author field
-        if line.lstrip().startswith('#'):
+        if line.lstrip().startswith(b'#'):
             continue
 
         # names, emails hold the parsed emails and names for each line
@@ -506,17 +506,17 @@
         namebuilder = []
 
         for element in line.split():
-            if element.startswith('#'):
+            if element.startswith(b'#'):
                 # If we reach a comment in the mailmap file, move on
                 break
 
-            elif element.startswith('<') and element.endswith('>'):
+            elif element.startswith(b'<') and element.endswith(b'>'):
                 # We have found an email.
                 # Parse it, and finalize any names from earlier
                 emails.append(element[1:-1])  # Slice off the "<>"
 
                 if namebuilder:
-                    names.append(' '.join(namebuilder))
+                    names.append(b' '.join(namebuilder))
                     namebuilder = []
 
                 # Break if we have found a second email, any other
@@ -587,7 +587,7 @@
         proper = mailmap.get(commit2, mailmapping(None, None))
 
     # Return the author field with proper values filled in
-    return '%s <%s>' % (
+    return b'%s <%s>' % (
         proper.name if proper.name else commit.name,
         proper.email if proper.email else commit.email,
     )
@@ -620,7 +620,7 @@
 
 def ellipsis(text, maxlength=400):
     """Trim string to at most maxlength (default: 400) columns in display."""
-    return encoding.trim(text, maxlength, ellipsis='...')
+    return encoding.trim(text, maxlength, ellipsis=b'...')
 
 
 def escapestr(s):
@@ -675,7 +675,7 @@
                 l += colwidth(ucstr[i])
                 if space_left < l:
                     return (ucstr[:i], ucstr[i:])
-            return ucstr, ''
+            return ucstr, b''
 
         # overriding of base class
         def _handle_long_word(self, reversed_chunks, cur_line, cur_len, width):
@@ -695,7 +695,7 @@
 
             lines = []
             if self.width <= 0:
-                raise ValueError("invalid width %r (must be > 0)" % self.width)
+                raise ValueError(b"invalid width %r (must be > 0)" % self.width)
 
             # Arrange in reverse order so items can be efficiently popped
             # from a stack of chunks.
@@ -759,7 +759,7 @@
     return tw(**kwargs)
 
 
-def wrap(line, width, initindent='', hangindent=''):
+def wrap(line, width, initindent=b'', hangindent=b''):
     maxindent = max(len(hangindent), len(initindent))
     if width <= maxindent:
         # adjust for weird terminal size
@@ -783,16 +783,16 @@
 
 
 _booleans = {
-    '1': True,
-    'yes': True,
-    'true': True,
-    'on': True,
-    'always': True,
-    '0': False,
-    'no': False,
-    'false': False,
-    'off': False,
-    'never': False,
+    b'1': True,
+    b'yes': True,
+    b'true': True,
+    b'on': True,
+    b'always': True,
+    b'0': False,
+    b'no': False,
+    b'false': False,
+    b'off': False,
+    b'never': False,
 }
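
A quick sketch of the byteified stringmatcher() contract shown above (the b're:' and b'literal:' prefixes select the matcher kind):

    from mercurial.utils import stringutil

    kind, pattern, match = stringutil.stringmatcher(b're:^foo')
    assert kind == b're' and bool(match(b'foobar'))
    assert stringutil.stringmatcher(b'foobar')[0] == b'literal'
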
 
 
--- a/mercurial/verify.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/verify.py	Sun Oct 06 09:48:39 2019 -0400
@@ -35,8 +35,8 @@
 def _normpath(f):
     # under hg < 2.4, convert didn't sanitize paths properly, so a
     # converted repo may contain repeated slashes
-    while '//' in f:
-        f = f.replace('//', '/')
+    while b'//' in f:
+        f = f.replace(b'//', b'/')
     return f
 
 
@@ -58,25 +58,25 @@
         self.refersmf = False
         self.fncachewarned = False
         # developer config: verify.skipflags
-        self.skipflags = repo.ui.configint('verify', 'skipflags')
+        self.skipflags = repo.ui.configint(b'verify', b'skipflags')
         self.warnorphanstorefiles = True
 
     def _warn(self, msg):
         """record a "warning" level issue"""
-        self.ui.warn(msg + "\n")
+        self.ui.warn(msg + b"\n")
         self.warnings += 1
 
     def _err(self, linkrev, msg, filename=None):
         """record a "error" level issue"""
         if linkrev is not None:
             self.badrevs.add(linkrev)
-            linkrev = "%d" % linkrev
+            linkrev = b"%d" % linkrev
         else:
-            linkrev = '?'
-        msg = "%s: %s" % (linkrev, msg)
+            linkrev = b'?'
+        msg = b"%s: %s" % (linkrev, msg)
         if filename:
-            msg = "%s@%s" % (filename, msg)
-        self.ui.warn(" " + msg + "\n")
+            msg = b"%s@%s" % (filename, msg)
+        self.ui.warn(b" " + msg + b"\n")
         self.errors += 1
 
     def _exc(self, linkrev, msg, inst, filename=None):
@@ -84,7 +84,7 @@
         fmsg = pycompat.bytestr(inst)
         if not fmsg:
             fmsg = pycompat.byterepr(inst)
-        self._err(linkrev, "%s: %s" % (msg, fmsg), filename)
+        self._err(linkrev, b"%s: %s" % (msg, fmsg), filename)
 
     def _checkrevlog(self, obj, name, linkrev):
         """verify high level property of a revlog
@@ -95,20 +95,20 @@
         - revlog's format version is correct.
         """
         if not len(obj) and (self.havecl or self.havemf):
-            self._err(linkrev, _("empty or missing %s") % name)
+            self._err(linkrev, _(b"empty or missing %s") % name)
             return
 
         d = obj.checksize()
         if d[0]:
-            self._err(None, _("data length off by %d bytes") % d[0], name)
+            self._err(None, _(b"data length off by %d bytes") % d[0], name)
         if d[1]:
-            self._err(None, _("index contains %d extra bytes") % d[1], name)
+            self._err(None, _(b"index contains %d extra bytes") % d[1], name)
 
         if obj.version != revlog.REVLOGV0:
             if not self.revlogv1:
-                self._warn(_("warning: `%s' uses revlog format 1") % name)
+                self._warn(_(b"warning: `%s' uses revlog format 1") % name)
         elif self.revlogv1:
-            self._warn(_("warning: `%s' uses revlog format 0") % name)
+            self._warn(_(b"warning: `%s' uses revlog format 0") % name)
 
     def _checkentry(self, obj, i, node, seen, linkrevs, f):
         """verify a single revlog entry
@@ -133,9 +133,9 @@
         lr = obj.linkrev(obj.rev(node))
         if lr < 0 or (self.havecl and lr not in linkrevs):
             if lr < 0 or lr >= len(self.repo.changelog):
-                msg = _("rev %d points to nonexistent changeset %d")
+                msg = _(b"rev %d points to nonexistent changeset %d")
             else:
-                msg = _("rev %d points to unexpected changeset %d")
+                msg = _(b"rev %d points to unexpected changeset %d")
             self._err(None, msg % (i, lr), f)
             if linkrevs:
                 if f and len(linkrevs) > 1:
@@ -149,8 +149,8 @@
                     except Exception:
                         pass
                 self._warn(
-                    _(" (expected %s)")
-                    % " ".join(map(pycompat.bytestr, linkrevs))
+                    _(b" (expected %s)")
+                    % b" ".join(map(pycompat.bytestr, linkrevs))
                 )
             lr = None  # can't be trusted
 
@@ -159,20 +159,20 @@
             if p1 not in seen and p1 != nullid:
                 self._err(
                     lr,
-                    _("unknown parent 1 %s of %s") % (short(p1), short(node)),
+                    _(b"unknown parent 1 %s of %s") % (short(p1), short(node)),
                     f,
                 )
             if p2 not in seen and p2 != nullid:
                 self._err(
                     lr,
-                    _("unknown parent 2 %s of %s") % (short(p2), short(node)),
+                    _(b"unknown parent 2 %s of %s") % (short(p2), short(node)),
                     f,
                 )
         except Exception as inst:
-            self._exc(lr, _("checking parents of %s") % short(node), inst, f)
+            self._exc(lr, _(b"checking parents of %s") % short(node), inst, f)
 
         if node in seen:
-            self._err(lr, _("duplicate revision %d (%d)") % (i, seen[node]), f)
+            self._err(lr, _(b"duplicate revision %d (%d)") % (i, seen[node]), f)
         seen[node] = i
         return lr
 
@@ -185,15 +185,15 @@
         # initial validation and generic report
         repo = self.repo
         ui = repo.ui
-        if not repo.url().startswith('file:'):
-            raise error.Abort(_("cannot verify bundle or remote repos"))
+        if not repo.url().startswith(b'file:'):
+            raise error.Abort(_(b"cannot verify bundle or remote repos"))
 
-        if os.path.exists(repo.sjoin("journal")):
-            ui.warn(_("abandoned transaction found - run hg recover\n"))
+        if os.path.exists(repo.sjoin(b"journal")):
+            ui.warn(_(b"abandoned transaction found - run hg recover\n"))
 
         if ui.verbose or not self.revlogv1:
             ui.status(
-                _("repository uses revlog format %d\n")
+                _(b"repository uses revlog format %d\n")
                 % (self.revlogv1 and 1 or 0)
             )
 
@@ -206,23 +206,23 @@
 
         # final report
         ui.status(
-            _("checked %d changesets with %d changes to %d files\n")
+            _(b"checked %d changesets with %d changes to %d files\n")
             % (len(repo.changelog), filerevisions, totalfiles)
         )
         if self.warnings:
-            ui.warn(_("%d warnings encountered!\n") % self.warnings)
+            ui.warn(_(b"%d warnings encountered!\n") % self.warnings)
         if self.fncachewarned:
             ui.warn(
                 _(
-                    'hint: run "hg debugrebuildfncache" to recover from '
-                    'corrupt fncache\n'
+                    b'hint: run "hg debugrebuildfncache" to recover from '
+                    b'corrupt fncache\n'
                 )
             )
         if self.errors:
-            ui.warn(_("%d integrity errors encountered!\n") % self.errors)
+            ui.warn(_(b"%d integrity errors encountered!\n") % self.errors)
             if self.badrevs:
                 ui.warn(
-                    _("(first damaged changeset appears to be %d)\n")
+                    _(b"(first damaged changeset appears to be %d)\n")
                     % min(self.badrevs)
                 )
             return 1
@@ -249,18 +249,18 @@
         match = self.match
         cl = repo.changelog
 
-        ui.status(_("checking changesets\n"))
+        ui.status(_(b"checking changesets\n"))
         mflinkrevs = {}
         filelinkrevs = {}
         seen = {}
-        self._checkrevlog(cl, "changelog", 0)
+        self._checkrevlog(cl, b"changelog", 0)
         progress = ui.makeprogress(
-            _('checking'), unit=_('changesets'), total=len(repo)
+            _(b'checking'), unit=_(b'changesets'), total=len(repo)
         )
         for i in repo:
             progress.update(i)
             n = cl.node(i)
-            self._checkentry(cl, i, n, seen, [i], "changelog")
+            self._checkentry(cl, i, n, seen, [i], b"changelog")
 
             try:
                 changes = cl.read(n)
@@ -272,12 +272,12 @@
                         filelinkrevs.setdefault(_normpath(f), []).append(i)
             except Exception as inst:
                 self.refersmf = True
-                self._exc(i, _("unpacking changeset %s") % short(n), inst)
+                self._exc(i, _(b"unpacking changeset %s") % short(n), inst)
         progress.complete()
         return mflinkrevs, filelinkrevs
 
     def _verifymanifest(
-        self, mflinkrevs, dir="", storefiles=None, subdirprogress=None
+        self, mflinkrevs, dir=b"", storefiles=None, subdirprogress=None
     ):
         """verify the manifestlog content
 
@@ -313,12 +313,12 @@
         mf = mfl.getstorage(dir)
 
         if not dir:
-            self.ui.status(_("checking manifests\n"))
+            self.ui.status(_(b"checking manifests\n"))
 
         filenodes = {}
         subdirnodes = {}
         seen = {}
-        label = "manifest"
+        label = b"manifest"
         if dir:
             label = dir
             revlogfiles = mf.files()
@@ -330,7 +330,7 @@
             # null manifests.
             self._checkrevlog(mf, label, 0)
         progress = ui.makeprogress(
-            _('checking'), unit=_('manifests'), total=len(mf)
+            _(b'checking'), unit=_(b'manifests'), total=len(mf)
         )
         for i in mf:
             if not dir:
@@ -342,24 +342,24 @@
             elif dir:
                 self._err(
                     lr,
-                    _("%s not in parent-directory manifest") % short(n),
+                    _(b"%s not in parent-directory manifest") % short(n),
                     label,
                 )
             else:
-                self._err(lr, _("%s not in changesets") % short(n), label)
+                self._err(lr, _(b"%s not in changesets") % short(n), label)
 
             try:
                 mfdelta = mfl.get(dir, n).readdelta(shallow=True)
                 for f, fn, fl in mfdelta.iterentries():
                     if not f:
-                        self._err(lr, _("entry without name in manifest"))
-                    elif f == "/dev/null":  # ignore this in very old repos
+                        self._err(lr, _(b"entry without name in manifest"))
+                    elif f == b"/dev/null":  # ignore this in very old repos
                         continue
                     fullpath = dir + _normpath(f)
-                    if fl == 't':
+                    if fl == b't':
                         if not match.visitdir(fullpath):
                             continue
-                        subdirnodes.setdefault(fullpath + '/', {}).setdefault(
+                        subdirnodes.setdefault(fullpath + b'/', {}).setdefault(
                             fn, []
                         ).append(lr)
                     else:
@@ -367,7 +367,7 @@
                             continue
                         filenodes.setdefault(fullpath, {}).setdefault(fn, lr)
             except Exception as inst:
-                self._exc(lr, _("reading delta %s") % short(n), inst, label)
+                self._exc(lr, _(b"reading delta %s") % short(n), inst, label)
             if self._level >= VERIFY_FULL:
                 try:
                     # Various issues can affect manifest. So we read each full
@@ -377,7 +377,7 @@
                 except Exception as inst:
                     self._exc(
                         lr,
-                        _("reading full manifest %s") % short(n),
+                        _(b"reading full manifest %s") % short(n),
                         inst,
                         label,
                     )
@@ -394,8 +394,8 @@
                     self._err(
                         c,
                         _(
-                            "parent-directory manifest refers to unknown"
-                            " revision %s"
+                            b"parent-directory manifest refers to unknown"
+                            b" revision %s"
                         )
                         % short(m),
                         label,
@@ -403,23 +403,24 @@
                 else:
                     self._err(
                         c,
-                        _("changeset refers to unknown revision %s") % short(m),
+                        _(b"changeset refers to unknown revision %s")
+                        % short(m),
                         label,
                     )
 
         if not dir and subdirnodes:
-            self.ui.status(_("checking directory manifests\n"))
+            self.ui.status(_(b"checking directory manifests\n"))
             storefiles = set()
             subdirs = set()
             revlogv1 = self.revlogv1
             for f, f2, size in repo.store.datafiles():
                 if not f:
-                    self._err(None, _("cannot decode filename '%s'") % f2)
-                elif (size > 0 or not revlogv1) and f.startswith('meta/'):
+                    self._err(None, _(b"cannot decode filename '%s'") % f2)
+                elif (size > 0 or not revlogv1) and f.startswith(b'meta/'):
                     storefiles.add(_normpath(f))
                     subdirs.add(os.path.dirname(f))
             subdirprogress = ui.makeprogress(
-                _('checking'), unit=_('manifests'), total=len(subdirs)
+                _(b'checking'), unit=_(b'manifests'), total=len(subdirs)
             )
 
         for subdir, linkrevs in subdirnodes.iteritems():
@@ -433,25 +434,25 @@
             subdirprogress.complete()
             if self.warnorphanstorefiles:
                 for f in sorted(storefiles):
-                    self._warn(_("warning: orphan data file '%s'") % f)
+                    self._warn(_(b"warning: orphan data file '%s'") % f)
 
         return filenodes
 
     def _crosscheckfiles(self, filelinkrevs, filenodes):
         repo = self.repo
         ui = self.ui
-        ui.status(_("crosschecking files in changesets and manifests\n"))
+        ui.status(_(b"crosschecking files in changesets and manifests\n"))
 
         total = len(filelinkrevs) + len(filenodes)
         progress = ui.makeprogress(
-            _('crosschecking'), unit=_('files'), total=total
+            _(b'crosschecking'), unit=_(b'files'), total=total
         )
         if self.havemf:
             for f in sorted(filelinkrevs):
                 progress.increment()
                 if f not in filenodes:
                     lr = filelinkrevs[f][0]
-                    self._err(lr, _("in changeset but not in manifest"), f)
+                    self._err(lr, _(b"in changeset but not in manifest"), f)
 
         if self.havecl:
             for f in sorted(filenodes):
@@ -462,7 +463,7 @@
                         lr = min([fl.linkrev(fl.rev(n)) for n in filenodes[f]])
                     except Exception:
                         lr = None
-                    self._err(lr, _("in manifest but not in changeset"), f)
+                    self._err(lr, _(b"in manifest but not in changeset"), f)
 
         progress.complete()
 
@@ -472,27 +473,27 @@
         lrugetctx = self.lrugetctx
         revlogv1 = self.revlogv1
         havemf = self.havemf
-        ui.status(_("checking files\n"))
+        ui.status(_(b"checking files\n"))
 
         storefiles = set()
         for f, f2, size in repo.store.datafiles():
             if not f:
-                self._err(None, _("cannot decode filename '%s'") % f2)
-            elif (size > 0 or not revlogv1) and f.startswith('data/'):
+                self._err(None, _(b"cannot decode filename '%s'") % f2)
+            elif (size > 0 or not revlogv1) and f.startswith(b'data/'):
                 storefiles.add(_normpath(f))
 
         state = {
             # TODO this assumes revlog storage for changelog.
-            'expectedversion': self.repo.changelog.version & 0xFFFF,
-            'skipflags': self.skipflags,
+            b'expectedversion': self.repo.changelog.version & 0xFFFF,
+            b'skipflags': self.skipflags,
             # experimental config: censor.policy
-            'erroroncensored': ui.config('censor', 'policy') == 'abort',
+            b'erroroncensored': ui.config(b'censor', b'policy') == b'abort',
         }
 
         files = sorted(set(filenodes) | set(filelinkrevs))
         revisions = 0
         progress = ui.makeprogress(
-            _('checking'), unit=_('files'), total=len(files)
+            _(b'checking'), unit=_(b'files'), total=len(files)
         )
         for i, f in enumerate(files):
             progress.update(i, item=f)
@@ -510,7 +511,7 @@
             try:
                 fl = repo.file(f)
             except error.StorageError as e:
-                self._err(lr, _("broken revlog! (%s)") % e, f)
+                self._err(lr, _(b"broken revlog! (%s)") % e, f)
                 continue
 
             for ff in fl.files():
@@ -519,15 +520,15 @@
                 except KeyError:
                     if self.warnorphanstorefiles:
                         self._warn(
-                            _(" warning: revlog '%s' not in fncache!") % ff
+                            _(b" warning: revlog '%s' not in fncache!") % ff
                         )
                         self.fncachewarned = True
 
             if not len(fl) and (self.havecl or self.havemf):
-                self._err(lr, _("empty or missing %s") % f)
+                self._err(lr, _(b"empty or missing %s") % f)
             else:
                 # Guard against implementations not setting this.
-                state['skipread'] = set()
+                state[b'skipread'] = set()
                 for problem in fl.verifyintegrity(state):
                     if problem.node is not None:
                         linkrev = fl.linkrev(fl.rev(problem.node))
@@ -544,8 +545,8 @@
                         )
                     else:
                         raise error.ProgrammingError(
-                            'problem instance does not set warning or error '
-                            'attribute: %s' % problem.msg
+                            b'problem instance does not set warning or error '
+                            b'attribute: %s' % problem.msg
                         )
 
             seen = {}
@@ -555,11 +556,11 @@
                 lr = self._checkentry(fl, i, n, seen, linkrevs, f)
                 if f in filenodes:
                     if havemf and n not in filenodes[f]:
-                        self._err(lr, _("%s not in manifests") % (short(n)), f)
+                        self._err(lr, _(b"%s not in manifests") % (short(n)), f)
                     else:
                         del filenodes[f][n]
 
-                if n in state['skipread']:
+                if n in state[b'skipread']:
                     continue
 
                 # check renames
@@ -574,8 +575,8 @@
                             if not any(rp[0] in pctx for pctx in ctx.parents()):
                                 self._warn(
                                     _(
-                                        "warning: copy source of '%s' not"
-                                        " in parents of %s"
+                                        b"warning: copy source of '%s' not"
+                                        b" in parents of %s"
                                     )
                                     % (f, ctx)
                                 )
@@ -584,8 +585,8 @@
                             self._err(
                                 lr,
                                 _(
-                                    "empty or missing copy source revlog "
-                                    "%s:%s"
+                                    b"empty or missing copy source revlog "
+                                    b"%s:%s"
                                 )
                                 % (rp[0], short(rp[1])),
                                 f,
@@ -593,8 +594,8 @@
                         elif rp[1] == nullid:
                             ui.note(
                                 _(
-                                    "warning: %s@%s: copy source"
-                                    " revision is nullid %s:%s\n"
+                                    b"warning: %s@%s: copy source"
+                                    b" revision is nullid %s:%s\n"
                                 )
                                 % (f, lr, rp[0], short(rp[1]))
                             )
@@ -602,7 +603,7 @@
                             fl2.rev(rp[1])
                 except Exception as inst:
                     self._exc(
-                        lr, _("checking rename of %s") % short(n), inst, f
+                        lr, _(b"checking rename of %s") % short(n), inst, f
                     )
 
             # cross-check
@@ -611,7 +612,7 @@
                 for lr, node in sorted(fns):
                     self._err(
                         lr,
-                        _("manifest refers to unknown revision %s")
+                        _(b"manifest refers to unknown revision %s")
                         % short(node),
                         f,
                     )
@@ -619,6 +620,6 @@
 
         if self.warnorphanstorefiles:
             for f in sorted(storefiles):
-                self._warn(_("warning: orphan data file '%s'") % f)
+                self._warn(_(b"warning: orphan data file '%s'") % f)
 
         return len(files), revisions
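
The verify.py hunks above all feed their literals into _() and bytes
%-formatting, so every message has to become bytes. A minimal sketch of
why, assuming nothing beyond stock Python 3.5+ (PEP 461 bytes
%-formatting); the message text is illustrative:

    # A bytes format string accepts bytes and numeric arguments,
    # but rejects str outright.
    node_hex = b"a1b2c3d4"

    assert b"unpacking changeset %s" % node_hex == b"unpacking changeset a1b2c3d4"
    assert b"checking %d manifests" % 42 == b"checking 42 manifests"

    try:
        b"unpacking changeset %s" % u"a1b2c3d4"  # str argument: TypeError
    except TypeError:
        pass

    # Mixing the other way around does not raise, which is worse: the
    # bytes repr leaks straight into user-visible output.
    assert "unpacking changeset %s" % node_hex == "unpacking changeset b'a1b2c3d4'"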
--- a/mercurial/vfs.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/vfs.py	Sun Oct 06 09:48:39 2019 -0400
@@ -47,7 +47,7 @@
 
     def __init__(self, *args, **kwargs):
         '''Prevent instantiation; don't call this from subclasses.'''
-        raise NotImplementedError('attempted instantiating ' + str(type(self)))
+        raise NotImplementedError(r'attempted instantiating ' + str(type(self)))
 
     def _auditpath(self, path, mode):
         raise NotImplementedError
@@ -59,9 +59,9 @@
         except IOError as inst:
             if inst.errno != errno.ENOENT:
                 raise
-        return ""
+        return b""
 
-    def tryreadlines(self, path, mode='rb'):
+    def tryreadlines(self, path, mode=b'rb'):
         '''gracefully return an empty array for missing files'''
         try:
             return self.readlines(path, mode=mode)
@@ -81,23 +81,23 @@
         return self.__call__
 
     def read(self, path):
-        with self(path, 'rb') as fp:
+        with self(path, b'rb') as fp:
             return fp.read()
 
-    def readlines(self, path, mode='rb'):
+    def readlines(self, path, mode=b'rb'):
         with self(path, mode=mode) as fp:
             return fp.readlines()
 
     def write(self, path, data, backgroundclose=False, **kwargs):
-        with self(path, 'wb', backgroundclose=backgroundclose, **kwargs) as fp:
+        with self(path, b'wb', backgroundclose=backgroundclose, **kwargs) as fp:
             return fp.write(data)
 
-    def writelines(self, path, data, mode='wb', notindexed=False):
+    def writelines(self, path, data, mode=b'wb', notindexed=False):
         with self(path, mode=mode, notindexed=notindexed) as fp:
             return fp.writelines(data)
 
     def append(self, path, data):
-        with self(path, 'ab') as fp:
+        with self(path, b'ab') as fp:
             return fp.write(data)
 
     def basename(self, path):
@@ -175,7 +175,7 @@
     def mkdir(self, path=None):
         return os.mkdir(self.join(path))
 
-    def mkstemp(self, suffix='', prefix='tmp', dir=None):
+    def mkstemp(self, suffix=b'', prefix=b'tmp', dir=None):
         fd, name = pycompat.mkstemp(
             suffix=suffix, prefix=prefix, dir=self.join(dir)
         )
@@ -203,7 +203,7 @@
         checkambig=True only in limited cases (see also issue5418 and
         issue5584 for detail).
         """
-        self._auditpath(dst, 'w')
+        self._auditpath(dst, b'w')
         srcpath = self.join(src)
         dstpath = self.join(dst)
         oldstat = checkambig and util.filestat.frompath(dstpath)
@@ -302,7 +302,7 @@
         vfs = getattr(self, 'vfs', self)
         if getattr(vfs, '_backgroundfilecloser', None):
             raise error.Abort(
-                _('can only have 1 active background file closer')
+                _(b'can only have 1 active background file closer')
             )
 
         with backgroundfilecloser(ui, expectedcount=expectedcount) as bfc:
@@ -365,13 +365,13 @@
                 path = os.path.relpath(path, self.base)
             r = util.checkosfilename(path)
             if r:
-                raise error.Abort("%s: %r" % (r, path))
+                raise error.Abort(b"%s: %r" % (r, path))
             self.audit(path, mode=mode)
 
     def __call__(
         self,
         path,
-        mode="r",
+        mode=b"r",
         atomictemp=False,
         notindexed=False,
         backgroundclose=False,
@@ -413,11 +413,11 @@
             self._auditpath(path, mode)
         f = self.join(path)
 
-        if "b" not in mode:
-            mode += "b"  # for that other OS
+        if b"b" not in mode:
+            mode += b"b"  # for that other OS
 
         nlink = -1
-        if mode not in ('r', 'rb'):
+        if mode not in (b'r', b'rb'):
             dirname, basename = util.split(f)
             # If basename is empty, then the path is malformed because it points
             # to a directory. Let the posixfile() call below raise IOError.
@@ -429,7 +429,7 @@
                         f, mode, self.createmode, checkambig=checkambig
                     )
                 try:
-                    if 'w' in mode:
+                    if b'w' in mode:
                         util.unlink(f)
                         nlink = 0
                     else:
@@ -455,11 +455,11 @@
             self._fixfilemode(f)
 
         if checkambig:
-            if mode in ('r', 'rb'):
+            if mode in (b'r', b'rb'):
                 raise error.Abort(
                     _(
-                        'implementation error: mode %s is not'
-                        ' valid for checkambig=True'
+                        b'implementation error: mode %s is not'
+                        b' valid for checkambig=True'
                     )
                     % mode
                 )
@@ -471,8 +471,8 @@
             if not self._backgroundfilecloser:
                 raise error.Abort(
                     _(
-                        'backgroundclose can only be used when a '
-                        'backgroundclosing context manager is active'
+                        b'backgroundclose can only be used when a '
+                        b'backgroundclosing context manager is active'
                     )
                 )
 
@@ -493,7 +493,7 @@
             except OSError as err:
                 raise OSError(
                     err.errno,
-                    _('could not symlink to %r: %s')
+                    _(b'could not symlink to %r: %s')
                     % (src, encoding.strtolocal(err.strerror)),
                     linkname,
                 )
@@ -552,9 +552,9 @@
     def __init__(self, vfs):
         proxyvfs.__init__(self, vfs)
 
-    def __call__(self, path, mode='r', *args, **kw):
-        if mode not in ('r', 'rb'):
-            raise error.Abort(_('this vfs is read only'))
+    def __call__(self, path, mode=b'r', *args, **kw):
+        if mode not in (b'r', b'rb'):
+            raise error.Abort(_(b'this vfs is read only'))
         return self.vfs(path, mode, *args, **kw)
 
     def join(self, path, *insidef):
@@ -584,10 +584,10 @@
         return self
 
     def __exit__(self, exc_type, exc_value, exc_tb):
-        raise NotImplementedError('attempted instantiating ' + str(type(self)))
+        raise NotImplementedError(r'attempted instantiating ' + str(type(self)))
 
     def close(self):
-        raise NotImplementedError('attempted instantiating ' + str(type(self)))
+        raise NotImplementedError(r'attempted instantiating ' + str(type(self)))
 
 
 class delayclosedfile(closewrapbase):
@@ -619,7 +619,7 @@
         # Only Windows/NTFS has slow file closing. So only enable by default
         # on that platform. But allow to be enabled elsewhere for testing.
         defaultenabled = pycompat.iswindows
-        enabled = ui.configbool('worker', 'backgroundclose', defaultenabled)
+        enabled = ui.configbool(b'worker', b'backgroundclose', defaultenabled)
 
         if not enabled:
             return
@@ -627,24 +627,24 @@
         # There is overhead to starting and stopping the background threads.
         # Don't do background processing unless the file count is large enough
         # to justify it.
-        minfilecount = ui.configint('worker', 'backgroundcloseminfilecount')
+        minfilecount = ui.configint(b'worker', b'backgroundcloseminfilecount')
         # FUTURE dynamically start background threads after minfilecount closes.
         # (We don't currently have any callers that don't know their file count)
         if expectedcount > 0 and expectedcount < minfilecount:
             return
 
-        maxqueue = ui.configint('worker', 'backgroundclosemaxqueue')
-        threadcount = ui.configint('worker', 'backgroundclosethreadcount')
+        maxqueue = ui.configint(b'worker', b'backgroundclosemaxqueue')
+        threadcount = ui.configint(b'worker', b'backgroundclosethreadcount')
 
         ui.debug(
-            'starting %d threads for background file closing\n' % threadcount
+            b'starting %d threads for background file closing\n' % threadcount
         )
 
         self._queue = pycompat.queue.Queue(maxsize=maxqueue)
         self._running = True
 
         for i in range(threadcount):
-            t = threading.Thread(target=self._worker, name='backgroundcloser')
+            t = threading.Thread(target=self._worker, name=r'backgroundcloser')
             self._threads.append(t)
             t.start()
 
@@ -680,7 +680,7 @@
         """Schedule a file for closing."""
         if not self._entered:
             raise error.Abort(
-                _('can only call close() when context manager ' 'active')
+                _(b'can only call close() when context manager ' b'active')
             )
 
         # If a background thread encountered an exception, raise now so we fail
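
Two messages in the vfs.py hunks above are deliberately kept as native
strings (r'') instead of being byteified: str(type(self)) returns str,
and bytes + str is a TypeError on Python 3, so a bytes prefix would
replace the intended NotImplementedError with a TypeError. A standalone
sketch of that failure mode (the class is a stand-in, not the real vfs
hierarchy):

    class vfsdemo(object):
        def broken(self):
            # bytes + str raises TypeError on Python 3, so the exception
            # below is never even constructed.
            raise NotImplementedError(b'attempted instantiating ' + str(type(self)))

        def fixed(self):
            # r'' is a native str on both Python 2 and 3; the concatenation
            # and the intended exception both survive.
            raise NotImplementedError(r'attempted instantiating ' + str(type(self)))

    demo = vfsdemo()
    try:
        demo.broken()
    except TypeError:
        print('bytes + str: TypeError, not the intended error')
    try:
        demo.fixed()
    except NotImplementedError as exc:
        print('fixed: %s' % exc)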
--- a/mercurial/win32.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/win32.py	Sun Oct 06 09:48:39 2019 -0400
@@ -404,7 +404,7 @@
         X509_ASN_ENCODING, cert, len(cert)
     )
     if certctx is None:
-        _raiseoserror('CertCreateCertificateContext')
+        _raiseoserror(b'CertCreateCertificateContext')
 
     flags = 0
 
@@ -423,7 +423,7 @@
             None,  # pvReserved
             ctypes.byref(pchainctx),
         ):
-            _raiseoserror('CertGetCertificateChain')
+            _raiseoserror(b'CertGetCertificateChain')
 
         chainctx = pchainctx.contents
 
@@ -543,7 +543,7 @@
     t = _kernel32.GetDriveTypeA(volume)
 
     if t == _DRIVE_REMOTE:
-        return 'cifs'
+        return b'cifs'
     elif t not in (
         _DRIVE_REMOVABLE,
         _DRIVE_FIXED,
@@ -663,12 +663,12 @@
 
     pi = _PROCESS_INFORMATION()
 
-    env = ''
+    env = b''
     for k in encoding.environ:
-        env += "%s=%s\0" % (k, encoding.environ[k])
+        env += b"%s=%s\0" % (k, encoding.environ[k])
     if not env:
-        env = '\0'
-    env += '\0'
+        env = b'\0'
+    env += b'\0'
 
     args = subprocess.list2cmdline(pycompat.rapply(encoding.strfromlocal, args))
 
@@ -724,7 +724,7 @@
     # implicit zombie filename blocking on a temporary name.
 
     for tries in pycompat.xrange(10):
-        temp = '%s-%08x' % (f, random.randint(0, 0xFFFFFFFF))
+        temp = b'%s-%08x' % (f, random.randint(0, 0xFFFFFFFF))
         try:
             os.rename(f, temp)  # raises OSError EEXIST if temp exists
             break
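
The environment block assembled in win32.py above follows the Win32
CreateProcess layout: each key=value entry is NUL-terminated, the block
ends with one extra NUL, and an empty environment is a bare double NUL.
The same construction as a standalone sketch (the helper name and the
sample mapping are illustrative; in Mercurial the bytes mapping comes
from encoding.environ):

    def makeenvblock(environ):
        # CreateProcess expects b"k1=v1\0k2=v2\0...\0\0"; an empty
        # environment still needs the double NUL terminator.
        env = b''
        for k in environ:
            env += b"%s=%s\0" % (k, environ[k])
        if not env:
            env = b'\0'
        env += b'\0'
        return env

    assert makeenvblock({}) == b'\0\0'
    assert makeenvblock({b'PATH': b'C:\\bin'}) == b'PATH=C:\\bin\0\0'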
--- a/mercurial/windows.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/windows.py	Sun Oct 06 09:48:39 2019 -0400
@@ -153,7 +153,7 @@
         return getattr(self._fp, name)
 
 
-def posixfile(name, mode='r', buffering=-1):
+def posixfile(name, mode=b'r', buffering=-1):
     '''Open a file with even more POSIX-like semantics'''
     try:
         fp = osutil.posixfile(name, mode, buffering)  # may raise WindowsError
@@ -165,10 +165,10 @@
 
         # The position when opening in append mode is implementation defined, so
         # make it consistent with other platforms, which position at EOF.
-        if 'a' in mode:
+        if b'a' in mode:
             fp.seek(0, os.SEEK_END)
 
-        if '+' in mode:
+        if b'+' in mode:
             return mixedfilemodewrapper(fp)
 
         return fp
@@ -230,7 +230,7 @@
     try:
         return sys.getwindowsversion()[3] == 1
     except AttributeError:
-        return 'command' in encoding.environ.get('comspec', '')
+        return b'command' in encoding.environ.get(b'comspec', b'')
 
 
 def openhardlinks():
@@ -240,23 +240,23 @@
 def parsepatchoutput(output_line):
     """parses the output produced by patch and returns the filename"""
     pf = output_line[14:]
-    if pf[0] == '`':
+    if pf[0:1] == b'`':
         pf = pf[1:-1]  # Remove the quotes
     return pf
 
 
 def sshargs(sshcmd, host, user, port):
     '''Build argument list for ssh or Plink'''
-    pflag = 'plink' in sshcmd.lower() and '-P' or '-p'
-    args = user and ("%s@%s" % (user, host)) or host
-    if args.startswith('-') or args.startswith('/'):
+    pflag = b'plink' in sshcmd.lower() and b'-P' or b'-p'
+    args = user and (b"%s@%s" % (user, host)) or host
+    if args.startswith(b'-') or args.startswith(b'/'):
         raise error.Abort(
-            _('illegal ssh hostname or username starting with - or /: %s')
+            _(b'illegal ssh hostname or username starting with - or /: %s')
             % args
         )
     args = shellquote(args)
     if port:
-        args = '%s %s %s' % (pflag, shellquote(port), args)
+        args = b'%s %s %s' % (pflag, shellquote(port), args)
     return args
 
 
@@ -285,11 +285,11 @@
 
 
 def pconvert(path):
-    return path.replace(pycompat.ossep, '/')
+    return path.replace(pycompat.ossep, b'/')
 
 
 def localpath(path):
-    return path.replace('/', '\\')
+    return path.replace(b'/', b'\\')
 
 
 def normpath(path):
@@ -407,7 +407,7 @@
             and index + 1 < pathlen
             and path[index + 1 : index + 2] in (b'\\', b'/')
         ):
-            res += "%USERPROFILE%"
+            res += b"%USERPROFILE%"
         elif (
             c == b'\\'
             and index + 1 < pathlen
@@ -483,7 +483,7 @@
     """Build a command string suitable for os.popen* calls."""
     if sys.version_info < (2, 7, 1):
         # Python versions since 2.7.1 do this extra quoting themselves
-        return '"' + cmd + '"'
+        return b'"' + cmd + b'"'
     return cmd
 
 
@@ -499,13 +499,13 @@
     PATH isn't searched if command is an absolute or relative path.
     An extension from PATHEXT is found and added if not present.
     If command isn't found None is returned.'''
-    pathext = encoding.environ.get('PATHEXT', '.COM;.EXE;.BAT;.CMD')
+    pathext = encoding.environ.get(b'PATHEXT', b'.COM;.EXE;.BAT;.CMD')
     pathexts = [ext for ext in pathext.lower().split(pycompat.ospathsep)]
     if os.path.splitext(command)[1].lower() in pathexts:
-        pathexts = ['']
+        pathexts = [b'']
 
     def findexisting(pathcommand):
-        'Will append extension (if needed) and return existing file'
+        r'Will append extension (if needed) and return existing file'
         for ext in pathexts:
             executable = pathcommand + ext
             if os.path.exists(executable):
@@ -515,7 +515,7 @@
     if pycompat.ossep in command:
         return findexisting(command)
 
-    for path in encoding.environ.get('PATH', '').split(pycompat.ospathsep):
+    for path in encoding.environ.get(b'PATH', b'').split(pycompat.ospathsep):
         executable = findexisting(os.path.join(path, command))
         if executable is not None:
             return executable
@@ -536,7 +536,7 @@
         nf = normcase(nf)
         dir, base = os.path.split(nf)
         if not dir:
-            dir = '.'
+            dir = b'.'
         cache = dircache.get(dir, None)
         if cache is None:
             try:
@@ -681,7 +681,7 @@
             break
         chunks.append(s)
 
-    return ''.join(chunks)
+    return b''.join(chunks)
 
 
 def bindunixsocket(sock, path):
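
One windows.py change above keeps a native-string docstring (r'')
rather than taking b'': on Python 3 a bytes literal in docstring
position is merely a discarded constant expression, so byteifying it
would silently strip findexisting's documentation. A quick
demonstration with toy functions, runnable on Python 3:

    def with_docstring():
        r'Will append extension (if needed) and return existing file'

    def with_bytes_literal():
        b'Will append extension (if needed) and return existing file'

    # Only the native-string form is recorded as __doc__ on Python 3.
    assert with_docstring.__doc__ is not None
    assert with_bytes_literal.__doc__ is None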
--- a/mercurial/wireprotoframing.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/wireprotoframing.py	Sun Oct 06 09:48:39 2019 -0400
@@ -127,7 +127,7 @@
     val = 1
     while value >= val:
         if value & val:
-            flags.append(namemap.get(val, '<unknown 0x%02x>' % val))
+            flags.append(namemap.get(val, b'<unknown 0x%02x>' % val))
         val <<= 1
 
     return b'|'.join(flags)
@@ -158,15 +158,15 @@
 
     @encoding.strmethod
     def __repr__(self):
-        typename = '<unknown 0x%02x>' % self.typeid
+        typename = b'<unknown 0x%02x>' % self.typeid
         for name, value in FRAME_TYPES.iteritems():
             if value == self.typeid:
                 typename = name
                 break
 
         return (
-            'frame(size=%d; request=%d; stream=%d; streamflags=%s; '
-            'type=%s; flags=%s)'
+            b'frame(size=%d; request=%d; stream=%d; streamflags=%s; '
+            b'type=%s; flags=%s)'
             % (
                 len(self.payload),
                 self.requestid,
@@ -306,7 +306,7 @@
 
     if readcount != FRAME_HEADER_SIZE:
         raise error.Abort(
-            _('received incomplete frame: got %d bytes: %s')
+            _(b'received incomplete frame: got %d bytes: %s')
             % (readcount, header)
         )
 
@@ -315,7 +315,7 @@
     payload = fh.read(h.length)
     if len(payload) != h.length:
         raise error.Abort(
-            _('frame length error: expected %d; got %d')
+            _(b'frame length error: expected %d; got %d')
             % (h.length, len(payload))
         )
 
@@ -538,13 +538,13 @@
         # TODO look for localstr, other types here?
 
         if not isinstance(formatting, bytes):
-            raise ValueError('must use bytes formatting strings')
+            raise ValueError(b'must use bytes formatting strings')
         for arg in args:
             if not isinstance(arg, bytes):
-                raise ValueError('must use bytes for arguments')
+                raise ValueError(b'must use bytes for arguments')
         for label in labels:
             if not isinstance(label, bytes):
-                raise ValueError('must use bytes for labels')
+                raise ValueError(b'must use bytes for labels')
 
         # Formatting string must be ASCII.
         formatting = formatting.decode(r'ascii', r'replace').encode(r'ascii')
@@ -568,7 +568,7 @@
     payload = b''.join(cborutil.streamencode(atomdicts))
 
     if len(payload) > maxframesize:
-        raise ValueError('cannot encode data in a single frame')
+        raise ValueError(b'cannot encode data in a single frame')
 
     yield stream.makeframe(
         requestid=requestid,
@@ -710,7 +710,7 @@
     def __init__(self, ui, extraobjs):
         if extraobjs:
             raise error.Abort(
-                _('identity decoder received unexpected ' 'additional values')
+                _(b'identity decoder received unexpected ' b'additional values')
             )
 
     def decode(self, data):
@@ -744,7 +744,7 @@
 
         if extraobjs:
             raise error.Abort(
-                _('zlib decoder received unexpected ' 'additional values')
+                _(b'zlib decoder received unexpected ' b'additional values')
             )
 
         self._decompressor = zlib.decompressobj()
@@ -801,7 +801,7 @@
     def __init__(self, ui, extraobjs):
         if extraobjs:
             raise error.Abort(
-                _('zstd8mb decoder received unexpected ' 'additional values')
+                _(b'zstd8mb decoder received unexpected ' b'additional values')
             )
 
         super(zstd8mbdecoder, self).__init__(maxwindowsize=8 * 1048576)
@@ -872,7 +872,7 @@
         decoded from the stream encoding settings frame payloads.
         """
         if name not in STREAM_ENCODERS:
-            raise error.Abort(_('unknown stream decoder: %s') % name)
+            raise error.Abort(_(b'unknown stream decoder: %s') % name)
 
         self._decoder = STREAM_ENCODERS[name][1](ui, extraobjs)
 
@@ -906,7 +906,7 @@
         Receives the stream profile name.
         """
         if name not in STREAM_ENCODERS:
-            raise error.Abort(_('unknown stream encoder: %s') % name)
+            raise error.Abort(_(b'unknown stream encoder: %s') % name)
 
         self._encoder = STREAM_ENCODERS[name][0](ui)
         self._encodername = name
@@ -979,13 +979,13 @@
 def ensureserverstream(stream):
     if stream.streamid % 2:
         raise error.ProgrammingError(
-            'server should only write to even '
-            'numbered streams; %d is not even' % stream.streamid
+            b'server should only write to even '
+            b'numbered streams; %d is not even' % stream.streamid
         )
 
 
 DEFAULT_PROTOCOL_SETTINGS = {
-    'contentencodings': [b'identity'],
+    b'contentencodings': [b'identity'],
 }
 
 
@@ -1066,7 +1066,7 @@
         """
         self._ui = ui
         self._deferoutput = deferoutput
-        self._state = 'initial'
+        self._state = b'initial'
         self._nextoutgoingstreamid = 2
         self._bufferedframegens = []
         # stream id -> stream instance for all active streams from the client.
@@ -1093,19 +1093,19 @@
         if any, the consumer should take next.
         """
         if not frame.streamid % 2:
-            self._state = 'errored'
+            self._state = b'errored'
             return self._makeerrorresult(
-                _('received frame with even numbered stream ID: %d')
+                _(b'received frame with even numbered stream ID: %d')
                 % frame.streamid
             )
 
         if frame.streamid not in self._incomingstreams:
             if not frame.streamflags & STREAM_FLAG_BEGIN_STREAM:
-                self._state = 'errored'
+                self._state = b'errored'
                 return self._makeerrorresult(
                     _(
-                        'received frame on unknown inactive stream without '
-                        'beginning of stream flag set'
+                        b'received frame on unknown inactive stream without '
+                        b'beginning of stream flag set'
                     )
                 )
 
@@ -1113,25 +1113,25 @@
 
         if frame.streamflags & STREAM_FLAG_ENCODING_APPLIED:
             # TODO handle decoding frames
-            self._state = 'errored'
+            self._state = b'errored'
             raise error.ProgrammingError(
-                'support for decoding stream payloads ' 'not yet implemented'
+                b'support for decoding stream payloads ' b'not yet implemented'
             )
 
         if frame.streamflags & STREAM_FLAG_END_STREAM:
             del self._incomingstreams[frame.streamid]
 
         handlers = {
-            'initial': self._onframeinitial,
-            'protocol-settings-receiving': self._onframeprotocolsettings,
-            'idle': self._onframeidle,
-            'command-receiving': self._onframecommandreceiving,
-            'errored': self._onframeerrored,
+            b'initial': self._onframeinitial,
+            b'protocol-settings-receiving': self._onframeprotocolsettings,
+            b'idle': self._onframeidle,
+            b'command-receiving': self._onframecommandreceiving,
+            b'errored': self._onframeerrored,
         }
 
         meth = handlers.get(self._state)
         if not meth:
-            raise error.ProgrammingError('unhandled state: %s' % self._state)
+            raise error.ProgrammingError(b'unhandled state: %s' % self._state)
 
         return meth(frame)
 
@@ -1190,8 +1190,8 @@
                     for frame in createerrorframe(
                         stream,
                         requestid,
-                        '%s' % stringutil.forcebytestr(e),
-                        errtype='server',
+                        b'%s' % stringutil.forcebytestr(e),
+                        errtype=b'server',
                     ):
 
                         yield frame
@@ -1204,8 +1204,8 @@
                     if isinstance(o, wireprototypes.alternatelocationresponse):
                         if emitted:
                             raise error.ProgrammingError(
-                                'alternatelocationresponse seen after initial '
-                                'output object'
+                                b'alternatelocationresponse seen after initial '
+                                b'output object'
                             )
 
                         frame = stream.makestreamsettingsframe(requestid)
@@ -1222,7 +1222,7 @@
 
                     if alternatelocationsent:
                         raise error.ProgrammingError(
-                            'object follows alternatelocationresponse'
+                            b'object follows alternatelocationresponse'
                         )
 
                     if not emitted:
@@ -1266,7 +1266,7 @@
 
                 except Exception as e:
                     for frame in createerrorframe(
-                        stream, requestid, '%s' % e, errtype='server'
+                        stream, requestid, b'%s' % e, errtype=b'server'
                     ):
                         yield frame
 
@@ -1285,7 +1285,7 @@
         # TODO should we do anything about in-flight commands?
 
         if not self._deferoutput or not self._bufferedframegens:
-            return 'noop', {}
+            return b'noop', {}
 
         # If we buffered all our responses, emit those.
         def makegen():
@@ -1293,21 +1293,21 @@
                 for frame in gen:
                     yield frame
 
-        return 'sendframes', {'framegen': makegen(),}
+        return b'sendframes', {b'framegen': makegen(),}
 
     def _handlesendframes(self, framegen):
         if self._deferoutput:
             self._bufferedframegens.append(framegen)
-            return 'noop', {}
+            return b'noop', {}
         else:
-            return 'sendframes', {'framegen': framegen,}
+            return b'sendframes', {b'framegen': framegen,}
 
     def onservererror(self, stream, requestid, msg):
         ensureserverstream(stream)
 
         def sendframes():
             for frame in createerrorframe(
-                stream, requestid, msg, errtype='server'
+                stream, requestid, msg, errtype=b'server'
             ):
                 yield frame
 
@@ -1345,39 +1345,39 @@
         # Always use the *server's* preferred encoder over the client's,
         # as servers have more to lose from sub-optimal encoders being used.
         for name in STREAM_ENCODERS_ORDER:
-            if name in self._sendersettings['contentencodings']:
+            if name in self._sendersettings[b'contentencodings']:
                 s.setencoder(self._ui, name)
                 break
 
         return s
 
     def _makeerrorresult(self, msg):
-        return 'error', {'message': msg,}
+        return b'error', {b'message': msg,}
 
     def _makeruncommandresult(self, requestid):
         entry = self._receivingcommands[requestid]
 
-        if not entry['requestdone']:
-            self._state = 'errored'
+        if not entry[b'requestdone']:
+            self._state = b'errored'
             raise error.ProgrammingError(
-                'should not be called without ' 'requestdone set'
+                b'should not be called without ' b'requestdone set'
             )
 
         del self._receivingcommands[requestid]
 
         if self._receivingcommands:
-            self._state = 'command-receiving'
+            self._state = b'command-receiving'
         else:
-            self._state = 'idle'
+            self._state = b'idle'
 
         # Decode the payloads as CBOR.
-        entry['payload'].seek(0)
-        request = cborutil.decodeall(entry['payload'].getvalue())[0]
+        entry[b'payload'].seek(0)
+        request = cborutil.decodeall(entry[b'payload'].getvalue())[0]
 
         if b'name' not in request:
-            self._state = 'errored'
+            self._state = b'errored'
             return self._makeerrorresult(
-                _('command request missing "name" field')
+                _(b'command request missing "name" field')
             )
 
         if b'args' not in request:
@@ -1387,70 +1387,70 @@
         self._activecommands.add(requestid)
 
         return (
-            'runcommand',
+            b'runcommand',
             {
-                'requestid': requestid,
-                'command': request[b'name'],
-                'args': request[b'args'],
-                'redirect': request.get(b'redirect'),
-                'data': entry['data'].getvalue() if entry['data'] else None,
+                b'requestid': requestid,
+                b'command': request[b'name'],
+                b'args': request[b'args'],
+                b'redirect': request.get(b'redirect'),
+                b'data': entry[b'data'].getvalue() if entry[b'data'] else None,
             },
         )
 
     def _makewantframeresult(self):
-        return 'wantframe', {'state': self._state,}
+        return b'wantframe', {b'state': self._state,}
 
     def _validatecommandrequestframe(self, frame):
         new = frame.flags & FLAG_COMMAND_REQUEST_NEW
         continuation = frame.flags & FLAG_COMMAND_REQUEST_CONTINUATION
 
         if new and continuation:
-            self._state = 'errored'
+            self._state = b'errored'
             return self._makeerrorresult(
                 _(
-                    'received command request frame with both new and '
-                    'continuation flags set'
+                    b'received command request frame with both new and '
+                    b'continuation flags set'
                 )
             )
 
         if not new and not continuation:
-            self._state = 'errored'
+            self._state = b'errored'
             return self._makeerrorresult(
                 _(
-                    'received command request frame with neither new nor '
-                    'continuation flags set'
+                    b'received command request frame with neither new nor '
+                    b'continuation flags set'
                 )
             )
 
     def _onframeinitial(self, frame):
         # Called when we receive a frame when in the "initial" state.
         if frame.typeid == FRAME_TYPE_SENDER_PROTOCOL_SETTINGS:
-            self._state = 'protocol-settings-receiving'
+            self._state = b'protocol-settings-receiving'
             self._protocolsettingsdecoder = cborutil.bufferingdecoder()
             return self._onframeprotocolsettings(frame)
 
         elif frame.typeid == FRAME_TYPE_COMMAND_REQUEST:
-            self._state = 'idle'
+            self._state = b'idle'
             return self._onframeidle(frame)
 
         else:
-            self._state = 'errored'
+            self._state = b'errored'
             return self._makeerrorresult(
                 _(
-                    'expected sender protocol settings or command request '
-                    'frame; got %d'
+                    b'expected sender protocol settings or command request '
+                    b'frame; got %d'
                 )
                 % frame.typeid
             )
 
     def _onframeprotocolsettings(self, frame):
-        assert self._state == 'protocol-settings-receiving'
+        assert self._state == b'protocol-settings-receiving'
         assert self._protocolsettingsdecoder is not None
 
         if frame.typeid != FRAME_TYPE_SENDER_PROTOCOL_SETTINGS:
-            self._state = 'errored'
+            self._state = b'errored'
             return self._makeerrorresult(
-                _('expected sender protocol settings frame; got %d')
+                _(b'expected sender protocol settings frame; got %d')
                 % frame.typeid
             )
 
@@ -1458,20 +1458,20 @@
         eos = frame.flags & FLAG_SENDER_PROTOCOL_SETTINGS_EOS
 
         if more and eos:
-            self._state = 'errored'
+            self._state = b'errored'
             return self._makeerrorresult(
                 _(
-                    'sender protocol settings frame cannot have both '
-                    'continuation and end of stream flags set'
+                    b'sender protocol settings frame cannot have both '
+                    b'continuation and end of stream flags set'
                 )
             )
 
         if not more and not eos:
-            self._state = 'errored'
+            self._state = b'errored'
             return self._makeerrorresult(
                 _(
-                    'sender protocol settings frame must have continuation or '
-                    'end of stream flag set'
+                    b'sender protocol settings frame must have continuation or '
+                    b'end of stream flag set'
                 )
             )
 
@@ -1480,9 +1480,11 @@
         try:
             self._protocolsettingsdecoder.decode(frame.payload)
         except Exception as e:
-            self._state = 'errored'
+            self._state = b'errored'
             return self._makeerrorresult(
-                _('error decoding CBOR from sender protocol settings frame: %s')
+                _(
+                    b'error decoding CBOR from sender protocol settings frame: %s'
+                )
                 % stringutil.forcebytestr(e)
             )
 
@@ -1495,25 +1497,25 @@
         self._protocolsettingsdecoder = None
 
         if not decoded:
-            self._state = 'errored'
+            self._state = b'errored'
             return self._makeerrorresult(
-                _('sender protocol settings frame did not contain CBOR data')
+                _(b'sender protocol settings frame did not contain CBOR data')
             )
         elif len(decoded) > 1:
-            self._state = 'errored'
+            self._state = b'errored'
             return self._makeerrorresult(
                 _(
-                    'sender protocol settings frame contained multiple CBOR '
-                    'values'
+                    b'sender protocol settings frame contained multiple CBOR '
+                    b'values'
                 )
             )
 
         d = decoded[0]
 
         if b'contentencodings' in d:
-            self._sendersettings['contentencodings'] = d[b'contentencodings']
+            self._sendersettings[b'contentencodings'] = d[b'contentencodings']
 
-        self._state = 'idle'
+        self._state = b'idle'
 
         return self._makewantframeresult()
 
@@ -1521,9 +1523,9 @@
         # The only frame type that should be received in this state is a
         # command request.
         if frame.typeid != FRAME_TYPE_COMMAND_REQUEST:
-            self._state = 'errored'
+            self._state = b'errored'
             return self._makeerrorresult(
-                _('expected command request frame; got %d') % frame.typeid
+                _(b'expected command request frame; got %d') % frame.typeid
             )
 
         res = self._validatecommandrequestframe(frame)
@@ -1531,15 +1533,15 @@
             return res
 
         if frame.requestid in self._receivingcommands:
-            self._state = 'errored'
+            self._state = b'errored'
             return self._makeerrorresult(
-                _('request with ID %d already received') % frame.requestid
+                _(b'request with ID %d already received') % frame.requestid
             )
 
         if frame.requestid in self._activecommands:
-            self._state = 'errored'
+            self._state = b'errored'
             return self._makeerrorresult(
-                _('request with ID %d is already active') % frame.requestid
+                _(b'request with ID %d is already active') % frame.requestid
             )
 
         new = frame.flags & FLAG_COMMAND_REQUEST_NEW
@@ -1547,19 +1549,19 @@
         expectingdata = frame.flags & FLAG_COMMAND_REQUEST_EXPECT_DATA
 
         if not new:
-            self._state = 'errored'
+            self._state = b'errored'
             return self._makeerrorresult(
-                _('received command request frame without new flag set')
+                _(b'received command request frame without new flag set')
             )
 
         payload = util.bytesio()
         payload.write(frame.payload)
 
         self._receivingcommands[frame.requestid] = {
-            'payload': payload,
-            'data': None,
-            'requestdone': not moreframes,
-            'expectingdata': bool(expectingdata),
+            b'payload': payload,
+            b'data': None,
+            b'requestdone': not moreframes,
+            b'expectingdata': bool(expectingdata),
         }
 
         # This is the final frame for this request. Dispatch it.
@@ -1567,7 +1569,7 @@
             return self._makeruncommandresult(frame.requestid)
 
         assert moreframes or expectingdata
-        self._state = 'command-receiving'
+        self._state = b'command-receiving'
         return self._makewantframeresult()
 
     def _onframecommandreceiving(self, frame):
@@ -1583,16 +1585,16 @@
         # All other frames should be related to a command that is currently
         # receiving but is not active.
         if frame.requestid in self._activecommands:
-            self._state = 'errored'
+            self._state = b'errored'
             return self._makeerrorresult(
-                _('received frame for request that is still active: %d')
+                _(b'received frame for request that is still active: %d')
                 % frame.requestid
             )
 
         if frame.requestid not in self._receivingcommands:
-            self._state = 'errored'
+            self._state = b'errored'
             return self._makeerrorresult(
-                _('received frame for request that is not receiving: %d')
+                _(b'received frame for request that is not receiving: %d')
                 % frame.requestid
             )
 
@@ -1602,25 +1604,25 @@
             moreframes = frame.flags & FLAG_COMMAND_REQUEST_MORE_FRAMES
             expectingdata = bool(frame.flags & FLAG_COMMAND_REQUEST_EXPECT_DATA)
 
-            if entry['requestdone']:
-                self._state = 'errored'
+            if entry[b'requestdone']:
+                self._state = b'errored'
                 return self._makeerrorresult(
                     _(
-                        'received command request frame when request frames '
-                        'were supposedly done'
+                        b'received command request frame when request frames '
+                        b'were supposedly done'
                     )
                 )
 
-            if expectingdata != entry['expectingdata']:
-                self._state = 'errored'
+            if expectingdata != entry[b'expectingdata']:
+                self._state = b'errored'
                 return self._makeerrorresult(
-                    _('mismatch between expect data flag and previous frame')
+                    _(b'mismatch between expect data flag and previous frame')
                 )
 
-            entry['payload'].write(frame.payload)
+            entry[b'payload'].write(frame.payload)
 
             if not moreframes:
-                entry['requestdone'] = True
+                entry[b'requestdone'] = True
 
             if not moreframes and not expectingdata:
                 return self._makeruncommandresult(frame.requestid)
@@ -1628,45 +1630,45 @@
             return self._makewantframeresult()
 
         elif frame.typeid == FRAME_TYPE_COMMAND_DATA:
-            if not entry['expectingdata']:
-                self._state = 'errored'
+            if not entry[b'expectingdata']:
+                self._state = b'errored'
                 return self._makeerrorresult(
                     _(
-                        'received command data frame for request that is not '
-                        'expecting data: %d'
+                        b'received command data frame for request that is not '
+                        b'expecting data: %d'
                     )
                     % frame.requestid
                 )
 
-            if entry['data'] is None:
-                entry['data'] = util.bytesio()
+            if entry[b'data'] is None:
+                entry[b'data'] = util.bytesio()
 
             return self._handlecommanddataframe(frame, entry)
         else:
-            self._state = 'errored'
+            self._state = b'errored'
             return self._makeerrorresult(
-                _('received unexpected frame type: %d') % frame.typeid
+                _(b'received unexpected frame type: %d') % frame.typeid
             )
 
     def _handlecommanddataframe(self, frame, entry):
         assert frame.typeid == FRAME_TYPE_COMMAND_DATA
 
         # TODO support streaming data instead of buffering it.
-        entry['data'].write(frame.payload)
+        entry[b'data'].write(frame.payload)
 
         if frame.flags & FLAG_COMMAND_DATA_CONTINUATION:
             return self._makewantframeresult()
         elif frame.flags & FLAG_COMMAND_DATA_EOS:
-            entry['data'].seek(0)
+            entry[b'data'].seek(0)
             return self._makeruncommandresult(frame.requestid)
         else:
-            self._state = 'errored'
+            self._state = b'errored'
             return self._makeerrorresult(
-                _('command data frame without ' 'flags')
+                _(b'command data frame without ' b'flags')
             )
 
     def _onframeerrored(self, frame):
-        return self._makeerrorresult(_('server already errored'))
+        return self._makeerrorresult(_(b'server already errored'))
 
 
 class commandrequest(object):
@@ -1678,7 +1680,7 @@
         self.args = args
         self.datafh = datafh
         self.redirect = redirect
-        self.state = 'pending'
+        self.state = b'pending'
 
 
 class clientreactor(object):
@@ -1778,7 +1780,7 @@
         Returns a 3-tuple of (request, action, action data).
         """
         if not self._canissuecommands:
-            raise error.ProgrammingError('cannot issue new commands')
+            raise error.ProgrammingError(b'cannot issue new commands')
 
         requestid = self._nextrequestid
         self._nextrequestid += 2
@@ -1789,11 +1791,11 @@
 
         if self._buffersends:
             self._pendingrequests.append(request)
-            return request, 'noop', {}
+            return request, b'noop', {}
         else:
             if not self._cansend:
                 raise error.ProgrammingError(
-                    'sends cannot be performed on ' 'this instance'
+                    b'sends cannot be performed on ' b'this instance'
                 )
 
             if not self._hasmultiplesend:
@@ -1802,8 +1804,8 @@
 
             return (
                 request,
-                'sendframes',
-                {'framegen': self._makecommandframes(request),},
+                b'sendframes',
+                {b'framegen': self._makecommandframes(request),},
             )
 
     def flushcommands(self):
@@ -1817,11 +1819,11 @@
         requests are allowed after this is called.
         """
         if not self._pendingrequests:
-            return 'noop', {}
+            return b'noop', {}
 
         if not self._cansend:
             raise error.ProgrammingError(
-                'sends cannot be performed on this ' 'instance'
+                b'sends cannot be performed on this ' b'instance'
             )
 
         # If the instance only allows sending once, mark that we have fired
@@ -1836,7 +1838,7 @@
                 for frame in self._makecommandframes(request):
                     yield frame
 
-        return 'sendframes', {'framegen': makeframes(),}
+        return b'sendframes', {b'framegen': makeframes(),}
 
     def _makecommandframes(self, request):
         """Emit frames to issue a command request.
@@ -1845,7 +1847,7 @@
         state.
         """
         self._activerequests[request.requestid] = request
-        request.state = 'sending'
+        request.state = b'sending'
 
         if not self._protocolsettingssent and self._clientcontentencoders:
             self._protocolsettingssent = True
@@ -1875,7 +1877,7 @@
         for frame in res:
             yield frame
 
-        request.state = 'sent'
+        request.state = b'sent'
 
     def onframerecv(self, frame):
         """Process a frame that has been received off the wire.
@@ -1885,10 +1887,10 @@
         """
         if frame.streamid % 2:
             return (
-                'error',
+                b'error',
                 {
-                    'message': (
-                        _('received frame with odd numbered stream ID: %d')
+                    b'message': (
+                        _(b'received frame with odd numbered stream ID: %d')
                         % frame.streamid
                     ),
                 },
@@ -1897,11 +1899,11 @@
         if frame.streamid not in self._incomingstreams:
             if not frame.streamflags & STREAM_FLAG_BEGIN_STREAM:
                 return (
-                    'error',
+                    b'error',
                     {
-                        'message': _(
-                            'received frame on unknown stream '
-                            'without beginning of stream flag set'
+                        b'message': _(
+                            b'received frame on unknown stream '
+                            b'without beginning of stream flag set'
                         ),
                     },
                 )
@@ -1924,17 +1926,17 @@
 
         if frame.requestid not in self._activerequests:
             return (
-                'error',
+                b'error',
                 {
-                    'message': (
-                        _('received frame for inactive request ID: %d')
+                    b'message': (
+                        _(b'received frame for inactive request ID: %d')
                         % frame.requestid
                     ),
                 },
             )
 
         request = self._activerequests[frame.requestid]
-        request.state = 'receiving'
+        request.state = b'receiving'
 
         handlers = {
             FRAME_TYPE_COMMAND_RESPONSE: self._oncommandresponseframe,
@@ -1944,7 +1946,7 @@
         meth = handlers.get(frame.typeid)
         if not meth:
             raise error.ProgrammingError(
-                'unhandled frame type: %d' % frame.typeid
+                b'unhandled frame type: %d' % frame.typeid
             )
 
         return meth(request, frame)
@@ -1957,12 +1959,12 @@
 
         if more and eos:
             return (
-                'error',
+                b'error',
                 {
-                    'message': (
+                    b'message': (
                         _(
-                            'stream encoding settings frame cannot have both '
-                            'continuation and end of stream flags set'
+                            b'stream encoding settings frame cannot have both '
+                            b'continuation and end of stream flags set'
                         )
                     ),
                 },
@@ -1970,11 +1972,11 @@
 
         if not more and not eos:
             return (
-                'error',
+                b'error',
                 {
-                    'message': _(
-                        'stream encoding settings frame must have '
-                        'continuation or end of stream flag set'
+                    b'message': _(
+                        b'stream encoding settings frame must have '
+                        b'continuation or end of stream flag set'
                     ),
                 },
             )
@@ -1989,12 +1991,12 @@
             decoder.decode(frame.payload)
         except Exception as e:
             return (
-                'error',
+                b'error',
                 {
-                    'message': (
+                    b'message': (
                         _(
-                            'error decoding CBOR from stream encoding '
-                            'settings frame: %s'
+                            b'error decoding CBOR from stream encoding '
+                            b'settings frame: %s'
                         )
                         % stringutil.forcebytestr(e)
                     ),
@@ -2002,7 +2004,7 @@
             )
 
         if more:
-            return 'noop', {}
+            return b'noop', {}
 
         assert eos
 
@@ -2011,11 +2013,11 @@
 
         if not decoded:
             return (
-                'error',
+                b'error',
                 {
-                    'message': _(
-                        'stream encoding settings frame did not contain '
-                        'CBOR data'
+                    b'message': _(
+                        b'stream encoding settings frame did not contain '
+                        b'CBOR data'
                     ),
                 },
             )
@@ -2026,40 +2028,44 @@
             )
         except Exception as e:
             return (
-                'error',
+                b'error',
                 {
-                    'message': (
-                        _('error setting stream decoder: %s')
+                    b'message': (
+                        _(b'error setting stream decoder: %s')
                         % stringutil.forcebytestr(e)
                     ),
                 },
             )
 
-        return 'noop', {}
+        return b'noop', {}
 
     def _oncommandresponseframe(self, request, frame):
         if frame.flags & FLAG_COMMAND_RESPONSE_EOS:
-            request.state = 'received'
+            request.state = b'received'
             del self._activerequests[request.requestid]
 
         return (
-            'responsedata',
+            b'responsedata',
             {
-                'request': request,
-                'expectmore': frame.flags & FLAG_COMMAND_RESPONSE_CONTINUATION,
-                'eos': frame.flags & FLAG_COMMAND_RESPONSE_EOS,
-                'data': frame.payload,
+                b'request': request,
+                b'expectmore': frame.flags & FLAG_COMMAND_RESPONSE_CONTINUATION,
+                b'eos': frame.flags & FLAG_COMMAND_RESPONSE_EOS,
+                b'data': frame.payload,
             },
         )
 
     def _onerrorresponseframe(self, request, frame):
-        request.state = 'errored'
+        request.state = b'errored'
         del self._activerequests[request.requestid]
 
         # The payload should be a CBOR map.
         m = cborutil.decodeall(frame.payload)[0]
 
         return (
-            'error',
-            {'request': request, 'type': m['type'], 'message': m['message'],},
+            b'error',
+            {
+                b'request': request,
+                b'type': m[b'type'],
+                b'message': m[b'message'],
+            },
         )
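
Each handler above returns an (action, meta) pair rather than acting directly; the transport layer is expected to dispatch on the action. A minimal sketch of such a dispatcher (hypothetical; only the action names and meta keys come from the code above):

    def handleaction(action, meta):
        # b'noop': the frame was consumed; more frames are expected.
        if action == b'noop':
            return
        # b'error': meta carries a b'message' (and, for error response
        # frames, the b'type' and originating b'request' as well).
        if action == b'error':
            raise Exception(meta[b'message'])
        # b'responsedata': payload for an active request; b'eos' marks
        # the final frame, b'expectmore' says more frames will follow.
        if action == b'responsedata':
            print(meta[b'request'].requestid, meta[b'eos'], len(meta[b'data']))
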
--- a/mercurial/wireprotoserver.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/wireprotoserver.py	Sun Oct 06 09:48:39 2019 -0400
@@ -34,9 +34,9 @@
 
 HTTP_OK = 200
 
-HGTYPE = 'application/mercurial-0.1'
-HGTYPE2 = 'application/mercurial-0.2'
-HGERRTYPE = 'application/hg-error'
+HGTYPE = b'application/mercurial-0.1'
+HGTYPE2 = b'application/mercurial-0.2'
+HGERRTYPE = b'application/hg-error'
 
 SSHV1 = wireprototypes.SSHV1
 SSHV2 = wireprototypes.SSHV2
@@ -56,7 +56,7 @@
         chunks.append(pycompat.bytesurl(v))
         i += 1
 
-    return ''.join(chunks)
+    return b''.join(chunks)
 
 
 @interfaceutil.implementer(wireprototypes.baseprotocolhandler)
@@ -69,19 +69,19 @@
 
     @property
     def name(self):
-        return 'http-v1'
+        return b'http-v1'
 
     def getargs(self, args):
         knownargs = self._args()
         data = {}
         keys = args.split()
         for k in keys:
-            if k == '*':
+            if k == b'*':
                 star = {}
                 for key in knownargs.keys():
-                    if key != 'cmd' and key not in keys:
+                    if key != b'cmd' and key not in keys:
                         star[key] = knownargs[key][0]
-                data['*'] = star
+                data[b'*'] = star
             else:
                 data[k] = knownargs[k][0]
         return [data[k] for k in keys]
@@ -104,7 +104,7 @@
     def getprotocaps(self):
         if self._protocaps is None:
             value = decodevaluefromheaders(self._req, b'X-HgProto')
-            self._protocaps = set(value.split(' '))
+            self._protocaps = set(value.split(b' '))
         return self._protocaps
 
     def getpayload(self):
@@ -132,33 +132,33 @@
             self._ui.ferr = olderr
 
     def client(self):
-        return 'remote:%s:%s:%s' % (
+        return b'remote:%s:%s:%s' % (
             self._req.urlscheme,
-            urlreq.quote(self._req.remotehost or ''),
-            urlreq.quote(self._req.remoteuser or ''),
+            urlreq.quote(self._req.remotehost or b''),
+            urlreq.quote(self._req.remoteuser or b''),
         )
 
     def addcapabilities(self, repo, caps):
         caps.append(b'batch')
 
         caps.append(
-            'httpheader=%d' % repo.ui.configint('server', 'maxhttpheaderlen')
+            b'httpheader=%d' % repo.ui.configint(b'server', b'maxhttpheaderlen')
         )
-        if repo.ui.configbool('experimental', 'httppostargs'):
-            caps.append('httppostargs')
+        if repo.ui.configbool(b'experimental', b'httppostargs'):
+            caps.append(b'httppostargs')
 
         # FUTURE advertise 0.2rx once support is implemented
         # FUTURE advertise minrx and mintx after consulting config option
-        caps.append('httpmediatype=0.1rx,0.1tx,0.2tx')
+        caps.append(b'httpmediatype=0.1rx,0.1tx,0.2tx')
 
         compengines = wireprototypes.supportedcompengines(
             repo.ui, compression.SERVERROLE
         )
         if compengines:
-            comptypes = ','.join(
+            comptypes = b','.join(
                 urlreq.quote(e.wireprotosupport().name) for e in compengines
             )
-            caps.append('compression=%s' % comptypes)
+            caps.append(b'compression=%s' % comptypes)
 
         return caps
 
@@ -194,10 +194,10 @@
     # HTTP version 1 wire protocol requests are denoted by a "cmd" query
     # string parameter. If it isn't present, this isn't a wire protocol
     # request.
-    if 'cmd' not in req.qsparams:
+    if b'cmd' not in req.qsparams:
         return False
 
-    cmd = req.qsparams['cmd']
+    cmd = req.qsparams[b'cmd']
 
     # The "cmd" request parameter is used by both the wire protocol and hgweb.
     # While not all wire protocol commands are available for all transports,
@@ -215,10 +215,10 @@
     # in this case. We send an HTTP 404 for backwards compatibility reasons.
     if req.dispatchpath:
         res.status = hgwebcommon.statusmessage(404)
-        res.headers['Content-Type'] = HGTYPE
+        res.headers[b'Content-Type'] = HGTYPE
         # TODO This is not a good response to issue for this request. This
         # is mostly for BC for now.
-        res.setbodybytes('0\n%s\n' % b'Not Found')
+        res.setbodybytes(b'0\n%s\n' % b'Not Found')
         return True
 
     proto = httpv1protocolhandler(
@@ -237,7 +237,7 @@
         res.status = hgwebcommon.statusmessage(e.code, pycompat.bytestr(e))
         # TODO This response body assumes the failed command was
         # "unbundle." That assumption is not always valid.
-        res.setbodybytes('0\n%s\n' % pycompat.bytestr(e))
+        res.setbodybytes(b'0\n%s\n' % pycompat.bytestr(e))
 
     return True
 
@@ -248,7 +248,7 @@
     # Registered APIs are made available via config options of the name of
     # the protocol.
     for k, v in API_HANDLERS.items():
-        section, option = v['config']
+        section, option = v[b'config']
         if repo.ui.configbool(section, option):
             apis.add(k)
 
@@ -263,10 +263,10 @@
 
     # This whole URL space is experimental for now. But we want to
     # reserve the URL space. So, 404 all URLs if the feature isn't enabled.
-    if not repo.ui.configbool('experimental', 'web.apiserver'):
+    if not repo.ui.configbool(b'experimental', b'web.apiserver'):
         res.status = b'404 Not Found'
         res.headers[b'Content-Type'] = b'text/plain'
-        res.setbodybytes(_('Experimental API server endpoint not enabled'))
+        res.setbodybytes(_(b'Experimental API server endpoint not enabled'))
         return
 
     # The URL space is /api/<protocol>/*. The structure of URLs under varies
@@ -280,14 +280,14 @@
         res.headers[b'Content-Type'] = b'text/plain'
         lines = [
             _(
-                'APIs can be accessed at /api/<name>, where <name> can be '
-                'one of the following:\n'
+                b'APIs can be accessed at /api/<name>, where <name> can be '
+                b'one of the following:\n'
             )
         ]
         if availableapis:
             lines.extend(sorted(availableapis))
         else:
-            lines.append(_('(no available APIs)\n'))
+            lines.append(_(b'(no available APIs)\n'))
         res.setbodybytes(b'\n'.join(lines))
         return
 
@@ -297,7 +297,7 @@
         res.status = b'404 Not Found'
         res.headers[b'Content-Type'] = b'text/plain'
         res.setbodybytes(
-            _('Unknown API: %s\nKnown APIs: %s')
+            _(b'Unknown API: %s\nKnown APIs: %s')
             % (proto, b', '.join(sorted(availableapis)))
         )
         return
@@ -305,10 +305,10 @@
     if proto not in availableapis:
         res.status = b'404 Not Found'
         res.headers[b'Content-Type'] = b'text/plain'
-        res.setbodybytes(_('API %s not enabled\n') % proto)
+        res.setbodybytes(_(b'API %s not enabled\n') % proto)
         return
 
-    API_HANDLERS[proto]['handler'](
+    API_HANDLERS[proto][b'handler'](
         rctx, req, res, checkperm, req.dispatchparts[2:]
     )
 
@@ -326,9 +326,9 @@
 #    descriptor for this service. The response must be serializable to CBOR.
 API_HANDLERS = {
     wireprotov2server.HTTP_WIREPROTO_V2: {
-        'config': ('experimental', 'web.api.http-v2'),
-        'handler': wireprotov2server.handlehttpv2request,
-        'apidescriptor': wireprotov2server.httpv2apidescriptor,
+        b'config': (b'experimental', b'web.api.http-v2'),
+        b'handler': wireprotov2server.handlehttpv2request,
+        b'apidescriptor': wireprotov2server.httpv2apidescriptor,
     },
 }
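
A third-party extension could register an additional API in the same shape; a hedged sketch (the protocol name, config option, and handler below are invented for illustration, but the five-argument handler signature matches the dispatch call in handlewsgiapirequest() above):

    from mercurial import wireprotoserver

    def handlemyapirequest(rctx, req, res, checkperm, urlparts):
        # Invented handler; signature mirrors the dispatch call
        # API_HANDLERS[proto][b'handler'](rctx, req, res, checkperm, ...).
        res.status = b'200 OK'
        res.headers[b'Content-Type'] = b'text/plain'
        res.setbodybytes(b'hello from exp-my-api-0001\n')

    wireprotoserver.API_HANDLERS[b'exp-my-api-0001'] = {
        b'config': (b'experimental', b'web.api.my-api'),
        b'handler': handlemyapirequest,
        # b'apidescriptor' is optional; processcapabilitieshandshake()
        # looks it up with .get() and skips entries without it.
    }
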
 
@@ -341,7 +341,7 @@
     # Determine the response media type and compression engine based
     # on the request parameters.
 
-    if '0.2' in proto.getprotocaps():
+    if b'0.2' in proto.getprotocaps():
         # All clients are expected to support uncompressed data.
         if prefer_uncompressed:
             return HGTYPE2, compression._noopengine(), {}
@@ -353,9 +353,9 @@
         ):
             if engine.wireprotosupport().name in compformats:
                 opts = {}
-                level = ui.configint('server', '%slevel' % engine.name())
+                level = ui.configint(b'server', b'%slevel' % engine.name())
                 if level is not None:
-                    opts['level'] = level
+                    opts[b'level'] = level
 
                 return HGTYPE2, engine, opts
 
@@ -365,8 +365,8 @@
     # Don't allow untrusted settings because disabling compression or
     # setting a very high compression level could lead to flooding
     # the server's network or CPU.
-    opts = {'level': ui.configint('server', 'zliblevel')}
-    return HGTYPE, util.compengines['zlib'], opts
+    opts = {b'level': ui.configint(b'server', b'zliblevel')}
+    return HGTYPE, util.compengines[b'zlib'], opts
 
 
 def processcapabilitieshandshake(repo, req, res, proto):
@@ -377,7 +377,7 @@
     advertised services are available, we don't handle the request.
     """
     # Fall back to old behavior unless the API server is enabled.
-    if not repo.ui.configbool('experimental', 'web.apiserver'):
+    if not repo.ui.configbool(b'experimental', b'web.apiserver'):
         return False
 
     clientapis = decodevaluefromheaders(req, b'X-HgUpgrade')
@@ -386,7 +386,7 @@
         return False
 
     # We currently only support CBOR responses.
-    protocaps = set(protocaps.split(' '))
+    protocaps = set(protocaps.split(b' '))
     if b'cbor' not in protocaps:
         return False
 
@@ -395,20 +395,20 @@
     for api in sorted(set(clientapis.split()) & _availableapis(repo)):
         handler = API_HANDLERS[api]
 
-        descriptorfn = handler.get('apidescriptor')
+        descriptorfn = handler.get(b'apidescriptor')
         if not descriptorfn:
             continue
 
         descriptors[api] = descriptorfn(req, repo)
 
-    v1caps = wireprotov1server.dispatch(repo, proto, 'capabilities')
+    v1caps = wireprotov1server.dispatch(repo, proto, b'capabilities')
     assert isinstance(v1caps, wireprototypes.bytesresponse)
 
     m = {
         # TODO allow this to be configurable.
-        'apibase': 'api/',
-        'apis': descriptors,
-        'v1capabilities': v1caps.data,
+        b'apibase': b'api/',
+        b'apis': descriptors,
+        b'v1capabilities': v1caps.data,
     }
 
     res.status = b'200 OK'
@@ -427,7 +427,7 @@
         # identifying the compression engine.
         name = engine.wireprotosupport().name
         assert 0 < len(name) < 256
-        yield struct.pack('B', len(name))
+        yield struct.pack(b'B', len(name))
         yield name
 
         for chunk in gen:
@@ -435,11 +435,11 @@
 
     def setresponse(code, contenttype, bodybytes=None, bodygen=None):
         if code == HTTP_OK:
-            res.status = '200 Script output follows'
+            res.status = b'200 Script output follows'
         else:
             res.status = hgwebcommon.statusmessage(code)
 
-        res.headers['Content-Type'] = contenttype
+        res.headers[b'Content-Type'] = contenttype
 
         if bodybytes is not None:
             res.setbodybytes(bodybytes)
@@ -450,14 +450,17 @@
         setresponse(
             HTTP_OK,
             HGERRTYPE,
-            _('requested wire protocol command is not available over ' 'HTTP'),
+            _(
+                b'requested wire protocol command is not available over '
+                b'HTTP'
+            ),
         )
         return
 
     proto.checkperm(wireprotov1server.commands[cmd].permission)
 
     # Possibly handle a modern client wanting to switch protocols.
-    if cmd == 'capabilities' and processcapabilitieshandshake(
+    if cmd == b'capabilities' and processcapabilitieshandshake(
         repo, req, res, proto
     ):
 
@@ -486,21 +489,21 @@
 
         setresponse(HTTP_OK, mediatype, bodygen=gen)
     elif isinstance(rsp, wireprototypes.pushres):
-        rsp = '%d\n%s' % (rsp.res, rsp.output)
+        rsp = b'%d\n%s' % (rsp.res, rsp.output)
         setresponse(HTTP_OK, HGTYPE, bodybytes=rsp)
     elif isinstance(rsp, wireprototypes.pusherr):
-        rsp = '0\n%s\n' % rsp.res
+        rsp = b'0\n%s\n' % rsp.res
         res.drain = True
         setresponse(HTTP_OK, HGTYPE, bodybytes=rsp)
     elif isinstance(rsp, wireprototypes.ooberror):
         setresponse(HTTP_OK, HGERRTYPE, bodybytes=rsp.message)
     else:
-        raise error.ProgrammingError('hgweb.protocol internal failure', rsp)
+        raise error.ProgrammingError(b'hgweb.protocol internal failure', rsp)
 
 
 def _sshv1respondbytes(fout, value):
     """Send a bytes response for protocol version 1."""
-    fout.write('%d\n' % len(value))
+    fout.write(b'%d\n' % len(value))
     fout.write(value)
     fout.flush()
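
The version 1 framing written here is simply a decimal byte count, a newline, then the raw payload; for example (using io.BytesIO in place of the real output stream):

    import io
    from mercurial.wireprotoserver import _sshv1respondbytes

    fout = io.BytesIO()
    _sshv1respondbytes(fout, b'capabilities')
    assert fout.getvalue() == b'12\ncapabilities'
    # an empty value yields just b'0\n'
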
 
@@ -540,15 +543,15 @@
             argline = self._fin.readline()[:-1]
             arg, l = argline.split()
             if arg not in keys:
-                raise error.Abort(_("unexpected parameter %r") % arg)
-            if arg == '*':
+                raise error.Abort(_(b"unexpected parameter %r") % arg)
+            if arg == b'*':
                 star = {}
                 for k in pycompat.xrange(int(l)):
                     argline = self._fin.readline()[:-1]
                     arg, l = argline.split()
                     val = self._fin.read(int(l))
                     star[arg] = val
-                data['*'] = star
+                data[b'*'] = star
             else:
                 val = self._fin.read(int(l))
                 data[arg] = val
@@ -578,8 +581,8 @@
         yield None
 
     def client(self):
-        client = encoding.environ.get('SSH_CLIENT', '').split(' ', 1)[0]
-        return 'remote:ssh:' + client
+        client = encoding.environ.get(b'SSH_CLIENT', b'').split(b' ', 1)[0]
+        return b'remote:ssh:' + client
 
     def addcapabilities(self, repo, caps):
         if self.name == wireprototypes.SSHV1:
@@ -655,18 +658,18 @@
 #    This happens by default since protocol version 2 is the same as
     #    version 1 except for the handshake.
 
-    state = 'protov1-serving'
+    state = b'protov1-serving'
     proto = sshv1protocolhandler(ui, fin, fout)
     protoswitched = False
 
     while not ev.is_set():
-        if state == 'protov1-serving':
+        if state == b'protov1-serving':
             # Commands are issued on new lines.
             request = fin.readline()[:-1]
 
             # Empty lines signal to terminate the connection.
             if not request:
-                state = 'shutdown'
+                state = b'shutdown'
                 continue
 
             # It looks like a protocol upgrade request. Transition state to
@@ -678,10 +681,10 @@
                         ui.ferr,
                         b'cannot upgrade protocols multiple ' b'times',
                     )
-                    state = 'shutdown'
+                    state = b'shutdown'
                     continue
 
-                state = 'upgrade-initial'
+                state = b'upgrade-initial'
                 continue
 
             available = wireprotov1server.commands.commandavailable(
@@ -715,16 +718,16 @@
                 _sshv1respondooberror(fout, ui.ferr, rsp.message)
             else:
                 raise error.ProgrammingError(
-                    'unhandled response type from '
-                    'wire protocol command: %s' % rsp
+                    b'unhandled response type from '
+                    b'wire protocol command: %s' % rsp
                 )
 
         # For now, protocol version 2 serving just goes back to version 1.
-        elif state == 'protov2-serving':
-            state = 'protov1-serving'
+        elif state == b'protov2-serving':
+            state = b'protov1-serving'
             continue
 
-        elif state == 'upgrade-initial':
+        elif state == b'upgrade-initial':
             # We should never transition into this state if we've switched
             # protocols.
             assert not protoswitched
@@ -738,20 +741,20 @@
                 token, caps = request.split(b' ')[1:]
             except ValueError:
                 _sshv1respondbytes(fout, b'')
-                state = 'protov1-serving'
+                state = b'protov1-serving'
                 continue
 
             # Send empty response if we don't support upgrading protocols.
-            if not ui.configbool('experimental', 'sshserver.support-v2'):
+            if not ui.configbool(b'experimental', b'sshserver.support-v2'):
                 _sshv1respondbytes(fout, b'')
-                state = 'protov1-serving'
+                state = b'protov1-serving'
                 continue
 
             try:
                 caps = urlreq.parseqs(caps)
             except ValueError:
                 _sshv1respondbytes(fout, b'')
-                state = 'protov1-serving'
+                state = b'protov1-serving'
                 continue
 
             # We don't see an upgrade request to protocol version 2. Ignore
@@ -759,15 +762,15 @@
             wantedprotos = caps.get(b'proto', [b''])[0]
             if SSHV2 not in wantedprotos:
                 _sshv1respondbytes(fout, b'')
-                state = 'protov1-serving'
+                state = b'protov1-serving'
                 continue
 
             # It looks like we can honor this upgrade request to protocol 2.
             # Filter the rest of the handshake protocol request lines.
-            state = 'upgrade-v2-filter-legacy-handshake'
+            state = b'upgrade-v2-filter-legacy-handshake'
             continue
 
-        elif state == 'upgrade-v2-filter-legacy-handshake':
+        elif state == b'upgrade-v2-filter-legacy-handshake':
             # Client should have sent legacy handshake after an ``upgrade``
             # request. Expected lines:
             #
@@ -787,7 +790,7 @@
                         b'malformed handshake protocol: ' b'missing %s' % line,
                     )
                     ok = False
-                    state = 'shutdown'
+                    state = b'shutdown'
                     break
 
             if not ok:
@@ -801,13 +804,13 @@
                     b'malformed handshake protocol: '
                     b'missing between argument value',
                 )
-                state = 'shutdown'
+                state = b'shutdown'
                 continue
 
-            state = 'upgrade-v2-finish'
+            state = b'upgrade-v2-finish'
             continue
 
-        elif state == 'upgrade-v2-finish':
+        elif state == b'upgrade-v2-finish':
             # Send the upgrade response.
             fout.write(b'upgraded %s %s\n' % (token, SSHV2))
             servercaps = wireprotov1server.capabilities(repo, proto)
@@ -818,15 +821,15 @@
             proto = sshv2protocolhandler(ui, fin, fout)
             protoswitched = True
 
-            state = 'protov2-serving'
+            state = b'protov2-serving'
             continue
 
-        elif state == 'shutdown':
+        elif state == b'shutdown':
             break
 
         else:
             raise error.ProgrammingError(
-                'unhandled ssh server state: %s' % state
+                b'unhandled ssh server state: %s' % state
             )
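
For reference, the transitions implemented by the serving loop above (state names are the literal bytes values used in the code; summary only):

    protov1-serving      -> shutdown                 (empty request line, or a
                                                      repeated upgrade attempt)
    protov1-serving      -> upgrade-initial          (first upgrade request)
    upgrade-initial      -> protov1-serving          (malformed, unsupported,
                                                      or non-v2 upgrade request)
    upgrade-initial      -> upgrade-v2-filter-legacy-handshake (upgrade honored)
    upgrade-v2-filter-legacy-handshake -> shutdown             (bad handshake)
    upgrade-v2-filter-legacy-handshake -> upgrade-v2-finish    (handshake ok)
    upgrade-v2-finish    -> protov2-serving          (upgrade response sent)
    protov2-serving      -> protov1-serving          (v2 currently serves as v1)
    shutdown             -> loop exits
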
 
 
@@ -839,10 +842,10 @@
         # Log write I/O to stdout and stderr if configured.
         if logfh:
             self._fout = util.makeloggingfileobject(
-                logfh, self._fout, 'o', logdata=True
+                logfh, self._fout, b'o', logdata=True
             )
             ui.ferr = util.makeloggingfileobject(
-                logfh, ui.ferr, 'e', logdata=True
+                logfh, ui.ferr, b'e', logdata=True
             )
 
     def serve_forever(self):
--- a/mercurial/wireprototypes.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/wireprototypes.py	Sun Oct 06 09:48:39 2019 -0400
@@ -19,27 +19,27 @@
 from .utils import compression
 
 # Names of the SSH protocol implementations.
-SSHV1 = 'ssh-v1'
+SSHV1 = b'ssh-v1'
 # These are advertised over the wire. Increment the counters at the end
 # to reflect BC breakages.
-SSHV2 = 'exp-ssh-v2-0003'
-HTTP_WIREPROTO_V2 = 'exp-http-v2-0003'
+SSHV2 = b'exp-ssh-v2-0003'
+HTTP_WIREPROTO_V2 = b'exp-http-v2-0003'
 
-NARROWCAP = 'exp-narrow-1'
-ELLIPSESCAP1 = 'exp-ellipses-1'
-ELLIPSESCAP = 'exp-ellipses-2'
+NARROWCAP = b'exp-narrow-1'
+ELLIPSESCAP1 = b'exp-ellipses-1'
+ELLIPSESCAP = b'exp-ellipses-2'
 SUPPORTED_ELLIPSESCAP = (ELLIPSESCAP1, ELLIPSESCAP)
 
 # All available wire protocol transports.
 TRANSPORTS = {
-    SSHV1: {'transport': 'ssh', 'version': 1,},
+    SSHV1: {b'transport': b'ssh', b'version': 1,},
     SSHV2: {
-        'transport': 'ssh',
+        b'transport': b'ssh',
         # TODO mark as version 2 once all commands are implemented.
-        'version': 1,
+        b'version': 1,
     },
-    'http-v1': {'transport': 'http', 'version': 1,},
-    HTTP_WIREPROTO_V2: {'transport': 'http', 'version': 2,},
+    b'http-v1': {b'transport': b'http', b'version': 1,},
+    HTTP_WIREPROTO_V2: {b'transport': b'http', b'version': 2,},
 }
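
wireprotocommand() in wireprotov1server.py derives the set of transports a version 1 command is exposed on from this table; e.g.:

    from mercurial import wireprototypes

    v1 = {
        k
        for k, v in wireprototypes.TRANSPORTS.items()
        if v[b'version'] == 1
    }
    # -> {b'ssh-v1', b'exp-ssh-v2-0003', b'http-v1'}
    # (SSHV2 still reports version 1; see the TODO above.)
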
 
 
@@ -116,13 +116,13 @@
 
 
 # list of nodes encoding / decoding
-def decodelist(l, sep=' '):
+def decodelist(l, sep=b' '):
     if l:
         return [bin(v) for v in l.split(sep)]
     return []
 
 
-def encodelist(l, sep=' '):
+def encodelist(l, sep=b' '):
     try:
         return sep.join(map(hex, l))
     except TypeError:
@@ -134,19 +134,19 @@
 
 def escapebatcharg(plain):
     return (
-        plain.replace(':', ':c')
-        .replace(',', ':o')
-        .replace(';', ':s')
-        .replace('=', ':e')
+        plain.replace(b':', b':c')
+        .replace(b',', b':o')
+        .replace(b';', b':s')
+        .replace(b'=', b':e')
     )
 
 
 def unescapebatcharg(escaped):
     return (
-        escaped.replace(':e', '=')
-        .replace(':s', ';')
-        .replace(':o', ',')
-        .replace(':c', ':')
+        escaped.replace(b':e', b'=')
+        .replace(b':s', b';')
+        .replace(b':o', b',')
+        .replace(b':c', b':')
     )
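
These two helpers are exact inverses: ':' acts as the escape character, so ';' (command separator), ',' (argument separator), and '=' (name/value separator) can be used unambiguously in batch requests. Round trip:

    from mercurial import wireprototypes

    wireprototypes.escapebatcharg(b'a:b,c;d=e')        # -> b'a:cb:oc:sd:ee'
    wireprototypes.unescapebatcharg(b'a:cb:oc:sd:ee')  # -> b'a:b,c;d=e'
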
 
 
@@ -162,18 +162,18 @@
 # :scsv:  set of values, transmitted as comma-separated values
 # :plain: string with no transformation needed.
 GETBUNDLE_ARGUMENTS = {
-    'heads': 'nodes',
-    'bookmarks': 'boolean',
-    'common': 'nodes',
-    'obsmarkers': 'boolean',
-    'phases': 'boolean',
-    'bundlecaps': 'scsv',
-    'listkeys': 'csv',
-    'cg': 'boolean',
-    'cbattempted': 'boolean',
-    'stream': 'boolean',
-    'includepats': 'csv',
-    'excludepats': 'csv',
+    b'heads': b'nodes',
+    b'bookmarks': b'boolean',
+    b'common': b'nodes',
+    b'obsmarkers': b'boolean',
+    b'phases': b'boolean',
+    b'bundlecaps': b'scsv',
+    b'listkeys': b'csv',
+    b'cg': b'boolean',
+    b'cbattempted': b'boolean',
+    b'stream': b'boolean',
+    b'includepats': b'csv',
+    b'excludepats': b'csv',
 }
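
How each of these type names is serialized can be seen in the getbundle() encoding loop in wireprotov1peer.py further down; in summary:

    # b'nodes'   -> wireprototypes.encodelist(value)   space-separated hex nodes
    # b'csv'     -> b','.join(value)
    # b'scsv'    -> b','.join(sorted(value))           set, sorted before joining
    # b'boolean' -> b'%i' % bool(value)                b'0' or b'1'
    # b'plain'   -> value passed through unchanged
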
 
 
@@ -253,9 +253,9 @@
     def __init__(
         self,
         func,
-        args='',
+        args=b'',
         transports=None,
-        permission='push',
+        permission=b'push',
         cachekeyfn=None,
         extracapabilitiesfn=None,
     ):
@@ -292,7 +292,7 @@
         elif i == 1:
             return self.args
         else:
-            raise IndexError('can only access elements 0 and 1')
+            raise IndexError(b'can only access elements 0 and 1')
 
 
 class commanddict(dict):
@@ -308,7 +308,7 @@
         # Cast 2-tuples to commandentry instances.
         elif isinstance(v, tuple):
             if len(v) != 2:
-                raise ValueError('command tuples must have exactly 2 elements')
+                raise ValueError(b'command tuples must have exactly 2 elements')
 
             # It is common for extensions to wrap wire protocol commands via
             # e.g. ``wireproto.commands[x] = (newfn, args)``. Because callers
@@ -322,11 +322,12 @@
                     v[0],
                     args=v[1],
                     transports=set(TRANSPORTS),
-                    permission='push',
+                    permission=b'push',
                 )
         else:
             raise ValueError(
-                'command entries must be commandentry instances ' 'or 2-tuples'
+                b'command entries must be commandentry instances '
+                b'or 2-tuples'
             )
 
         return super(commanddict, self).__setitem__(k, v)
@@ -354,8 +355,8 @@
 
     # Allow config to override default list and ordering.
     if role == compression.SERVERROLE:
-        configengines = ui.configlist('server', 'compressionengines')
-        config = 'server.compressionengines'
+        configengines = ui.configlist(b'server', b'compressionengines')
+        config = b'server.compressionengines'
     else:
         # This is currently implemented mainly to facilitate testing. In most
         # cases, the server should be in charge of choosing a compression engine
@@ -363,14 +364,16 @@
         # CPU DoS due to an expensive engine or a network DoS due to poor
         # compression ratio).
         configengines = ui.configlist(
-            'experimental', 'clientcompressionengines'
+            b'experimental', b'clientcompressionengines'
         )
-        config = 'experimental.clientcompressionengines'
+        config = b'experimental.clientcompressionengines'
 
     # No explicit config. Filter out the ones that aren't supposed to be
     # advertised and return default ordering.
     if not configengines:
-        attr = 'serverpriority' if role == util.SERVERROLE else 'clientpriority'
+        attr = (
+            b'serverpriority' if role == util.SERVERROLE else b'clientpriority'
+        )
         return [
             e for e in compengines if getattr(e.wireprotosupport(), attr) > 0
         ]
@@ -383,8 +386,8 @@
     invalidnames = set(e for e in configengines if e not in validnames)
     if invalidnames:
         raise error.Abort(
-            _('invalid compression engine defined in %s: %s')
-            % (config, ', '.join(sorted(invalidnames)))
+            _(b'invalid compression engine defined in %s: %s')
+            % (config, b', '.join(sorted(invalidnames)))
         )
 
     compengines = [e for e in compengines if e.name() in configengines]
@@ -395,11 +398,12 @@
     if not compengines:
         raise error.Abort(
             _(
-                '%s config option does not specify any known '
-                'compression engines'
+                b'%s config option does not specify any known '
+                b'compression engines'
             )
             % config,
-            hint=_('usable compression engines: %s') % ', '.sorted(validnames),
+            hint=_(b'usable compression engines: %s')
+            % b', '.join(sorted(validnames)),
         )
 
     return compengines
--- a/mercurial/wireprotov1peer.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/wireprotov1peer.py	Sun Oct 06 09:48:39 2019 -0400
@@ -73,8 +73,8 @@
     '''placeholder for a value to be set later'''
 
     def set(self, value):
-        if util.safehasattr(self, 'value'):
-            raise error.RepoError("future is already set")
+        if util.safehasattr(self, b'value'):
+            raise error.RepoError(b"future is already set")
         self.value = value
 
 
@@ -89,13 +89,13 @@
         # servers.
         assert all(escapearg(k) == k for k in argsdict)
 
-        args = ','.join(
-            '%s=%s' % (escapearg(k), escapearg(v))
+        args = b','.join(
+            b'%s=%s' % (escapearg(k), escapearg(v))
             for k, v in argsdict.iteritems()
         )
-        cmds.append('%s %s' % (op, args))
+        cmds.append(b'%s %s' % (op, args))
 
-    return ';'.join(cmds)
+    return b';'.join(cmds)
 
 
 class unsentfuture(pycompat.futures.Future):
@@ -139,12 +139,12 @@
     def callcommand(self, command, args):
         if self._sent:
             raise error.ProgrammingError(
-                'callcommand() cannot be used ' 'after commands are sent'
+                b'callcommand() cannot be used ' b'after commands are sent'
             )
 
         if self._closed:
             raise error.ProgrammingError(
-                'callcommand() cannot be used ' 'after close()'
+                b'callcommand() cannot be used ' b'after close()'
             )
 
         # Commands are dispatched through methods on the peer.
@@ -152,8 +152,8 @@
 
         if not fn:
             raise error.ProgrammingError(
-                'cannot call command %s: method of same name not available '
-                'on peer' % command
+                b'cannot call command %s: method of same name not available '
+                b'on peer' % command
             )
 
         # Commands are either batchable or they aren't. If a command
@@ -179,8 +179,8 @@
         else:
             if self._calls:
                 raise error.ProgrammingError(
-                    '%s is not batchable and cannot be called on a command '
-                    'executor along with other commands' % command
+                    b'%s is not batchable and cannot be called on a command '
+                    b'executor along with other commands' % command
                 )
 
             f = addcall()
@@ -306,7 +306,7 @@
                 if not f.done():
                     f.set_exception(
                         error.ResponseError(
-                            _('unfulfilled batch command response')
+                            _(b'unfulfilled batch command response')
                         )
                     )
 
@@ -348,16 +348,16 @@
     # Begin of ipeercommands interface.
 
     def clonebundles(self):
-        self.requirecap('clonebundles', _('clone bundles'))
-        return self._call('clonebundles')
+        self.requirecap(b'clonebundles', _(b'clone bundles'))
+        return self._call(b'clonebundles')
 
     @batchable
     def lookup(self, key):
-        self.requirecap('lookup', _('look up remote revision'))
+        self.requirecap(b'lookup', _(b'look up remote revision'))
         f = future()
-        yield {'key': encoding.fromlocal(key)}, f
+        yield {b'key': encoding.fromlocal(key)}, f
         d = f.value
-        success, data = d[:-1].split(" ", 1)
+        success, data = d[:-1].split(b" ", 1)
         if int(success):
             yield bin(data)
         else:
@@ -371,17 +371,17 @@
         try:
             yield wireprototypes.decodelist(d[:-1])
         except ValueError:
-            self._abort(error.ResponseError(_("unexpected response:"), d))
+            self._abort(error.ResponseError(_(b"unexpected response:"), d))
 
     @batchable
     def known(self, nodes):
         f = future()
-        yield {'nodes': wireprototypes.encodelist(nodes)}, f
+        yield {b'nodes': wireprototypes.encodelist(nodes)}, f
         d = f.value
         try:
             yield [bool(int(b)) for b in pycompat.iterbytestr(d)]
         except ValueError:
-            self._abort(error.ResponseError(_("unexpected response:"), d))
+            self._abort(error.ResponseError(_(b"unexpected response:"), d))
 
     @batchable
     def branchmap(self):
@@ -391,83 +391,83 @@
         try:
             branchmap = {}
             for branchpart in d.splitlines():
-                branchname, branchheads = branchpart.split(' ', 1)
+                branchname, branchheads = branchpart.split(b' ', 1)
                 branchname = encoding.tolocal(urlreq.unquote(branchname))
                 branchheads = wireprototypes.decodelist(branchheads)
                 branchmap[branchname] = branchheads
             yield branchmap
         except TypeError:
-            self._abort(error.ResponseError(_("unexpected response:"), d))
+            self._abort(error.ResponseError(_(b"unexpected response:"), d))
 
     @batchable
     def listkeys(self, namespace):
-        if not self.capable('pushkey'):
+        if not self.capable(b'pushkey'):
             yield {}, None
         f = future()
-        self.ui.debug('preparing listkeys for "%s"\n' % namespace)
-        yield {'namespace': encoding.fromlocal(namespace)}, f
+        self.ui.debug(b'preparing listkeys for "%s"\n' % namespace)
+        yield {b'namespace': encoding.fromlocal(namespace)}, f
         d = f.value
         self.ui.debug(
-            'received listkey for "%s": %i bytes\n' % (namespace, len(d))
+            b'received listkey for "%s": %i bytes\n' % (namespace, len(d))
         )
         yield pushkeymod.decodekeys(d)
 
     @batchable
     def pushkey(self, namespace, key, old, new):
-        if not self.capable('pushkey'):
+        if not self.capable(b'pushkey'):
             yield False, None
         f = future()
-        self.ui.debug('preparing pushkey for "%s:%s"\n' % (namespace, key))
+        self.ui.debug(b'preparing pushkey for "%s:%s"\n' % (namespace, key))
         yield {
-            'namespace': encoding.fromlocal(namespace),
-            'key': encoding.fromlocal(key),
-            'old': encoding.fromlocal(old),
-            'new': encoding.fromlocal(new),
+            b'namespace': encoding.fromlocal(namespace),
+            b'key': encoding.fromlocal(key),
+            b'old': encoding.fromlocal(old),
+            b'new': encoding.fromlocal(new),
         }, f
         d = f.value
-        d, output = d.split('\n', 1)
+        d, output = d.split(b'\n', 1)
         try:
             d = bool(int(d))
         except ValueError:
             raise error.ResponseError(
-                _('push failed (unexpected response):'), d
+                _(b'push failed (unexpected response):'), d
             )
         for l in output.splitlines(True):
-            self.ui.status(_('remote: '), l)
+            self.ui.status(_(b'remote: '), l)
         yield d
 
     def stream_out(self):
-        return self._callstream('stream_out')
+        return self._callstream(b'stream_out')
 
     def getbundle(self, source, **kwargs):
         kwargs = pycompat.byteskwargs(kwargs)
-        self.requirecap('getbundle', _('look up remote changes'))
+        self.requirecap(b'getbundle', _(b'look up remote changes'))
         opts = {}
-        bundlecaps = kwargs.get('bundlecaps') or set()
+        bundlecaps = kwargs.get(b'bundlecaps') or set()
         for key, value in kwargs.iteritems():
             if value is None:
                 continue
             keytype = wireprototypes.GETBUNDLE_ARGUMENTS.get(key)
             if keytype is None:
                 raise error.ProgrammingError(
-                    'Unexpectedly None keytype for key %s' % key
+                    b'Unexpectedly None keytype for key %s' % key
                 )
-            elif keytype == 'nodes':
+            elif keytype == b'nodes':
                 value = wireprototypes.encodelist(value)
-            elif keytype == 'csv':
-                value = ','.join(value)
-            elif keytype == 'scsv':
-                value = ','.join(sorted(value))
-            elif keytype == 'boolean':
-                value = '%i' % bool(value)
-            elif keytype != 'plain':
-                raise KeyError('unknown getbundle option type %s' % keytype)
+            elif keytype == b'csv':
+                value = b','.join(value)
+            elif keytype == b'scsv':
+                value = b','.join(sorted(value))
+            elif keytype == b'boolean':
+                value = b'%i' % bool(value)
+            elif keytype != b'plain':
+                raise KeyError(b'unknown getbundle option type %s' % keytype)
             opts[key] = value
-        f = self._callcompressable("getbundle", **pycompat.strkwargs(opts))
-        if any((cap.startswith('HG2') for cap in bundlecaps)):
+        f = self._callcompressable(b"getbundle", **pycompat.strkwargs(opts))
+        if any((cap.startswith(b'HG2') for cap in bundlecaps)):
             return bundle2.getunbundler(self.ui, f)
         else:
-            return changegroupmod.cg1unpacker(f, 'UN')
+            return changegroupmod.cg1unpacker(f, b'UN')
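
A worked example of the encoding loop above, assuming two 20-byte nodes n1 and n2 (illustrative values only):

    # kwargs: common=[n1, n2], listkeys=[b'phases'], cg=True
    # per GETBUNDLE_ARGUMENTS in wireprototypes.py this becomes:
    # opts = {
    #     b'common': wireprototypes.encodelist([n1, n2]),  # b'nodes'
    #     b'listkeys': b'phases',                          # b'csv'
    #     b'cg': b'1',                                     # b'boolean'
    # }
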
 
     def unbundle(self, bundle, heads, url):
         '''Send cg (a readable file-like object representing the
@@ -483,30 +483,30 @@
         visible to hooks.
         '''
 
-        if heads != ['force'] and self.capable('unbundlehash'):
+        if heads != [b'force'] and self.capable(b'unbundlehash'):
             heads = wireprototypes.encodelist(
-                ['hashed', hashlib.sha1(''.join(sorted(heads))).digest()]
+                [b'hashed', hashlib.sha1(b''.join(sorted(heads))).digest()]
             )
         else:
             heads = wireprototypes.encodelist(heads)
 
-        if util.safehasattr(bundle, 'deltaheader'):
+        if util.safehasattr(bundle, b'deltaheader'):
             # this a bundle10, do the old style call sequence
-            ret, output = self._callpush("unbundle", bundle, heads=heads)
-            if ret == "":
-                raise error.ResponseError(_('push failed:'), output)
+            ret, output = self._callpush(b"unbundle", bundle, heads=heads)
+            if ret == b"":
+                raise error.ResponseError(_(b'push failed:'), output)
             try:
                 ret = int(ret)
             except ValueError:
                 raise error.ResponseError(
-                    _('push failed (unexpected response):'), ret
+                    _(b'push failed (unexpected response):'), ret
                 )
 
             for l in output.splitlines(True):
-                self.ui.status(_('remote: '), l)
+                self.ui.status(_(b'remote: '), l)
         else:
             # bundle2 push. Send a stream, fetch a stream.
-            stream = self._calltwowaystream('unbundle', bundle, heads=heads)
+            stream = self._calltwowaystream(b'unbundle', bundle, heads=heads)
             ret = bundle2.getunbundler(self.ui, stream)
         return ret
 
@@ -516,46 +516,46 @@
 
     def branches(self, nodes):
         n = wireprototypes.encodelist(nodes)
-        d = self._call("branches", nodes=n)
+        d = self._call(b"branches", nodes=n)
         try:
             br = [tuple(wireprototypes.decodelist(b)) for b in d.splitlines()]
             return br
         except ValueError:
-            self._abort(error.ResponseError(_("unexpected response:"), d))
+            self._abort(error.ResponseError(_(b"unexpected response:"), d))
 
     def between(self, pairs):
         batch = 8  # avoid giant requests
         r = []
         for i in pycompat.xrange(0, len(pairs), batch):
-            n = " ".join(
+            n = b" ".join(
                 [
-                    wireprototypes.encodelist(p, '-')
+                    wireprototypes.encodelist(p, b'-')
                     for p in pairs[i : i + batch]
                 ]
             )
-            d = self._call("between", pairs=n)
+            d = self._call(b"between", pairs=n)
             try:
                 r.extend(
                     l and wireprototypes.decodelist(l) or []
                     for l in d.splitlines()
                 )
             except ValueError:
-                self._abort(error.ResponseError(_("unexpected response:"), d))
+                self._abort(error.ResponseError(_(b"unexpected response:"), d))
         return r
 
     def changegroup(self, nodes, source):
         n = wireprototypes.encodelist(nodes)
-        f = self._callcompressable("changegroup", roots=n)
-        return changegroupmod.cg1unpacker(f, 'UN')
+        f = self._callcompressable(b"changegroup", roots=n)
+        return changegroupmod.cg1unpacker(f, b'UN')
 
     def changegroupsubset(self, bases, heads, source):
-        self.requirecap('changegroupsubset', _('look up remote changes'))
+        self.requirecap(b'changegroupsubset', _(b'look up remote changes'))
         bases = wireprototypes.encodelist(bases)
         heads = wireprototypes.encodelist(heads)
         f = self._callcompressable(
-            "changegroupsubset", bases=bases, heads=heads
+            b"changegroupsubset", bases=bases, heads=heads
         )
-        return changegroupmod.cg1unpacker(f, 'UN')
+        return changegroupmod.cg1unpacker(f, b'UN')
 
     # End of ipeerlegacycommands interface.
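
The wire encoding used by between() above: each (top, bottom) pair is the two hex-encoded nodes joined by '-', pairs are separated by spaces and issued in batches of eight; e.g.:

    from mercurial import wireprototypes

    n1, n2 = b'\x11' * 20, b'\x22' * 20
    wireprototypes.encodelist([n1, n2], b'-')
    # -> b'1111...1111-2222...2222' (two 40-character hex node ids)
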
 
@@ -565,28 +565,28 @@
         Returns an iterator of the raw responses from the server.
         """
         ui = self.ui
-        if ui.debugflag and ui.configbool('devel', 'debug.peer-request'):
-            ui.debug('devel-peer-request: batched-content\n')
+        if ui.debugflag and ui.configbool(b'devel', b'debug.peer-request'):
+            ui.debug(b'devel-peer-request: batched-content\n')
             for op, args in req:
-                msg = 'devel-peer-request:    - %s (%d arguments)\n'
+                msg = b'devel-peer-request:    - %s (%d arguments)\n'
                 ui.debug(msg % (op, len(args)))
 
         unescapearg = wireprototypes.unescapebatcharg
 
-        rsp = self._callstream("batch", cmds=encodebatchcmds(req))
+        rsp = self._callstream(b"batch", cmds=encodebatchcmds(req))
         chunk = rsp.read(1024)
         work = [chunk]
         while chunk:
-            while ';' not in chunk and chunk:
+            while b';' not in chunk and chunk:
                 chunk = rsp.read(1024)
                 work.append(chunk)
-            merged = ''.join(work)
-            while ';' in merged:
-                one, merged = merged.split(';', 1)
+            merged = b''.join(work)
+            while b';' in merged:
+                one, merged = merged.split(b';', 1)
                 yield unescapearg(one)
             chunk = rsp.read(1024)
             work = [merged, chunk]
-        yield unescapearg(''.join(work))
+        yield unescapearg(b''.join(work))
 
     def _submitone(self, op, args):
         return self._call(op, **pycompat.strkwargs(args))
@@ -598,7 +598,7 @@
             opts[r'three'] = three
         if four is not None:
             opts[r'four'] = four
-        return self._call('debugwireargs', one=one, two=two, **opts)
+        return self._call(b'debugwireargs', one=one, two=two, **opts)
 
     def _call(self, cmd, **args):
         """execute <cmd> on the server
--- a/mercurial/wireprotov1server.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/wireprotov1server.py	Sun Oct 06 09:48:39 2019 -0400
@@ -38,11 +38,11 @@
 urlerr = util.urlerr
 urlreq = util.urlreq
 
-bundle2requiredmain = _('incompatible Mercurial client; bundle2 required')
+bundle2requiredmain = _(b'incompatible Mercurial client; bundle2 required')
 bundle2requiredhint = _(
-    'see https://www.mercurial-scm.org/wiki/' 'IncompatibleClient'
+    b'see https://www.mercurial-scm.org/wiki/' b'IncompatibleClient'
 )
-bundle2required = '%s\n(%s)\n' % (bundle2requiredmain, bundle2requiredhint)
+bundle2required = b'%s\n(%s)\n' % (bundle2requiredmain, bundle2requiredhint)
 
 
 def clientcompressionsupport(proto):
@@ -53,9 +53,9 @@
     been announced, fallback to the default of zlib and uncompressed.
     """
     for cap in proto.getprotocaps():
-        if cap.startswith('comp='):
-            return cap[5:].split(',')
-    return ['zlib', 'none']
+        if cap.startswith(b'comp='):
+            return cap[5:].split(b',')
+    return [b'zlib', b'none']
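
A quick illustration of the parsing above, with a minimal stand-in for the protocol handler (the class below is hypothetical):

    from mercurial import wireprotov1server

    class fakeproto(object):
        def getprotocaps(self):
            return {b'0.2', b'comp=zstd,zlib'}

    wireprotov1server.clientcompressionsupport(fakeproto())
    # -> [b'zstd', b'zlib']; without a b'comp=' capability: [b'zlib', b'none']
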
 
 
 # wire protocol command can either return a string or one of these classes.
@@ -68,7 +68,7 @@
     extensions that need commands to operate on different repo views under
     specialized circumstances.
     """
-    viewconfig = repo.ui.config('server', 'view')
+    viewconfig = repo.ui.config(b'server', b'view')
     return repo.filtered(viewconfig)
 
 
@@ -89,8 +89,8 @@
             del others[k]
     if others:
         procutil.stderr.write(
-            "warning: %s ignored unexpected arguments %s\n"
-            % (cmd, ",".join(others))
+            b"warning: %s ignored unexpected arguments %s\n"
+            % (cmd, b",".join(others))
         )
     return opts
 
@@ -106,29 +106,29 @@
     4. server.bundle1
     """
     ui = repo.ui
-    gd = 'generaldelta' in repo.requirements
+    gd = b'generaldelta' in repo.requirements
 
     if gd:
-        v = ui.configbool('server', 'bundle1gd.%s' % action)
+        v = ui.configbool(b'server', b'bundle1gd.%s' % action)
         if v is not None:
             return v
 
-    v = ui.configbool('server', 'bundle1.%s' % action)
+    v = ui.configbool(b'server', b'bundle1.%s' % action)
     if v is not None:
         return v
 
     if gd:
-        v = ui.configbool('server', 'bundle1gd')
+        v = ui.configbool(b'server', b'bundle1gd')
         if v is not None:
             return v
 
-    return ui.configbool('server', 'bundle1')
+    return ui.configbool(b'server', b'bundle1')
 
 
 commands = wireprototypes.commanddict()
 
 
-def wireprotocommand(name, args=None, permission='push'):
+def wireprotocommand(name, args=None, permission=b'push'):
     """Decorator to declare a wire protocol command.
 
     ``name`` is the name of the wire protocol command being provided.
@@ -144,7 +144,7 @@
     a repository that is supposed to be read-only.
     """
     transports = {
-        k for k, v in wireprototypes.TRANSPORTS.items() if v['version'] == 1
+        k for k, v in wireprototypes.TRANSPORTS.items() if v[b'version'] == 1
     }
 
     # Because SSHv2 is a mirror of SSHv1, we allow "batch" commands through to
@@ -153,24 +153,24 @@
     if name == b'batch':
         transports.add(wireprototypes.SSHV2)
 
-    if permission not in ('push', 'pull'):
+    if permission not in (b'push', b'pull'):
         raise error.ProgrammingError(
-            'invalid wire protocol permission; '
-            'got %s; expected "push" or "pull"' % permission
+            b'invalid wire protocol permission; '
+            b'got %s; expected "push" or "pull"' % permission
         )
 
     if args is None:
-        args = ''
+        args = b''
 
     if not isinstance(args, bytes):
         raise error.ProgrammingError(
-            'arguments for version 1 commands ' 'must be declared as bytes'
+            b'arguments for version 1 commands ' b'must be declared as bytes'
         )
 
     def register(func):
         if name in commands:
             raise error.ProgrammingError(
-                '%s command already registered ' 'for version 1' % name
+                b'%s command already registered ' b'for version 1' % name
             )
         commands[name] = wireprototypes.commandentry(
             func, args=args, transports=transports, permission=permission
@@ -182,34 +182,34 @@
 
 
 # TODO define a more appropriate permissions type to use for this.
-@wireprotocommand('batch', 'cmds *', permission='pull')
+@wireprotocommand(b'batch', b'cmds *', permission=b'pull')
 def batch(repo, proto, cmds, others):
     unescapearg = wireprototypes.unescapebatcharg
     res = []
-    for pair in cmds.split(';'):
-        op, args = pair.split(' ', 1)
+    for pair in cmds.split(b';'):
+        op, args = pair.split(b' ', 1)
         vals = {}
-        for a in args.split(','):
+        for a in args.split(b','):
             if a:
-                n, v = a.split('=')
+                n, v = a.split(b'=')
                 vals[unescapearg(n)] = unescapearg(v)
         func, spec = commands[op]
 
         # Validate that client has permissions to perform this command.
         perm = commands[op].permission
-        assert perm in ('push', 'pull')
+        assert perm in (b'push', b'pull')
         proto.checkperm(perm)
 
         if spec:
             keys = spec.split()
             data = {}
             for k in keys:
-                if k == '*':
+                if k == b'*':
                     star = {}
                     for key in vals.keys():
                         if key not in keys:
                             star[key] = vals[key]
-                    data['*'] = star
+                    data[b'*'] = star
                 else:
                     data[k] = vals[k]
             result = func(repo, proto, *[data[k] for k in keys])
@@ -225,42 +225,42 @@
             result = result.data
         res.append(wireprototypes.escapebatcharg(result))
 
-    return wireprototypes.bytesresponse(';'.join(res))
+    return wireprototypes.bytesresponse(b';'.join(res))
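
The request format parsed above is produced by encodebatchcmds() in wireprotov1peer.py: commands separated by ';', arguments by ',', names from values by '=', with escapebatcharg() keeping those delimiters unambiguous; e.g.:

    from mercurial import wireprotov1peer

    wireprotov1peer.encodebatchcmds(
        [(b'heads', {}), (b'lookup', {b'key': b'tip'})]
    )
    # -> b'heads ;lookup key=tip'
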
 
 
-@wireprotocommand('between', 'pairs', permission='pull')
+@wireprotocommand(b'between', b'pairs', permission=b'pull')
 def between(repo, proto, pairs):
-    pairs = [wireprototypes.decodelist(p, '-') for p in pairs.split(" ")]
+    pairs = [wireprototypes.decodelist(p, b'-') for p in pairs.split(b" ")]
     r = []
     for b in repo.between(pairs):
-        r.append(wireprototypes.encodelist(b) + "\n")
+        r.append(wireprototypes.encodelist(b) + b"\n")
 
-    return wireprototypes.bytesresponse(''.join(r))
+    return wireprototypes.bytesresponse(b''.join(r))
 
 
-@wireprotocommand('branchmap', permission='pull')
+@wireprotocommand(b'branchmap', permission=b'pull')
 def branchmap(repo, proto):
     branchmap = repo.branchmap()
     heads = []
     for branch, nodes in branchmap.iteritems():
         branchname = urlreq.quote(encoding.fromlocal(branch))
         branchnodes = wireprototypes.encodelist(nodes)
-        heads.append('%s %s' % (branchname, branchnodes))
+        heads.append(b'%s %s' % (branchname, branchnodes))
 
-    return wireprototypes.bytesresponse('\n'.join(heads))
+    return wireprototypes.bytesresponse(b'\n'.join(heads))
 
 
-@wireprotocommand('branches', 'nodes', permission='pull')
+@wireprotocommand(b'branches', b'nodes', permission=b'pull')
 def branches(repo, proto, nodes):
     nodes = wireprototypes.decodelist(nodes)
     r = []
     for b in repo.branches(nodes):
-        r.append(wireprototypes.encodelist(b) + "\n")
+        r.append(wireprototypes.encodelist(b) + b"\n")
 
-    return wireprototypes.bytesresponse(''.join(r))
+    return wireprototypes.bytesresponse(b''.join(r))
 
 
-@wireprotocommand('clonebundles', '', permission='pull')
+@wireprotocommand(b'clonebundles', b'', permission=b'pull')
 def clonebundles(repo, proto):
     """Server command for returning info for available bundles to seed clones.
 
@@ -271,17 +271,17 @@
     data center given the client's IP address.
     """
     return wireprototypes.bytesresponse(
-        repo.vfs.tryread('clonebundles.manifest')
+        repo.vfs.tryread(b'clonebundles.manifest')
     )
 
 
 wireprotocaps = [
-    'lookup',
-    'branchmap',
-    'pushkey',
-    'known',
-    'getbundle',
-    'unbundlehash',
+    b'lookup',
+    b'branchmap',
+    b'pushkey',
+    b'known',
+    b'getbundle',
+    b'unbundlehash',
 ]
 
 
@@ -300,27 +300,27 @@
 
     # Command of same name as capability isn't exposed to version 1 of
     # transports. So conditionally add it.
-    if commands.commandavailable('changegroupsubset', proto):
-        caps.append('changegroupsubset')
+    if commands.commandavailable(b'changegroupsubset', proto):
+        caps.append(b'changegroupsubset')
 
     if streamclone.allowservergeneration(repo):
-        if repo.ui.configbool('server', 'preferuncompressed'):
-            caps.append('stream-preferred')
+        if repo.ui.configbool(b'server', b'preferuncompressed'):
+            caps.append(b'stream-preferred')
         requiredformats = repo.requirements & repo.supportedformats
         # if our local revlogs are just revlogv1, add 'stream' cap
-        if not requiredformats - {'revlogv1'}:
-            caps.append('stream')
+        if not requiredformats - {b'revlogv1'}:
+            caps.append(b'stream')
         # otherwise, add 'streamreqs' detailing our local revlog format
         else:
-            caps.append('streamreqs=%s' % ','.join(sorted(requiredformats)))
-    if repo.ui.configbool('experimental', 'bundle2-advertise'):
-        capsblob = bundle2.encodecaps(bundle2.getrepocaps(repo, role='server'))
-        caps.append('bundle2=' + urlreq.quote(capsblob))
-    caps.append('unbundle=%s' % ','.join(bundle2.bundlepriority))
+            caps.append(b'streamreqs=%s' % b','.join(sorted(requiredformats)))
+    if repo.ui.configbool(b'experimental', b'bundle2-advertise'):
+        capsblob = bundle2.encodecaps(bundle2.getrepocaps(repo, role=b'server'))
+        caps.append(b'bundle2=' + urlreq.quote(capsblob))
+    caps.append(b'unbundle=%s' % b','.join(bundle2.bundlepriority))
 
-    if repo.ui.configbool('experimental', 'narrow'):
+    if repo.ui.configbool(b'experimental', b'narrow'):
         caps.append(wireprototypes.NARROWCAP)
-        if repo.ui.configbool('experimental', 'narrowservebrokenellipses'):
+        if repo.ui.configbool(b'experimental', b'narrowservebrokenellipses'):
             caps.append(wireprototypes.ELLIPSESCAP)
 
     return proto.addcapabilities(repo, caps)
@@ -328,37 +328,37 @@
 
 # If you are writing an extension and consider wrapping this function. Wrap
 # `_capabilities` instead.
-@wireprotocommand('capabilities', permission='pull')
+@wireprotocommand(b'capabilities', permission=b'pull')
 def capabilities(repo, proto):
     caps = _capabilities(repo, proto)
-    return wireprototypes.bytesresponse(' '.join(sorted(caps)))
+    return wireprototypes.bytesresponse(b' '.join(sorted(caps)))
 
 
-@wireprotocommand('changegroup', 'roots', permission='pull')
+@wireprotocommand(b'changegroup', b'roots', permission=b'pull')
 def changegroup(repo, proto, roots):
     nodes = wireprototypes.decodelist(roots)
     outgoing = discovery.outgoing(
         repo, missingroots=nodes, missingheads=repo.heads()
     )
-    cg = changegroupmod.makechangegroup(repo, outgoing, '01', 'serve')
-    gen = iter(lambda: cg.read(32768), '')
+    cg = changegroupmod.makechangegroup(repo, outgoing, b'01', b'serve')
+    gen = iter(lambda: cg.read(32768), b'')
     return wireprototypes.streamres(gen=gen)
 
 
-@wireprotocommand('changegroupsubset', 'bases heads', permission='pull')
+@wireprotocommand(b'changegroupsubset', b'bases heads', permission=b'pull')
 def changegroupsubset(repo, proto, bases, heads):
     bases = wireprototypes.decodelist(bases)
     heads = wireprototypes.decodelist(heads)
     outgoing = discovery.outgoing(repo, missingroots=bases, missingheads=heads)
-    cg = changegroupmod.makechangegroup(repo, outgoing, '01', 'serve')
-    gen = iter(lambda: cg.read(32768), '')
+    cg = changegroupmod.makechangegroup(repo, outgoing, b'01', b'serve')
+    gen = iter(lambda: cg.read(32768), b'')
     return wireprototypes.streamres(gen=gen)
 
 
-@wireprotocommand('debugwireargs', 'one two *', permission='pull')
+@wireprotocommand(b'debugwireargs', b'one two *', permission=b'pull')
 def debugwireargs(repo, proto, one, two, others):
     # only accept optional args from the known set
-    opts = options('debugwireargs', ['three', 'four'], others)
+    opts = options(b'debugwireargs', [b'three', b'four'], others)
     return wireprototypes.bytesresponse(
         repo.debugwireargs(one, two, **pycompat.strkwargs(opts))
     )
@@ -381,9 +381,9 @@
     """
 
     def decodehexstring(s):
-        return {binascii.unhexlify(h) for h in s.split(';')}
+        return {binascii.unhexlify(h) for h in s.split(b';')}
 
-    manifest = repo.vfs.tryread('pullbundles.manifest')
+    manifest = repo.vfs.tryread(b'pullbundles.manifest')
     if not manifest:
         return None
     res = exchange.parseclonebundlesmanifest(repo, manifest)
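     # A hypothetical manifest entry, for illustration only (keys as
     # consumed below): a URL followed by key=value pairs such as
     # COMPRESSION=zstd, heads=<hex>;<hex> and bases=<hex>, the hex
     # values being split on b';'.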
@@ -395,15 +395,15 @@
     common_anc = cl.ancestors([cl.rev(rev) for rev in common], inclusive=True)
     compformats = clientcompressionsupport(proto)
     for entry in res:
-        comp = entry.get('COMPRESSION')
+        comp = entry.get(b'COMPRESSION')
         altcomp = util.compengines._bundlenames.get(comp)
         if comp and comp not in compformats and altcomp not in compformats:
             continue
         # No test yet for VERSION, since V2 is supported by any client
         # that advertises partial pulls
-        if 'heads' in entry:
+        if b'heads' in entry:
             try:
-                bundle_heads = decodehexstring(entry['heads'])
+                bundle_heads = decodehexstring(entry[b'heads'])
             except TypeError:
                 # Bad heads entry
                 continue
@@ -416,61 +416,61 @@
                 for rev in bundle_heads
             ):
                 continue
-        if 'bases' in entry:
+        if b'bases' in entry:
             try:
-                bundle_bases = decodehexstring(entry['bases'])
+                bundle_bases = decodehexstring(entry[b'bases'])
             except TypeError:
                 # Bad bases entry
                 continue
             if not all(cl.rev(rev) in common_anc for rev in bundle_bases):
                 continue
-        path = entry['URL']
-        repo.ui.debug('sending pullbundle "%s"\n' % path)
+        path = entry[b'URL']
+        repo.ui.debug(b'sending pullbundle "%s"\n' % path)
         try:
             return repo.vfs.open(path)
         except IOError:
-            repo.ui.debug('pullbundle "%s" not accessible\n' % path)
+            repo.ui.debug(b'pullbundle "%s" not accessible\n' % path)
             continue
     return None
 
 
-@wireprotocommand('getbundle', '*', permission='pull')
+@wireprotocommand(b'getbundle', b'*', permission=b'pull')
 def getbundle(repo, proto, others):
     opts = options(
-        'getbundle', wireprototypes.GETBUNDLE_ARGUMENTS.keys(), others
+        b'getbundle', wireprototypes.GETBUNDLE_ARGUMENTS.keys(), others
     )
     for k, v in opts.iteritems():
         keytype = wireprototypes.GETBUNDLE_ARGUMENTS[k]
-        if keytype == 'nodes':
+        if keytype == b'nodes':
             opts[k] = wireprototypes.decodelist(v)
-        elif keytype == 'csv':
-            opts[k] = list(v.split(','))
-        elif keytype == 'scsv':
-            opts[k] = set(v.split(','))
-        elif keytype == 'boolean':
+        elif keytype == b'csv':
+            opts[k] = list(v.split(b','))
+        elif keytype == b'scsv':
+            opts[k] = set(v.split(b','))
+        elif keytype == b'boolean':
             # The client should serialize False as '0', which is a non-empty
             # string and therefore evaluates as a true bool.
-            if v == '0':
+            if v == b'0':
                 opts[k] = False
             else:
                 opts[k] = bool(v)
-        elif keytype != 'plain':
-            raise KeyError('unknown getbundle option type %s' % keytype)
+        elif keytype != b'plain':
+            raise KeyError(b'unknown getbundle option type %s' % keytype)
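     # Illustrative decodings (values are made up): a 'scsv' value of
     # b'HG20,bundle2' becomes {b'HG20', b'bundle2'}, a 'csv' value keeps
     # order as a list, and a 'boolean' value of b'0' decodes to False
     # while any other non-empty value is truthy.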
 
-    if not bundle1allowed(repo, 'pull'):
-        if not exchange.bundle2requested(opts.get('bundlecaps')):
-            if proto.name == 'http-v1':
+    if not bundle1allowed(repo, b'pull'):
+        if not exchange.bundle2requested(opts.get(b'bundlecaps')):
+            if proto.name == b'http-v1':
                 return wireprototypes.ooberror(bundle2required)
             raise error.Abort(bundle2requiredmain, hint=bundle2requiredhint)
 
     try:
         clheads = set(repo.changelog.heads())
-        heads = set(opts.get('heads', set()))
-        common = set(opts.get('common', set()))
+        heads = set(opts.get(b'heads', set()))
+        common = set(opts.get(b'common', set()))
         common.discard(nullid)
         if (
-            repo.ui.configbool('server', 'pullbundle')
-            and 'partial-pull' in proto.getprotocaps()
+            repo.ui.configbool(b'server', b'pullbundle')
+            and b'partial-pull' in proto.getprotocaps()
         ):
             # Check if a pre-built bundle covers this request.
             bundle = find_pullbundle(repo, proto, opts, clheads, heads, common)
@@ -479,32 +479,32 @@
                     gen=util.filechunkiter(bundle), prefer_uncompressed=True
                 )
 
-        if repo.ui.configbool('server', 'disablefullbundle'):
+        if repo.ui.configbool(b'server', b'disablefullbundle'):
             # Check to see if this is a full clone.
-            changegroup = opts.get('cg', True)
+            changegroup = opts.get(b'cg', True)
             if changegroup and not common and clheads == heads:
                 raise error.Abort(
-                    _('server has pull-based clones disabled'),
-                    hint=_('remove --pull if specified or upgrade Mercurial'),
+                    _(b'server has pull-based clones disabled'),
+                    hint=_(b'remove --pull if specified or upgrade Mercurial'),
                 )
 
         info, chunks = exchange.getbundlechunks(
-            repo, 'serve', **pycompat.strkwargs(opts)
+            repo, b'serve', **pycompat.strkwargs(opts)
         )
-        prefercompressed = info.get('prefercompressed', True)
+        prefercompressed = info.get(b'prefercompressed', True)
     except error.Abort as exc:
         # cleanly forward Abort error to the client
-        if not exchange.bundle2requested(opts.get('bundlecaps')):
-            if proto.name == 'http-v1':
-                return wireprototypes.ooberror(pycompat.bytestr(exc) + '\n')
+        if not exchange.bundle2requested(opts.get(b'bundlecaps')):
+            if proto.name == b'http-v1':
+                return wireprototypes.ooberror(pycompat.bytestr(exc) + b'\n')
             raise  # cannot do better for bundle1 + ssh
         # a bundle2 request expects a bundle2 reply
         bundler = bundle2.bundle20(repo.ui)
-        manargs = [('message', pycompat.bytestr(exc))]
+        manargs = [(b'message', pycompat.bytestr(exc))]
         advargs = []
         if exc.hint is not None:
-            advargs.append(('hint', exc.hint))
-        bundler.addpart(bundle2.bundlepart('error:abort', manargs, advargs))
+            advargs.append((b'hint', exc.hint))
+        bundler.addpart(bundle2.bundlepart(b'error:abort', manargs, advargs))
         chunks = bundler.getchunks()
         prefercompressed = False
 
@@ -513,13 +513,13 @@
     )
 
 
-@wireprotocommand('heads', permission='pull')
+@wireprotocommand(b'heads', permission=b'pull')
 def heads(repo, proto):
     h = repo.heads()
-    return wireprototypes.bytesresponse(wireprototypes.encodelist(h) + '\n')
+    return wireprototypes.bytesresponse(wireprototypes.encodelist(h) + b'\n')
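 # For illustration (not part of the original source): encodelist() is
 # assumed to hex-encode each 20-byte node and join them with spaces, so
 # the body looks like b'<40 hex digits> <40 hex digits>\n'.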
 
 
-@wireprotocommand('hello', permission='pull')
+@wireprotocommand(b'hello', permission=b'pull')
 def hello(repo, proto):
     """Called as part of SSH handshake to obtain server info.
 
@@ -532,16 +532,16 @@
         capabilities: <token0> <token1> <token2>
     """
     caps = capabilities(repo, proto).data
-    return wireprototypes.bytesresponse('capabilities: %s\n' % caps)
+    return wireprototypes.bytesresponse(b'capabilities: %s\n' % caps)
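 # An illustrative (hypothetical) hello response body:
 #
 #   capabilities: branchmap getbundle known lookup pushkey unbundlehash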
 
 
-@wireprotocommand('listkeys', 'namespace', permission='pull')
+@wireprotocommand(b'listkeys', b'namespace', permission=b'pull')
 def listkeys(repo, proto, namespace):
     d = sorted(repo.listkeys(encoding.tolocal(namespace)).items())
     return wireprototypes.bytesresponse(pushkeymod.encodekeys(d))
 
 
-@wireprotocommand('lookup', 'key', permission='pull')
+@wireprotocommand(b'lookup', b'key', permission=b'pull')
 def lookup(repo, proto, key):
     try:
         k = encoding.tolocal(key)
@@ -551,25 +551,25 @@
     except Exception as inst:
         r = stringutil.forcebytestr(inst)
         success = 0
-    return wireprototypes.bytesresponse('%d %s\n' % (success, r))
+    return wireprototypes.bytesresponse(b'%d %s\n' % (success, r))
 
 
-@wireprotocommand('known', 'nodes *', permission='pull')
+@wireprotocommand(b'known', b'nodes *', permission=b'pull')
 def known(repo, proto, nodes, others):
-    v = ''.join(
-        b and '1' or '0' for b in repo.known(wireprototypes.decodelist(nodes))
+    v = b''.join(
+        b and b'1' or b'0' for b in repo.known(wireprototypes.decodelist(nodes))
     )
     return wireprototypes.bytesresponse(v)
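 # Illustrative example (not from the original source): if the server
 # knows the first and third of three submitted nodes, v == b'101'.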
 
 
-@wireprotocommand('protocaps', 'caps', permission='pull')
+@wireprotocommand(b'protocaps', b'caps', permission=b'pull')
 def protocaps(repo, proto, caps):
     if proto.name == wireprototypes.SSHV1:
-        proto._protocaps = set(caps.split(' '))
-    return wireprototypes.bytesresponse('OK')
+        proto._protocaps = set(caps.split(b' '))
+    return wireprototypes.bytesresponse(b'OK')
 
 
-@wireprotocommand('pushkey', 'namespace key old new', permission='push')
+@wireprotocommand(b'pushkey', b'namespace key old new', permission=b'push')
 def pushkey(repo, proto, namespace, key, old, new):
     # compatibility with pre-1.8 clients which were accidentally
     # sending raw binary nodes rather than utf-8-encoded hex
@@ -594,11 +594,11 @@
             or False
         )
 
-    output = output.getvalue() if output else ''
-    return wireprototypes.bytesresponse('%d\n%s' % (int(r), output))
+    output = output.getvalue() if output else b''
+    return wireprototypes.bytesresponse(b'%d\n%s' % (int(r), output))
 
 
-@wireprotocommand('stream_out', permission='pull')
+@wireprotocommand(b'stream_out', permission=b'pull')
 def stream(repo, proto):
     '''If the server supports streaming clone, it advertises the "stream"
     capability with a value representing the version and flags of the repo
@@ -607,17 +607,17 @@
     return wireprototypes.streamreslegacy(streamclone.generatev1wireproto(repo))
 
 
-@wireprotocommand('unbundle', 'heads', permission='push')
+@wireprotocommand(b'unbundle', b'heads', permission=b'push')
 def unbundle(repo, proto, heads):
     their_heads = wireprototypes.decodelist(heads)
 
     with proto.mayberedirectstdio() as output:
         try:
-            exchange.check_heads(repo, their_heads, 'preparing changes')
+            exchange.check_heads(repo, their_heads, b'preparing changes')
             cleanup = lambda: None
             try:
                 payload = proto.getpayload()
-                if repo.ui.configbool('server', 'streamunbundle'):
+                if repo.ui.configbool(b'server', b'streamunbundle'):
 
                     def cleanup():
                         # Ensure that the full payload is consumed, so
@@ -636,11 +636,11 @@
                         if tempname:
                             os.unlink(tempname)
 
-                    fd, tempname = pycompat.mkstemp(prefix='hg-unbundle-')
+                    fd, tempname = pycompat.mkstemp(prefix=b'hg-unbundle-')
                     repo.ui.debug(
-                        'redirecting incoming bundle to %s\n' % tempname
+                        b'redirecting incoming bundle to %s\n' % tempname
                     )
-                    fp = os.fdopen(fd, pycompat.sysstr('wb+'))
+                    fp = os.fdopen(fd, pycompat.sysstr(b'wb+'))
                     for p in payload:
                         fp.write(p)
                     fp.seek(0)
@@ -648,8 +648,8 @@
                 gen = exchange.readbundle(repo.ui, fp, None)
                 if isinstance(
                     gen, changegroupmod.cg1unpacker
-                ) and not bundle1allowed(repo, 'push'):
-                    if proto.name == 'http-v1':
+                ) and not bundle1allowed(repo, b'push'):
+                    if proto.name == b'http-v1':
                        # need to special case http because stderr does not get to
                         # the http client on a failed push, so we need to abuse
                         # some other error type to make sure the message gets to
@@ -660,14 +660,14 @@
                     )
 
                 r = exchange.unbundle(
-                    repo, gen, their_heads, 'serve', proto.client()
+                    repo, gen, their_heads, b'serve', proto.client()
                 )
-                if util.safehasattr(r, 'addpart'):
+                if util.safehasattr(r, b'addpart'):
                    # The return looks streamable; we are in the bundle2 case
                    # and should return a stream.
                     return wireprototypes.streamreslegacy(gen=r.getchunks())
                 return wireprototypes.pushres(
-                    r, output.getvalue() if output else ''
+                    r, output.getvalue() if output else b''
                 )
 
             finally:
@@ -683,17 +683,17 @@
                    # We did not change it, to minimise code change.
                     # This needs to be moved to something proper.
                     # Feel free to do it.
-                    procutil.stderr.write("abort: %s\n" % exc)
+                    procutil.stderr.write(b"abort: %s\n" % exc)
                     if exc.hint is not None:
-                        procutil.stderr.write("(%s)\n" % exc.hint)
+                        procutil.stderr.write(b"(%s)\n" % exc.hint)
                     procutil.stderr.flush()
                     return wireprototypes.pushres(
-                        0, output.getvalue() if output else ''
+                        0, output.getvalue() if output else b''
                     )
                 except error.PushRaced:
                     return wireprototypes.pusherr(
                         pycompat.bytestr(exc),
-                        output.getvalue() if output else '',
+                        output.getvalue() if output else b'',
                     )
 
             bundler = bundle2.bundle20(repo.ui)
@@ -707,41 +707,41 @@
                     remotecaps = getattr(exc, '_replycaps', None)
                     if (
                         remotecaps is not None
-                        and 'pushkey' not in remotecaps.get('error', ())
+                        and b'pushkey' not in remotecaps.get(b'error', ())
                     ):
                         # no remote support; fall back to the Abort handler.
                         raise
-                    part = bundler.newpart('error:pushkey')
-                    part.addparam('in-reply-to', exc.partid)
+                    part = bundler.newpart(b'error:pushkey')
+                    part.addparam(b'in-reply-to', exc.partid)
                     if exc.namespace is not None:
                         part.addparam(
-                            'namespace', exc.namespace, mandatory=False
+                            b'namespace', exc.namespace, mandatory=False
                         )
                     if exc.key is not None:
-                        part.addparam('key', exc.key, mandatory=False)
+                        part.addparam(b'key', exc.key, mandatory=False)
                     if exc.new is not None:
-                        part.addparam('new', exc.new, mandatory=False)
+                        part.addparam(b'new', exc.new, mandatory=False)
                     if exc.old is not None:
-                        part.addparam('old', exc.old, mandatory=False)
+                        part.addparam(b'old', exc.old, mandatory=False)
                     if exc.ret is not None:
-                        part.addparam('ret', exc.ret, mandatory=False)
+                        part.addparam(b'ret', exc.ret, mandatory=False)
             except error.BundleValueError as exc:
-                errpart = bundler.newpart('error:unsupportedcontent')
+                errpart = bundler.newpart(b'error:unsupportedcontent')
                 if exc.parttype is not None:
-                    errpart.addparam('parttype', exc.parttype)
+                    errpart.addparam(b'parttype', exc.parttype)
                 if exc.params:
-                    errpart.addparam('params', '\0'.join(exc.params))
+                    errpart.addparam(b'params', b'\0'.join(exc.params))
             except error.Abort as exc:
-                manargs = [('message', stringutil.forcebytestr(exc))]
+                manargs = [(b'message', stringutil.forcebytestr(exc))]
                 advargs = []
                 if exc.hint is not None:
-                    advargs.append(('hint', exc.hint))
+                    advargs.append((b'hint', exc.hint))
                 bundler.addpart(
-                    bundle2.bundlepart('error:abort', manargs, advargs)
+                    bundle2.bundlepart(b'error:abort', manargs, advargs)
                 )
             except error.PushRaced as exc:
                 bundler.newpart(
-                    'error:pushraced',
-                    [('message', stringutil.forcebytestr(exc))],
+                    b'error:pushraced',
+                    [(b'message', stringutil.forcebytestr(exc))],
                 )
             return wireprototypes.streamreslegacy(gen=bundler.getchunks())
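
The v1 responses above lean on PEP 461, which restored %-formatting for
bytes in Python 3.5; a minimal sketch with made-up values:

    success, r = 1, b'deadbeef'
    b'%d %s\n' % (success, r)   # b'1 deadbeef\n'

Without the b prefix on the format string, '%d %s\n' % (1, b'deadbeef')
would silently interpolate the repr (b'deadbeef') into a str on
Python 3, corrupting the wire payload.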
--- a/mercurial/wireprotov2peer.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/wireprotov2peer.py	Sun Oct 06 09:48:39 2019 -0400
@@ -58,14 +58,14 @@
     """
     if target.get(b'protocol') not in SUPPORTED_REDIRECT_PROTOCOLS:
         ui.note(
-            _('(remote redirect target %s uses unsupported protocol: %s)\n')
+            _(b'(remote redirect target %s uses unsupported protocol: %s)\n')
             % (target[b'name'], target.get(b'protocol', b''))
         )
         return False
 
     if target.get(b'snirequired') and not sslutil.hassni:
         ui.note(
-            _('(redirect target %s requires SNI, which is unsupported)\n')
+            _(b'(redirect target %s requires SNI, which is unsupported)\n')
             % target[b'name']
         )
         return False
@@ -81,14 +81,14 @@
         if not tlsversions & supported:
             ui.note(
                 _(
-                    '(remote redirect target %s requires unsupported TLS '
-                    'versions: %s)\n'
+                    b'(remote redirect target %s requires unsupported TLS '
+                    b'versions: %s)\n'
                 )
                 % (target[b'name'], b', '.join(sorted(tlsversions)))
             )
             return False
 
-    ui.note(_('(remote redirect target %s is compatible)\n') % target[b'name'])
+    ui.note(_(b'(remote redirect target %s is compatible)\n') % target[b'name'])
 
     return True
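 # An illustrative (hypothetical) redirect target that would pass the
 # checks above on an SNI-capable client:
 #
 #   {b'name': b'cdn', b'protocol': b'https', b'snirequired': True,
 #    b'uris': [b'https://cdn.example/']}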
 
@@ -181,9 +181,9 @@
                 if self._redirect:
                     raise error.Abort(
                         _(
-                            'received unexpected response data '
-                            'after content redirect; the remote is '
-                            'buggy'
+                            b'received unexpected response data '
+                            b'after content redirect; the remote is '
+                            b'buggy'
                         )
                     )
 
@@ -215,9 +215,9 @@
             )
             return
 
-        atoms = [{'msg': o[b'error'][b'message']}]
+        atoms = [{b'msg': o[b'error'][b'message']}]
         if b'args' in o[b'error']:
-            atoms[0]['args'] = o[b'error'][b'args']
+            atoms[0][b'args'] = o[b'error'][b'args']
 
         raise error.RepoError(formatrichmessage(atoms))
 
@@ -293,8 +293,8 @@
             command, args, redirect=redirect
         )
 
-        if action != 'noop':
-            raise error.ProgrammingError('%s not yet supported' % action)
+        if action != b'noop':
+            raise error.ProgrammingError(b'%s not yet supported' % action)
 
         rid = request.requestid
         self._requests[rid] = request
@@ -312,10 +312,10 @@
         """
         action, meta = self._reactor.flushcommands()
 
-        if action != 'sendframes':
-            raise error.ProgrammingError('%s not yet supported' % action)
+        if action != b'sendframes':
+            raise error.ProgrammingError(b'%s not yet supported' % action)
 
-        return meta['framegen']
+        return meta[b'framegen']
 
     def readdata(self, framefh):
         """Attempt to read data and do work.
@@ -329,7 +329,7 @@
                 # TODO tell reactor?
                 self._frameseof = True
             else:
-                self._ui.debug('received %r\n' % frame)
+                self._ui.debug(b'received %r\n' % frame)
                 self._processframe(frame)
 
         # Also try to read the first redirect.
@@ -347,8 +347,8 @@
 
         action, meta = self._reactor.onframerecv(frame)
 
-        if action == 'error':
-            e = error.RepoError(meta['message'])
+        if action == b'error':
+            e = error.RepoError(meta[b'message'])
 
             if frame.requestid in self._responses:
                 self._responses[frame.requestid]._oninputcomplete()
@@ -360,24 +360,24 @@
                 raise e
 
             return
-        elif action == 'noop':
+        elif action == b'noop':
             return
-        elif action == 'responsedata':
+        elif action == b'responsedata':
             # Handled below.
             pass
         else:
-            raise error.ProgrammingError('action not handled: %s' % action)
+            raise error.ProgrammingError(b'action not handled: %s' % action)
 
         if frame.requestid not in self._requests:
             raise error.ProgrammingError(
-                'received frame for unknown request; this is either a bug in '
-                'the clientreactor not screening for this or this instance was '
-                'never told about this request: %r' % frame
+                b'received frame for unknown request; this is either a bug in '
+                b'the clientreactor not screening for this or this instance was '
+                b'never told about this request: %r' % frame
             )
 
         response = self._responses[frame.requestid]
 
-        if action == 'responsedata':
+        if action == b'responsedata':
             # Any failures processing this frame should bubble up to the
             # future tracking the request.
             try:
@@ -397,12 +397,12 @@
                     response._onerror(e)
         else:
             raise error.ProgrammingError(
-                'unhandled action from clientreactor: %s' % action
+                b'unhandled action from clientreactor: %s' % action
             )
 
     def _processresponsedata(self, frame, meta, response):
         # This can raise. The caller can handle it.
-        response._onresponsedata(meta['data'])
+        response._onresponsedata(meta[b'data'])
 
         # We need to be careful about resolving futures prematurely. If a
         # response is a redirect response, resolving the future before the
@@ -414,7 +414,7 @@
         # EOS occurs or until the initial response object is fully received.
 
         # Always react to eos.
-        if meta['eos']:
+        if meta[b'eos']:
             response._oninputcomplete()
             del self._requests[frame.requestid]
 
@@ -446,28 +446,28 @@
 
     def _followredirect(self, requestid, redirect):
         """Called to initiate redirect following for a request."""
-        self._ui.note(_('(following redirect to %s)\n') % redirect.url)
+        self._ui.note(_(b'(following redirect to %s)\n') % redirect.url)
 
         # TODO handle framed responses.
         if redirect.mediatype != b'application/mercurial-cbor':
             raise error.Abort(
-                _('cannot handle redirects for the %s media type')
+                _(b'cannot handle redirects for the %s media type')
                 % redirect.mediatype
             )
 
         if redirect.fullhashes:
             self._ui.warn(
                 _(
-                    '(support for validating hashes on content '
-                    'redirects not supported)\n'
+                    b'(support for validating hashes on content '
+                    b'redirects not supported)\n'
                 )
             )
 
         if redirect.serverdercerts or redirect.servercadercerts:
             self._ui.warn(
                 _(
-                    '(support for pinning server certificates on '
-                    'content redirects not supported)\n'
+                    b'(support for pinning server certificates on '
+                    b'content redirects not supported)\n'
                 )
             )
 
@@ -481,10 +481,10 @@
             res = self._opener.open(req)
         except util.urlerr.httperror as e:
             if e.code == 401:
-                raise error.Abort(_('authorization failed'))
+                raise error.Abort(_(b'authorization failed'))
             raise
         except util.httplib.HTTPException as e:
-            self._ui.debug('http error requesting %s\n' % req.get_full_url())
+            self._ui.debug(b'http error requesting %s\n' % req.get_full_url())
             self._ui.traceback()
             raise IOError(None, e)
 
@@ -567,10 +567,10 @@
 
 
 COMMAND_DECODERS = {
-    'branchmap': decodebranchmap,
-    'heads': decodeheads,
-    'known': decodeknown,
-    'listkeys': decodelistkeys,
-    'lookup': decodelookup,
-    'pushkey': decodepushkey,
+    b'branchmap': decodebranchmap,
+    b'heads': decodeheads,
+    b'known': decodeknown,
+    b'listkeys': decodelistkeys,
+    b'lookup': decodelookup,
+    b'pushkey': decodepushkey,
 }
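
The table above is keyed by bytes because wire command names arrive as
bytes; a minimal sketch (toy dict) of the Python 3 mismatch the
byteification avoids:

    decoders = {'heads': None}   # str key left unconverted
    b'heads' in decoders         # False on Python 3: b'heads' != 'heads'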
--- a/mercurial/wireprotov2server.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/wireprotov2server.py	Sun Oct 06 09:48:39 2019 -0400
@@ -56,14 +56,14 @@
     if not urlparts:
         res.status = b'200 OK'
         res.headers[b'Content-Type'] = b'text/plain'
-        res.setbodybytes(_('HTTP version 2 API handler'))
+        res.setbodybytes(_(b'HTTP version 2 API handler'))
         return
 
     if len(urlparts) == 1:
         res.status = b'404 Not Found'
         res.headers[b'Content-Type'] = b'text/plain'
         res.setbodybytes(
-            _('do not know how to process %s\n') % req.dispatchpath
+            _(b'do not know how to process %s\n') % req.dispatchpath
         )
         return
 
@@ -72,13 +72,13 @@
     if permission not in (b'ro', b'rw'):
         res.status = b'404 Not Found'
         res.headers[b'Content-Type'] = b'text/plain'
-        res.setbodybytes(_('unknown permission: %s') % permission)
+        res.setbodybytes(_(b'unknown permission: %s') % permission)
         return
 
-    if req.method != 'POST':
+    if req.method != b'POST':
         res.status = b'405 Method Not Allowed'
         res.headers[b'Allow'] = b'POST'
-        res.setbodybytes(_('commands require POST requests'))
+        res.setbodybytes(_(b'commands require POST requests'))
         return
 
     # At some point we'll want to use our own API instead of recycling the
@@ -86,12 +86,12 @@
     # TODO return reasonable responses - not responses that overload the
     # HTTP status line message for error reporting.
     try:
-        checkperm(rctx, req, 'pull' if permission == b'ro' else 'push')
+        checkperm(rctx, req, b'pull' if permission == b'ro' else b'push')
     except hgwebcommon.ErrorResponse as e:
         res.status = hgwebcommon.statusmessage(e.code, pycompat.bytestr(e))
         for k, v in e.headers:
             res.headers[k] = v
-        res.setbodybytes('permission denied')
+        res.setbodybytes(b'permission denied')
         return
 
     # We have a special endpoint to reflect the request back at the client.
@@ -102,12 +102,12 @@
     # Extra commands that we handle that aren't really wire protocol
     # commands. Think extra hard before making this hackery available to
     # extensions.
-    extracommands = {'multirequest'}
+    extracommands = {b'multirequest'}
 
     if command not in COMMANDS and command not in extracommands:
         res.status = b'404 Not Found'
         res.headers[b'Content-Type'] = b'text/plain'
-        res.setbodybytes(_('unknown wire protocol command: %s\n') % command)
+        res.setbodybytes(_(b'unknown wire protocol command: %s\n') % command)
         return
 
     repo = rctx.repo
@@ -121,7 +121,7 @@
     ):
         res.status = b'404 Not Found'
         res.headers[b'Content-Type'] = b'text/plain'
-        res.setbodybytes(_('invalid wire protocol command: %s') % command)
+        res.setbodybytes(_(b'invalid wire protocol command: %s') % command)
         return
 
     # TODO consider cases where proxies may add additional Accept headers.
@@ -129,7 +129,7 @@
         res.status = b'406 Not Acceptable'
         res.headers[b'Content-Type'] = b'text/plain'
         res.setbodybytes(
-            _('client MUST specify Accept header with value: %s\n')
+            _(b'client MUST specify Accept header with value: %s\n')
             % FRAMINGTYPE
         )
         return
@@ -140,7 +140,7 @@
         # since client does Accept it.
         res.headers[b'Content-Type'] = b'text/plain'
         res.setbodybytes(
-            _('client MUST send Content-Type header with ' 'value: %s\n')
+            _(b'client MUST send Content-Type header with ' b'value: %s\n')
             % FRAMINGTYPE
         )
         return
@@ -160,10 +160,10 @@
     """
     # Reflection APIs have a history of being abused, accidentally disclosing
     # sensitive data, etc. So we have a config knob.
-    if not ui.configbool('experimental', 'web.api.debugreflect'):
+    if not ui.configbool(b'experimental', b'web.api.debugreflect'):
         res.status = b'404 Not Found'
         res.headers[b'Content-Type'] = b'text/plain'
-        res.setbodybytes(_('debugreflect service not available'))
+        res.setbodybytes(_(b'debugreflect service not available'))
         return
 
     # We assume we have a unified framing protocol request body.
@@ -187,7 +187,7 @@
         states.append(templatefilters.json((action, meta)))
 
     action, meta = reactor.oninputeof()
-    meta['action'] = action
+    meta[b'action'] = action
     states.append(templatefilters.json(meta))
 
     res.status = b'200 OK'
@@ -216,10 +216,10 @@
 
         action, meta = reactor.onframerecv(frame)
 
-        if action == 'wantframe':
+        if action == b'wantframe':
             # Need more data before we can do anything.
             continue
-        elif action == 'runcommand':
+        elif action == b'runcommand':
             # Defer creating output stream because we need to wait for
             # protocol settings frames so proper encoding can be applied.
             if not outstream:
@@ -243,29 +243,29 @@
 
             seencommand = True
 
-        elif action == 'error':
+        elif action == b'error':
             # TODO define proper error mechanism.
             res.status = b'200 OK'
             res.headers[b'Content-Type'] = b'text/plain'
-            res.setbodybytes(meta['message'] + b'\n')
+            res.setbodybytes(meta[b'message'] + b'\n')
             return
         else:
             raise error.ProgrammingError(
-                'unhandled action from frame processor: %s' % action
+                b'unhandled action from frame processor: %s' % action
             )
 
     action, meta = reactor.oninputeof()
-    if action == 'sendframes':
+    if action == b'sendframes':
         # We assume we haven't started sending the response yet. If we're
         # wrong, the response type will raise an exception.
         res.status = b'200 OK'
         res.headers[b'Content-Type'] = FRAMINGTYPE
-        res.setbodygen(meta['framegen'])
-    elif action == 'noop':
+        res.setbodygen(meta[b'framegen'])
+    elif action == b'noop':
         pass
     else:
         raise error.ProgrammingError(
-            'unhandled action from frame processor: %s' % action
+            b'unhandled action from frame processor: %s' % action
         )
 
 
@@ -301,31 +301,31 @@
     # TODO consider allowing multiple commands to regular command URLs
     # iff each command is the same.
 
-    proto = httpv2protocolhandler(req, ui, args=command['args'])
+    proto = httpv2protocolhandler(req, ui, args=command[b'args'])
 
     if reqcommand == b'multirequest':
-        if not COMMANDS.commandavailable(command['command'], proto):
+        if not COMMANDS.commandavailable(command[b'command'], proto):
             # TODO proper error mechanism
             res.status = b'200 OK'
             res.headers[b'Content-Type'] = b'text/plain'
             res.setbodybytes(
-                _('wire protocol command not available: %s')
-                % command['command']
+                _(b'wire protocol command not available: %s')
+                % command[b'command']
             )
             return True
 
         # TODO don't use assert here, since it may be elided by -O.
         assert authedperm in (b'ro', b'rw')
-        wirecommand = COMMANDS[command['command']]
-        assert wirecommand.permission in ('push', 'pull')
+        wirecommand = COMMANDS[command[b'command']]
+        assert wirecommand.permission in (b'push', b'pull')
 
-        if authedperm == b'ro' and wirecommand.permission != 'pull':
+        if authedperm == b'ro' and wirecommand.permission != b'pull':
             # TODO proper error mechanism
             res.status = b'403 Forbidden'
             res.headers[b'Content-Type'] = b'text/plain'
             res.setbodybytes(
-                _('insufficient permissions to execute ' 'command: %s')
-                % command['command']
+                _(b'insufficient permissions to execute ' b'command: %s')
+                % command[b'command']
             )
             return True
 
@@ -340,53 +340,53 @@
             res.status = b'200 OK'
             res.headers[b'Content-Type'] = b'text/plain'
             res.setbodybytes(
-                _('multiple commands cannot be issued to this ' 'URL')
+                _(b'multiple commands cannot be issued to this ' b'URL')
             )
             return True
 
-        if reqcommand != command['command']:
+        if reqcommand != command[b'command']:
             # TODO define proper error mechanism
             res.status = b'200 OK'
             res.headers[b'Content-Type'] = b'text/plain'
-            res.setbodybytes(_('command in frame must match command in URL'))
+            res.setbodybytes(_(b'command in frame must match command in URL'))
             return True
 
     res.status = b'200 OK'
     res.headers[b'Content-Type'] = FRAMINGTYPE
 
     try:
-        objs = dispatch(repo, proto, command['command'], command['redirect'])
+        objs = dispatch(repo, proto, command[b'command'], command[b'redirect'])
 
         action, meta = reactor.oncommandresponsereadyobjects(
-            outstream, command['requestid'], objs
+            outstream, command[b'requestid'], objs
         )
 
     except error.WireprotoCommandError as e:
         action, meta = reactor.oncommanderror(
-            outstream, command['requestid'], e.message, e.messageargs
+            outstream, command[b'requestid'], e.message, e.messageargs
         )
 
     except Exception as e:
         action, meta = reactor.onservererror(
             outstream,
-            command['requestid'],
-            _('exception when invoking command: %s')
+            command[b'requestid'],
+            _(b'exception when invoking command: %s')
             % stringutil.forcebytestr(e),
         )
 
-    if action == 'sendframes':
-        res.setbodygen(meta['framegen'])
+    if action == b'sendframes':
+        res.setbodygen(meta[b'framegen'])
         return True
-    elif action == 'noop':
+    elif action == b'noop':
         return False
     else:
         raise error.ProgrammingError(
-            'unhandled event from reactor: %s' % action
+            b'unhandled event from reactor: %s' % action
         )
 
 
 def getdispatchrepo(repo, proto, command):
-    viewconfig = repo.ui.config('server', 'view')
+    viewconfig = repo.ui.config(b'server', b'view')
     return repo.filtered(viewconfig)
 
 
@@ -453,7 +453,7 @@
         cached = cacher.lookup()
 
         if cached:
-            for o in cached['objs']:
+            for o in cached[b'objs']:
                 yield o
             return
 
@@ -484,15 +484,16 @@
         extra = set(self._args) - set(args)
         if extra:
             raise error.WireprotoCommandError(
-                'unsupported argument to command: %s' % ', '.join(sorted(extra))
+                b'unsupported argument to command: %s'
+                % b', '.join(sorted(extra))
             )
 
         # And look for required arguments that are missing.
-        missing = {a for a in args if args[a]['required']} - set(self._args)
+        missing = {a for a in args if args[a][b'required']} - set(self._args)
 
         if missing:
             raise error.WireprotoCommandError(
-                'missing required arguments: %s' % ', '.join(sorted(missing))
+                b'missing required arguments: %s' % b', '.join(sorted(missing))
             )
 
         # Now derive the arguments to pass to the command, taking into
@@ -501,13 +502,13 @@
         for k, meta in sorted(args.items()):
             # This argument wasn't passed by the client.
             if k not in self._args:
-                data[k] = meta['default']()
+                data[k] = meta[b'default']()
                 continue
 
             v = self._args[k]
 
             # Sets may be expressed as lists. Silently normalize.
-            if meta['type'] == 'set' and isinstance(v, list):
+            if meta[b'type'] == b'set' and isinstance(v, list):
                 v = set(v)
 
             # TODO consider more/stronger type validation.
@@ -550,9 +551,9 @@
     transports.
     """
     caps = {
-        'commands': {},
-        'framingmediatypes': [FRAMINGTYPE],
-        'pathfilterprefixes': set(narrowspec.VALID_PREFIXES),
+        b'commands': {},
+        b'framingmediatypes': [FRAMINGTYPE],
+        b'pathfilterprefixes': set(narrowspec.VALID_PREFIXES),
     }
 
     for command, entry in COMMANDS.items():
@@ -562,15 +563,15 @@
             args[arg] = {
                 # TODO should this be a normalized type using CBOR's
                 # terminology?
-                b'type': meta['type'],
-                b'required': meta['required'],
+                b'type': meta[b'type'],
+                b'required': meta[b'required'],
             }
 
-            if not meta['required']:
-                args[arg][b'default'] = meta['default']()
+            if not meta[b'required']:
+                args[arg][b'default'] = meta[b'default']()
 
-            if meta['validvalues']:
-                args[arg][b'validvalues'] = meta['validvalues']
+            if meta[b'validvalues']:
+                args[arg][b'validvalues'] = meta[b'validvalues']
 
         # TODO this type of check should be defined in a per-command callback.
         if (
@@ -579,16 +580,16 @@
         ):
             continue
 
-        caps['commands'][command] = {
-            'args': args,
-            'permissions': [entry.permission],
+        caps[b'commands'][command] = {
+            b'args': args,
+            b'permissions': [entry.permission],
         }
 
         if entry.extracapabilitiesfn:
             extracaps = entry.extracapabilitiesfn(repo, proto)
-            caps['commands'][command].update(extracaps)
+            caps[b'commands'][command].update(extracaps)
 
-    caps['rawrepoformats'] = sorted(repo.requirements & repo.supportedformats)
+    caps[b'rawrepoformats'] = sorted(repo.requirements & repo.supportedformats)
 
     targets = getadvertisedredirecttargets(repo, proto)
     if targets:
@@ -599,12 +600,12 @@
 
         for target in targets:
             entry = {
-                b'name': target['name'],
-                b'protocol': target['protocol'],
-                b'uris': target['uris'],
+                b'name': target[b'name'],
+                b'protocol': target[b'protocol'],
+                b'uris': target[b'uris'],
             }
 
-            for key in ('snirequired', 'tlsversions'):
+            for key in (b'snirequired', b'tlsversions'):
                 if key in target:
                     entry[key] = target[key]
 
@@ -655,7 +656,7 @@
 def wireprotocommand(
     name,
     args=None,
-    permission='push',
+    permission=b'push',
     cachekeyfn=None,
     extracapabilitiesfn=None,
 ):
@@ -710,13 +711,13 @@
     under.
     """
     transports = {
-        k for k, v in wireprototypes.TRANSPORTS.items() if v['version'] == 2
+        k for k, v in wireprototypes.TRANSPORTS.items() if v[b'version'] == 2
     }
 
-    if permission not in ('push', 'pull'):
+    if permission not in (b'push', b'pull'):
         raise error.ProgrammingError(
-            'invalid wire protocol permission; '
-            'got %s; expected "push" or "pull"' % permission
+            b'invalid wire protocol permission; '
+            b'got %s; expected "push" or "pull"' % permission
         )
 
     if args is None:
@@ -724,48 +725,55 @@
 
     if not isinstance(args, dict):
         raise error.ProgrammingError(
-            'arguments for version 2 commands ' 'must be declared as dicts'
+            b'arguments for version 2 commands ' b'must be declared as dicts'
         )
 
     for arg, meta in args.items():
-        if arg == '*':
+        if arg == b'*':
             raise error.ProgrammingError(
-                '* argument name not allowed on ' 'version 2 commands'
+                b'* argument name not allowed on ' b'version 2 commands'
             )
 
         if not isinstance(meta, dict):
             raise error.ProgrammingError(
-                'arguments for version 2 commands '
-                'must declare metadata as a dict'
+                b'arguments for version 2 commands '
+                b'must declare metadata as a dict'
             )
 
-        if 'type' not in meta:
+        if b'type' not in meta:
             raise error.ProgrammingError(
-                '%s argument for command %s does not '
-                'declare type field' % (arg, name)
+                b'%s argument for command %s does not '
+                b'declare type field' % (arg, name)
             )
 
-        if meta['type'] not in ('bytes', 'int', 'list', 'dict', 'set', 'bool'):
+        if meta[b'type'] not in (
+            b'bytes',
+            b'int',
+            b'list',
+            b'dict',
+            b'set',
+            b'bool',
+        ):
             raise error.ProgrammingError(
-                '%s argument for command %s has '
-                'illegal type: %s' % (arg, name, meta['type'])
+                b'%s argument for command %s has '
+                b'illegal type: %s' % (arg, name, meta[b'type'])
             )
 
-        if 'example' not in meta:
+        if b'example' not in meta:
             raise error.ProgrammingError(
-                '%s argument for command %s does not '
-                'declare example field' % (arg, name)
+                b'%s argument for command %s does not '
+                b'declare example field' % (arg, name)
             )
 
-        meta['required'] = 'default' not in meta
+        meta[b'required'] = b'default' not in meta
 
-        meta.setdefault('default', lambda: None)
-        meta.setdefault('validvalues', None)
+        meta.setdefault(b'default', lambda: None)
+        meta.setdefault(b'validvalues', None)
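     # Illustrative metadata (not from the original source): an argument
     # declared as {b'type': b'bool', b'example': True} has no b'default'
     # key, so it is marked b'required' above; adding
     # b'default': lambda: False would make it optional.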
 
     def register(func):
         if name in COMMANDS:
             raise error.ProgrammingError(
-                '%s command already registered ' 'for version 2' % name
+                b'%s command already registered ' b'for version 2' % name
             )
 
         COMMANDS[name] = wireprototypes.commandentry(
@@ -796,16 +804,18 @@
     * The repository path.
     """
     if not allargs:
-        raise error.ProgrammingError('only allargs=True is currently supported')
+        raise error.ProgrammingError(
+            b'only allargs=True is currently supported'
+        )
 
     if localversion is None:
-        raise error.ProgrammingError('must set localversion argument value')
+        raise error.ProgrammingError(b'must set localversion argument value')
 
     def cachekeyfn(repo, proto, cacher, **args):
         spec = COMMANDS[command]
 
         # Commands that mutate the repo can not be cached.
-        if spec.permission == 'push':
+        if spec.permission == b'push':
             return None
 
         # TODO config option to disable caching.
@@ -880,13 +890,13 @@
 
     if not isinstance(revisions, list):
         raise error.WireprotoCommandError(
-            'revisions must be defined as an ' 'array'
+            b'revisions must be defined as an ' b'array'
         )
 
     for spec in revisions:
         if b'type' not in spec:
             raise error.WireprotoCommandError(
-                'type key not present in revision specifier'
+                b'type key not present in revision specifier'
             )
 
         typ = spec[b'type']
@@ -894,8 +904,8 @@
         if typ == b'changesetexplicit':
             if b'nodes' not in spec:
                 raise error.WireprotoCommandError(
-                    'nodes key not present in changesetexplicit revision '
-                    'specifier'
+                    b'nodes key not present in changesetexplicit revision '
+                    b'specifier'
                 )
 
             for node in spec[b'nodes']:
@@ -907,8 +917,8 @@
             for key in (b'nodes', b'depth'):
                 if key not in spec:
                     raise error.WireprotoCommandError(
-                        '%s key not present in changesetexplicitdepth revision '
-                        'specifier',
+                        b'%s key not present in changesetexplicitdepth revision '
+                        b'specifier',
                         (key,),
                     )
 
@@ -925,14 +935,14 @@
             for key in (b'roots', b'heads'):
                 if key not in spec:
                     raise error.WireprotoCommandError(
-                        '%s key not present in changesetdagrange revision '
-                        'specifier',
+                        b'%s key not present in changesetdagrange revision '
+                        b'specifier',
                         (key,),
                     )
 
             if not spec[b'heads']:
                 raise error.WireprotoCommandError(
-                    'heads key in changesetdagrange cannot be empty'
+                    b'heads key in changesetdagrange cannot be empty'
                 )
 
             if spec[b'roots']:
@@ -947,39 +957,39 @@
 
         else:
             raise error.WireprotoCommandError(
-                'unknown revision specifier type: %s', (typ,)
+                b'unknown revision specifier type: %s', (typ,)
             )
 
     return nodes
 
 
-@wireprotocommand('branchmap', permission='pull')
+@wireprotocommand(b'branchmap', permission=b'pull')
 def branchmapv2(repo, proto):
     yield {encoding.fromlocal(k): v for k, v in repo.branchmap().iteritems()}
 
 
-@wireprotocommand('capabilities', permission='pull')
+@wireprotocommand(b'capabilities', permission=b'pull')
 def capabilitiesv2(repo, proto):
     yield _capabilitiesv2(repo, proto)
 
 
 @wireprotocommand(
-    'changesetdata',
+    b'changesetdata',
     args={
-        'revisions': {
-            'type': 'list',
-            'example': [
+        b'revisions': {
+            b'type': b'list',
+            b'example': [
                 {b'type': b'changesetexplicit', b'nodes': [b'abcdef...'],}
             ],
         },
-        'fields': {
-            'type': 'set',
-            'default': set,
-            'example': {b'parents', b'revision'},
-            'validvalues': {b'bookmarks', b'parents', b'phase', b'revision'},
+        b'fields': {
+            b'type': b'set',
+            b'default': set,
+            b'example': {b'parents', b'revision'},
+            b'validvalues': {b'bookmarks', b'parents', b'phase', b'revision'},
         },
     },
-    permission='pull',
+    permission=b'pull',
 )
 def changesetdata(repo, proto, revisions, fields):
     # TODO look for unknown fields and abort when they can't be serviced.
@@ -990,7 +1000,7 @@
     publishing = repo.publishing()
 
     if outgoing:
-        repo.hook('preoutgoing', throw=True, source='serve')
+        repo.hook(b'preoutgoing', throw=True, source=b'serve')
 
     yield {
         b'totalitems': len(outgoing),
@@ -1078,7 +1088,7 @@
     fl = repo.file(path)
 
     if not len(fl):
-        raise FileAccessError(path, 'unknown file: %s', (path,))
+        raise FileAccessError(path, b'unknown file: %s', (path,))
 
     return fl
 
@@ -1125,8 +1135,8 @@
             for pattern in pathfilter.get(key, []):
                 if not pattern.startswith((b'path:', b'rootfilesin:')):
                     raise error.WireprotoCommandError(
-                        '%s pattern must begin with `path:` or `rootfilesin:`; '
-                        'got %s',
+                        b'%s pattern must begin with `path:` or `rootfilesin:`; '
+                        b'got %s',
                         (key, pattern),
                     )
 
@@ -1146,27 +1156,27 @@
 
 
 @wireprotocommand(
-    'filedata',
+    b'filedata',
     args={
-        'haveparents': {
-            'type': 'bool',
-            'default': lambda: False,
-            'example': True,
+        b'haveparents': {
+            b'type': b'bool',
+            b'default': lambda: False,
+            b'example': True,
         },
-        'nodes': {'type': 'list', 'example': [b'0123456...'],},
-        'fields': {
-            'type': 'set',
-            'default': set,
-            'example': {b'parents', b'revision'},
-            'validvalues': {b'parents', b'revision', b'linknode'},
+        b'nodes': {b'type': b'list', b'example': [b'0123456...'],},
+        b'fields': {
+            b'type': b'set',
+            b'default': set,
+            b'example': {b'parents', b'revision'},
+            b'validvalues': {b'parents', b'revision', b'linknode'},
         },
-        'path': {'type': 'bytes', 'example': b'foo.txt',},
+        b'path': {b'type': b'bytes', b'example': b'foo.txt',},
     },
-    permission='pull',
+    permission=b'pull',
     # TODO censoring a file revision won't invalidate the cache.
     # Figure out a way to take censoring into account when deriving
     # the cache key.
-    cachekeyfn=makecommandcachekeyfn('filedata', 1, allargs=True),
+    cachekeyfn=makecommandcachekeyfn(b'filedata', 1, allargs=True),
 )
 def filedata(repo, proto, haveparents, nodes, fields, path):
     # TODO this API allows access to file revisions that are attached to
@@ -1188,7 +1198,7 @@
             store.rev(node)
         except error.LookupError:
             raise error.WireprotoCommandError(
-                'unknown file node: %s', (hex(node),)
+                b'unknown file node: %s', (hex(node),)
             )
 
         # TODO by creating the filectx against a specific file revision
@@ -1223,41 +1233,41 @@
 
 
 @wireprotocommand(
-    'filesdata',
+    b'filesdata',
     args={
-        'haveparents': {
-            'type': 'bool',
-            'default': lambda: False,
-            'example': True,
+        b'haveparents': {
+            b'type': b'bool',
+            b'default': lambda: False,
+            b'example': True,
         },
-        'fields': {
-            'type': 'set',
-            'default': set,
-            'example': {b'parents', b'revision'},
-            'validvalues': {
+        b'fields': {
+            b'type': b'set',
+            b'default': set,
+            b'example': {b'parents', b'revision'},
+            b'validvalues': {
                 b'firstchangeset',
                 b'linknode',
                 b'parents',
                 b'revision',
             },
         },
-        'pathfilter': {
-            'type': 'dict',
-            'default': lambda: None,
-            'example': {b'include': [b'path:tests']},
+        b'pathfilter': {
+            b'type': b'dict',
+            b'default': lambda: None,
+            b'example': {b'include': [b'path:tests']},
         },
-        'revisions': {
-            'type': 'list',
-            'example': [
+        b'revisions': {
+            b'type': b'list',
+            b'example': [
                 {b'type': b'changesetexplicit', b'nodes': [b'abcdef...'],}
             ],
         },
     },
-    permission='pull',
+    permission=b'pull',
     # TODO censoring a file revision won't invalidate the cache.
     # Figure out a way to take censoring into account when deriving
     # the cache key.
-    cachekeyfn=makecommandcachekeyfn('filesdata', 1, allargs=True),
+    cachekeyfn=makecommandcachekeyfn(b'filesdata', 1, allargs=True),
     extracapabilitiesfn=filesdatacapabilities,
 )
 def filesdata(repo, proto, haveparents, fields, pathfilter, revisions):
@@ -1327,29 +1337,33 @@
 
 
 @wireprotocommand(
-    'heads',
+    b'heads',
     args={
-        'publiconly': {
-            'type': 'bool',
-            'default': lambda: False,
-            'example': False,
+        b'publiconly': {
+            b'type': b'bool',
+            b'default': lambda: False,
+            b'example': False,
         },
     },
-    permission='pull',
+    permission=b'pull',
 )
 def headsv2(repo, proto, publiconly):
     if publiconly:
-        repo = repo.filtered('immutable')
+        repo = repo.filtered(b'immutable')
 
     yield repo.heads()
 
 
 @wireprotocommand(
-    'known',
+    b'known',
     args={
-        'nodes': {'type': 'list', 'default': list, 'example': [b'deadbeef'],},
+        b'nodes': {
+            b'type': b'list',
+            b'default': list,
+            b'example': [b'deadbeef'],
+        },
     },
-    permission='pull',
+    permission=b'pull',
 )
 def knownv2(repo, proto, nodes):
     result = b''.join(b'1' if n else b'0' for n in repo.known(nodes))
@@ -1357,9 +1371,9 @@
 
 
 @wireprotocommand(
-    'listkeys',
-    args={'namespace': {'type': 'bytes', 'example': b'ns',},},
-    permission='pull',
+    b'listkeys',
+    args={b'namespace': {b'type': b'bytes', b'example': b'ns',},},
+    permission=b'pull',
 )
 def listkeysv2(repo, proto, namespace):
     keys = repo.listkeys(encoding.tolocal(namespace))
@@ -1372,9 +1386,9 @@
 
 
 @wireprotocommand(
-    'lookup',
-    args={'key': {'type': 'bytes', 'example': b'foo',},},
-    permission='pull',
+    b'lookup',
+    args={b'key': {b'type': b'bytes', b'example': b'foo',},},
+    permission=b'pull',
 )
 def lookupv2(repo, proto, key):
     key = encoding.tolocal(key)
@@ -1396,24 +1410,24 @@
 
 
 @wireprotocommand(
-    'manifestdata',
+    b'manifestdata',
     args={
-        'nodes': {'type': 'list', 'example': [b'0123456...'],},
-        'haveparents': {
-            'type': 'bool',
-            'default': lambda: False,
-            'example': True,
+        b'nodes': {b'type': b'list', b'example': [b'0123456...'],},
+        b'haveparents': {
+            b'type': b'bool',
+            b'default': lambda: False,
+            b'example': True,
         },
-        'fields': {
-            'type': 'set',
-            'default': set,
-            'example': {b'parents', b'revision'},
-            'validvalues': {b'parents', b'revision'},
+        b'fields': {
+            b'type': b'set',
+            b'default': set,
+            b'example': {b'parents', b'revision'},
+            b'validvalues': {b'parents', b'revision'},
         },
-        'tree': {'type': 'bytes', 'example': b'',},
+        b'tree': {b'type': b'bytes', b'example': b'',},
     },
-    permission='pull',
-    cachekeyfn=makecommandcachekeyfn('manifestdata', 1, allargs=True),
+    permission=b'pull',
+    cachekeyfn=makecommandcachekeyfn(b'manifestdata', 1, allargs=True),
     extracapabilitiesfn=manifestdatacapabilities,
 )
 def manifestdata(repo, proto, haveparents, nodes, fields, tree):
@@ -1424,7 +1438,7 @@
         try:
             store.rev(node)
         except error.LookupError:
-            raise error.WireprotoCommandError('unknown node: %s', (node,))
+            raise error.WireprotoCommandError(b'unknown node: %s', (node,))
 
     revisions = store.emitrevisions(
         nodes,
@@ -1466,14 +1480,14 @@
 
 
 @wireprotocommand(
-    'pushkey',
+    b'pushkey',
     args={
-        'namespace': {'type': 'bytes', 'example': b'ns',},
-        'key': {'type': 'bytes', 'example': b'key',},
-        'old': {'type': 'bytes', 'example': b'old',},
-        'new': {'type': 'bytes', 'example': b'new',},
+        b'namespace': {b'type': b'bytes', b'example': b'ns',},
+        b'key': {b'type': b'bytes', b'example': b'key',},
+        b'old': {b'type': b'bytes', b'example': b'old',},
+        b'new': {b'type': b'bytes', b'example': b'new',},
     },
-    permission='push',
+    permission=b'push',
 )
 def pushkeyv2(repo, proto, namespace, key, old, new):
     # TODO handle ui output redirection
@@ -1486,16 +1500,19 @@
 
 
 @wireprotocommand(
-    'rawstorefiledata',
+    b'rawstorefiledata',
     args={
-        'files': {'type': 'list', 'example': [b'changelog', b'manifestlog'],},
-        'pathfilter': {
-            'type': 'list',
-            'default': lambda: None,
-            'example': {b'include': [b'path:tests']},
+        b'files': {
+            b'type': b'list',
+            b'example': [b'changelog', b'manifestlog'],
+        },
+        b'pathfilter': {
+            b'type': b'list',
+            b'default': lambda: None,
+            b'example': {b'include': [b'path:tests']},
         },
     },
-    permission='pull',
+    permission=b'pull',
 )
 def rawstorefiledata(repo, proto, files, pathfilter):
     if not streamclone.allowservergeneration(repo):
@@ -1546,7 +1563,7 @@
         # We have to use a closure for this to ensure the context manager is
         # closed only after sending the final chunk.
         def getfiledata():
-            with repo.svfs(name, 'rb', auditpath=False) as fh:
+            with repo.svfs(name, b'rb', auditpath=False) as fh:
                 for chunk in util.filechunkiter(fh, limit=size):
                     yield chunk
 
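The getfiledata() closure in the hunk above exists so that the repo.svfs context manager stays open until the consumer has drained the final chunk. A minimal standalone sketch of the same pattern, using hypothetical names (chunked_reader, path, chunk_size) that are not Mercurial API:

    def chunked_reader(path, chunk_size=65536):
        # Returning a generator from a factory keeps the with block
        # alive exactly as long as iteration lasts: the file handle is
        # closed only after the final chunk has been consumed.
        def getdata():
            with open(path, 'rb') as fh:
                while True:
                    chunk = fh.read(chunk_size)
                    if not chunk:
                        break
                    yield chunk
        return getdata()

Usage would be `for chunk in chunked_reader('somefile'): ...`; the file closes when the loop finishes, which is the lifetime guarantee the comment in the hunk above describes.
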
--- a/mercurial/worker.py	Sun Oct 06 09:45:02 2019 -0400
+++ b/mercurial/worker.py	Sun Oct 06 09:48:39 2019 -0400
@@ -44,7 +44,7 @@
 
     # windows
     try:
-        n = int(encoding.environ['NUMBER_OF_PROCESSORS'])
+        n = int(encoding.environ[b'NUMBER_OF_PROCESSORS'])
         if n > 0:
             return n
     except (KeyError, ValueError):
@@ -54,14 +54,14 @@
 
 
 def _numworkers(ui):
-    s = ui.config('worker', 'numcpus')
+    s = ui.config(b'worker', b'numcpus')
     if s:
         try:
             n = int(s)
             if n >= 1:
                 return n
         except ValueError:
-            raise error.Abort(_('number of cpus must be an integer'))
+            raise error.Abort(_(b'number of cpus must be an integer'))
     return min(max(countcpus(), 4), 32)
 
 
@@ -115,7 +115,7 @@
     a thread-based worker. Should be disabled for CPU heavy tasks that don't
     release the GIL.
     '''
-    enabled = ui.configbool('worker', 'enabled')
+    enabled = ui.configbool(b'worker', b'enabled')
     if enabled and worthwhile(ui, costperarg, len(args), threadsafe=threadsafe):
         return _platformworker(ui, func, staticargs, args, hasretval)
     return func(*staticargs + (args,))
@@ -331,8 +331,8 @@
                 # task and does not get to handle the interruption.
                 ui.warn(
                     _(
-                        "failed to kill worker threads while "
-                        "handling an exception\n"
+                        b"failed to kill worker threads while "
+                        b"handling an exception\n"
                     )
                 )
                 return
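For reference, the fallback at the end of _numworkers() above clamps the detected CPU count to the range [4, 32]. A restatement of just that clamp (default_numworkers is a hypothetical name used only for illustration):

    def default_numworkers(cpus):
        # Matches min(max(countcpus(), 4), 32) in _numworkers() above:
        # at least 4 workers, at most 32.
        return min(max(cpus, 4), 32)

    assert default_numworkers(2) == 4     # small machines still get 4
    assert default_numworkers(16) == 16   # in-range counts pass through
    assert default_numworkers(128) == 32  # very wide machines are capped
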
--- a/tests/test-hook.t	Sun Oct 06 09:45:02 2019 -0400
+++ b/tests/test-hook.t	Sun Oct 06 09:48:39 2019 -0400
@@ -1138,7 +1138,6 @@
   Traceback (most recent call last):
   ImportError: No module named hgext_importfail
   Traceback (most recent call last):
-      "precommit", throw=True, parent1=hookp1, parent2=hookp2
   HookLoadError: precommit.importfail hook is invalid: import of "importfail" failed
   abort: precommit.importfail hook is invalid: import of "importfail" failed
 
--- a/tests/test-subrepo.t	Sun Oct 06 09:45:02 2019 -0400
+++ b/tests/test-subrepo.t	Sun Oct 06 09:48:39 2019 -0400
@@ -1119,7 +1119,7 @@
 Ensure a full traceback, not just the SubrepoAbort part
 
   $ hg -R issue1852b update --traceback 2>&1 | grep 'raise error\.Abort'
-      raise error.Abort(_("default path for subrepository not found"))
+      raise error.Abort(_(b"default path for subrepository not found"))
 
 Pull -u now doesn't help
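
The pattern repeated throughout these hunks is a purely token-level rewrite: add a b prefix to every unprefixed string literal. A much-simplified sketch of how such a rewrite can be automated (byteify() here is illustrative only, not the actual conversion script, which is considerably more careful about which literals it rewrites and about preserving layout):

    import io
    import token
    import tokenize

    def byteify(source):
        # Walk the token stream, add a b prefix to every unprefixed
        # string literal, and reassemble the source.
        out = []
        for toktype, tokstr, *_ in tokenize.generate_tokens(
            io.StringIO(source).readline
        ):
            if toktype == token.STRING and tokstr[0] in '\'"':
                tokstr = 'b' + tokstr
            out.append((toktype, tokstr))
        # untokenize() on (type, string) pairs may normalize spacing;
        # acceptable for a sketch.
        return tokenize.untokenize(out)

    print(byteify("ui.config('worker', 'numcpus')"))

Run on that one-liner, this prints the byteified call (modulo token spacing), the same rewrite visible in the _numworkers() hunk above.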